Allow creating magnet links with imdl torrent link

Magnet links can now be created from a metainfo file with:

    imdl torrent link --input METAINFO
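
For illustration (the file name and tracker URL here are made up), a metainfo file whose announce URL is `https://foo.com/announce` produces a single magnet URI on standard output:

    $ imdl torrent link --input foo.torrent
    magnet:?xt=urn:btih:<infohash>&tr=https://foo.com/announce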

type: added
Casey Rodarmor 2020-03-17 03:02:02 -07:00
parent 0b486cc681
commit 57a358e458
25 changed files with 896 additions and 192 deletions

Cargo.lock

@ -124,6 +124,15 @@ dependencies = [
"time",
]
[[package]]
name = "claim"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2e893ee68bf12771457cceea72497bc9cb7da404ec8a5311226d354b895ba4"
dependencies = [
"autocfg",
]
[[package]]
name = "clap"
version = "2.33.0"
@ -313,12 +322,13 @@ dependencies = [
"atty",
"bendy",
"chrono",
"claim",
"console",
"globset",
"imdl-indicatif",
"indoc",
"lazy_static",
"libc",
"log",
"md5",
"pretty_assertions",
"pretty_env_logger",
@ -351,29 +361,6 @@ dependencies = [
"regex",
]
[[package]]
name = "indoc"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9553c1e16c114b8b77ebeb329e5f2876eed62a8d51178c8bc6bff0d65f98f8"
dependencies = [
"indoc-impl",
"proc-macro-hack",
]
[[package]]
name = "indoc-impl"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b714fc08d0961716390977cdff1536234415ac37b509e34e5a983def8340fb75"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
"unindent",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
@ -529,17 +516,6 @@ dependencies = [
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.9"
@ -944,12 +920,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
[[package]]
name = "unindent"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63f18aa3b0e35fed5a0048f029558b1518095ffe2a0a31fb87c93dece93a4993"
[[package]]
name = "update-readme"
version = "0.0.0"


@ -18,7 +18,9 @@ atty = "0.2.0"
chrono = "0.4.1"
console = "0.10.0"
globset = "0.4.0"
lazy_static = "1.4.0"
libc = "0.2.0"
log = "0.4.8"
md5 = "0.7.0"
pretty_assertions = "0.6.0"
pretty_env_logger = "0.4.0"
@ -34,7 +36,6 @@ tempfile = "3.0.0"
unicode-width = "0.1.0"
url = "2.0.0"
walkdir = "2.1.0"
lazy_static = "1.4.0"
[dependencies.bendy]
version = "0.3.0"
@ -53,7 +54,7 @@ version = "0.3.0"
features = ["default", "wrap_help"]
[dev-dependencies]
indoc = "0.3.4"
claim = "0.3.1"
temptree = "0.0.0"
[workspace]


@ -2,4 +2,4 @@
set -euxo pipefail
! grep --color -REni 'FIXME|TODO|XXX' src
! grep --color -REni 'FIXME|TODO|XXX|todo!|#\[ignore]' src


@ -6,9 +6,13 @@ bt := "0"
export RUST_BACKTRACE := bt
log := "warn"
export RUST_LOG := log
# watch filesystem for changes and rerun tests
watch:
cargo watch --exec test
watch +ARGS='':
cargo watch --clear --exec 'test {{ARGS}}'
# show stats about torrents at `PATH`
stats PATH:


@ -3,7 +3,7 @@ pub(crate) use std::{
borrow::Cow,
char,
cmp::Reverse,
collections::{BTreeMap, BTreeSet, HashMap},
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
convert::{Infallible, TryInto},
env,
ffi::{OsStr, OsString},
@ -11,12 +11,13 @@ pub(crate) use std::{
fs::{self, File},
hash::Hash,
io::{self, Read, Write},
iter::Sum,
iter::{self, Sum},
num::{ParseFloatError, ParseIntError, TryFromIntError},
ops::{AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign},
path::{self, Path, PathBuf},
process::{self, Command, ExitStatus},
str::{self, FromStr},
sync::Once,
time::{SystemTime, SystemTimeError},
usize,
};
@ -42,8 +43,12 @@ pub(crate) use unicode_width::UnicodeWidthStr;
pub(crate) use url::{Host, Url};
pub(crate) use walkdir::WalkDir;
// logging functions
#[allow(unused_imports)]
pub(crate) use log::trace;
// modules
pub(crate) use crate::{consts, error};
pub(crate) use crate::{consts, error, host_port_parse_error};
// traits
pub(crate) use crate::{
@ -55,11 +60,13 @@ pub(crate) use crate::{
pub(crate) use crate::{
arguments::Arguments, bytes::Bytes, env::Env, error::Error, file_error::FileError,
file_info::FileInfo, file_path::FilePath, file_status::FileStatus, files::Files, hasher::Hasher,
info::Info, lint::Lint, linter::Linter, md5_digest::Md5Digest, metainfo::Metainfo, mode::Mode,
node::Node, options::Options, output_stream::OutputStream, output_target::OutputTarget,
piece_length_picker::PieceLengthPicker, piece_list::PieceList, platform::Platform,
sha1_digest::Sha1Digest, status::Status, style::Style, subcommand::Subcommand, table::Table,
torrent_summary::TorrentSummary, use_color::UseColor, verifier::Verifier, walker::Walker,
host_port::HostPort, host_port_parse_error::HostPortParseError, info::Info, infohash::Infohash,
lint::Lint, linter::Linter, magnet_link::MagnetLink, md5_digest::Md5Digest, metainfo::Metainfo,
metainfo_error::MetainfoError, mode::Mode, options::Options, output_stream::OutputStream,
output_target::OutputTarget, piece_length_picker::PieceLengthPicker, piece_list::PieceList,
platform::Platform, sha1_digest::Sha1Digest, status::Status, style::Style,
subcommand::Subcommand, table::Table, torrent_summary::TorrentSummary, use_color::UseColor,
verifier::Verifier, walker::Walker,
};
// type aliases


@ -27,8 +27,7 @@ impl Env {
#[cfg(windows)]
ansi_term::enable_ansi_support().ok();
#[cfg(not(test))]
pretty_env_logger::init();
Self::initialize_logging();
let args = Arguments::from_iter_safe(&self.args)?;
@ -39,6 +38,37 @@ impl Env {
args.run(self)
}
/// Initialize `pretty-env-logger` as the global logging backend.
///
/// This function is called in `Env::run`, so the logger will always be
/// initialized when the program runs via `main`, and in tests which construct
/// an `Env` and run it.
///
/// The logger will not be initialized in tests which don't construct an
/// `Env`, for example in unit tests that test functionality below the level
/// of a full program invocation.
///
/// To enable logging in those tests, call `Env::initialize_logging()` like
/// so:
///
/// ```
/// #[test]
/// fn foo() {
/// Env::initialize_logging();
/// // Rest of the test...
/// }
/// ```
///
/// If the logger has already been initialized, `Env::initialize_logging()` is
/// a no-op, so it's safe to call more than once.
pub(crate) fn initialize_logging() {
static ONCE: Once = Once::new();
ONCE.call_once(|| {
pretty_env_logger::init();
});
}
pub(crate) fn new<S, I>(dir: PathBuf, args: I, out: OutputStream, err: OutputStream) -> Self
where
S: Into<OsString>,


@ -5,17 +5,25 @@ use structopt::clap;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum Error {
#[snafu(display("Must provide at least one announce URL"))]
AnnounceEmpty,
#[snafu(display("Failed to parse announce URL: {}", source))]
AnnounceUrlParse { source: url::ParseError },
#[snafu(display("Failed to deserialize torrent metainfo from `{}`: {}", path.display(), source))]
MetainfoLoad {
MetainfoDeserialize {
source: bendy::serde::Error,
path: PathBuf,
},
#[snafu(display("Failed to serialize torrent metainfo: {}", source))]
MetainfoSerialize { source: bendy::serde::Error },
#[snafu(display("Failed to decode torrent metainfo from `{}`: {}", path.display(), error))]
MetainfoDecode {
path: PathBuf,
error: bendy::decoding::Error,
},
#[snafu(display("Metainfo from `{}` failed to validate: {}", path.display(), source))]
MetainfoValidate {
path: PathBuf,
source: MetainfoError,
},
#[snafu(display("Failed to parse byte count `{}`: {}", text, source))]
ByteParse {
text: String,
@ -39,25 +47,18 @@ pub(crate) enum Error {
GlobParse { source: globset::Error },
#[snafu(display("Unknown lint: {}", text))]
LintUnknown { text: String },
#[snafu(display("DHT node port missing: {}", text))]
NodeParsePortMissing { text: String },
#[snafu(display("Failed to parse DHT node host `{}`: {}", text, source))]
NodeParseHost {
text: String,
source: url::ParseError,
},
#[snafu(display("Failed to parse DHT node port `{}`: {}", text, source))]
NodeParsePort { text: String, source: ParseIntError },
#[snafu(display("Failed to find opener utility, please install one of {}", tried.join(",")))]
OpenerMissing { tried: &'static [&'static str] },
#[snafu(display("Output path already exists: `{}`", path.display()))]
OutputExists { path: PathBuf },
#[snafu(display("Failed to serialize torrent info dictionary: {}", source))]
InfoSerialize { source: bendy::serde::Error },
#[snafu(display(
"Internal error, this may indicate a bug in intermodal: {}\n\
Consider filing an issue: https://github.com/casey/imdl/issues/new",
message,
))]
Internal { message: String },
#[snafu(display("Failed to find opener utility, please install one of {}", tried.join(",")))]
OpenerMissing { tried: &'static [&'static str] },
#[snafu(display("Output path already exists: `{}`", path.display()))]
OutputExists { path: PathBuf },
#[snafu(display(
"Path `{}` contains non-normal component: {}",
path.display(),
@ -102,6 +103,8 @@ pub(crate) enum Error {
PieceLengthSmall,
#[snafu(display("Piece length cannot be zero"))]
PieceLengthZero,
#[snafu(display("Private torrents must have tracker"))]
PrivateTrackerless,
#[snafu(display("Failed to write to standard error: {}", source))]
Stderr { source: io::Error },
#[snafu(display("Failed to write to standard output: {}", source))]
@ -128,6 +131,7 @@ impl Error {
match self {
Self::PieceLengthUneven { .. } => Some(Lint::UnevenPieceLength),
Self::PieceLengthSmall { .. } => Some(Lint::SmallPieceLength),
Self::PrivateTrackerless => Some(Lint::PrivateTrackerless),
_ => None,
}
}


@ -1,13 +1,13 @@
use crate::common::*;
#[derive(Debug, PartialEq, Clone)]
pub(crate) struct Node {
pub(crate) struct HostPort {
host: Host,
port: u16,
}
impl FromStr for Node {
type Err = Error;
impl FromStr for HostPort {
type Err = HostPortParseError;
fn from_str(text: &str) -> Result<Self, Self::Err> {
let socket_address_re = Regex::new(
@ -25,24 +25,26 @@ impl FromStr for Node {
let host_text = captures.name("host").unwrap().as_str();
let port_text = captures.name("port").unwrap().as_str();
let host = Host::parse(&host_text).context(error::NodeParseHost {
let host = Host::parse(&host_text).context(host_port_parse_error::Host {
text: text.to_owned(),
})?;
let port = port_text.parse::<u16>().context(error::NodeParsePort {
text: text.to_owned(),
})?;
let port = port_text
.parse::<u16>()
.context(host_port_parse_error::Port {
text: text.to_owned(),
})?;
Ok(Self { host, port })
} else {
Err(Error::NodeParsePortMissing {
Err(HostPortParseError::PortMissing {
text: text.to_owned(),
})
}
}
}
impl Display for Node {
impl Display for HostPort {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}:{}", self.host, self.port)
}
@ -51,8 +53,8 @@ impl Display for Node {
#[derive(Serialize, Deserialize)]
struct Tuple(String, u16);
impl From<&Node> for Tuple {
fn from(node: &Node) -> Self {
impl From<&HostPort> for Tuple {
fn from(node: &HostPort) -> Self {
let host = match &node.host {
Host::Domain(domain) => domain.to_string(),
Host::Ipv4(ipv4) => ipv4.to_string(),
@ -62,7 +64,7 @@ impl From<&Node> for Tuple {
}
}
impl Serialize for Node {
impl Serialize for HostPort {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
@ -71,7 +73,7 @@ impl Serialize for Node {
}
}
impl<'de> Deserialize<'de> for Node {
impl<'de> Deserialize<'de> for HostPort {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
@ -85,7 +87,7 @@ impl<'de> Deserialize<'de> for Node {
}
.map_err(|error| D::Error::custom(format!("Failed to parse node host: {}", error)))?;
Ok(Node {
Ok(HostPort {
host,
port: tuple.1,
})
@ -99,8 +101,8 @@ mod tests {
use std::net::{Ipv4Addr, Ipv6Addr};
fn case(host: Host, port: u16, text: &str, bencode: &str) {
let node = Node { host, port };
let parsed: Node = text.parse().expect(&format!("Failed to parse {}", text));
let node = HostPort { host, port };
let parsed: HostPort = text.parse().expect(&format!("Failed to parse {}", text));
assert_eq!(parsed, node);
let ser = bendy::serde::to_bytes(&node).unwrap();
assert_eq!(
@ -110,7 +112,7 @@ mod tests {
String::from_utf8_lossy(&ser),
bencode,
);
let de = bendy::serde::from_bytes::<Node>(&ser).unwrap();
let de = bendy::serde::from_bytes::<HostPort>(&ser).unwrap();
assert_eq!(de, node);
}


@ -0,0 +1,15 @@
use crate::common::*;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum HostPortParseError {
#[snafu(display("Failed to parse host `{}`: {}", text, source))]
Host {
text: String,
source: url::ParseError,
},
#[snafu(display("Failed to parse port `{}`: {}", text, source))]
Port { text: String, source: ParseIntError },
#[snafu(display("Port missing: `{}`", text))]
PortMissing { text: String },
}


@ -26,4 +26,9 @@ impl Info {
pub(crate) fn content_size(&self) -> Bytes {
self.mode.content_size()
}
pub(crate) fn infohash(&self) -> Result<Infohash> {
let encoded = bendy::serde::ser::to_bytes(self).context(error::InfoSerialize)?;
Ok(Infohash::from_bencoded_info_dict(&encoded))
}
}
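
The infohash added here is the SHA-1 digest of the bencoded `info` dictionary. A minimal standalone sketch of that relationship (not project code, but using the same `sha1` crate the project depends on); hashing empty input yields the empty-string SHA-1 that the magnet link tests below expect:

    // Sketch: infohash == SHA-1 over the bencoded bytes of the info dictionary.
    fn main() {
      let digest = sha1::Sha1::from("".as_bytes()).digest().to_string();
      assert_eq!(digest, "da39a3ee5e6b4b0d3255bfef95601890afd80709");
    }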

src/infohash.rs

@ -0,0 +1,130 @@
use crate::*;
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub(crate) struct Infohash {
inner: Sha1Digest,
}
impl Infohash {
pub(crate) fn load(path: &Path) -> Result<Infohash, Error> {
let bytes = fs::read(path).context(error::Filesystem { path })?;
let value = Value::from_bencode(&bytes).map_err(|error| Error::MetainfoDecode {
path: path.to_owned(),
error,
})?;
match value {
Value::Dict(metainfo) => {
let info = metainfo
.iter()
.find(|pair: &(&Cow<[u8]>, &Value)| pair.0.as_ref() == b"info")
.ok_or_else(|| Error::MetainfoValidate {
path: path.to_owned(),
source: MetainfoError::InfoMissing,
})?
.1;
if let Value::Dict(_) = info {
let encoded = info.to_bencode().map_err(|error| {
Error::internal(format!("Failed to re-encode info dictionary: {}", error))
})?;
Ok(Self::from_bencoded_info_dict(&encoded))
} else {
Err(Error::MetainfoValidate {
path: path.to_owned(),
source: MetainfoError::InfoType,
})
}
}
_ => Err(Error::MetainfoValidate {
path: path.to_owned(),
source: MetainfoError::Type,
}),
}
}
pub(crate) fn from_bencoded_info_dict(info: &[u8]) -> Infohash {
Infohash {
inner: Sha1Digest::from_data(info),
}
}
}
impl Into<Sha1Digest> for Infohash {
fn into(self) -> Sha1Digest {
self.inner
}
}
impl Display for Infohash {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.inner)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn load_invalid() {
let tempdir = temptree! {
foo: "x",
};
let input = tempdir.path().join("foo");
assert_matches!(
Infohash::load(&input),
Err(Error::MetainfoDecode{path, .. })
if path == input
);
}
#[test]
fn load_wrong_type() {
let tempdir = temptree! {
foo: "i0e",
};
let input = tempdir.path().join("foo");
assert_matches!(
Infohash::load(&input),
Err(Error::MetainfoValidate{path, source: MetainfoError::Type})
if path == input
);
}
#[test]
fn load_no_info() {
let tempdir = temptree! {
foo: "de",
};
let input = tempdir.path().join("foo");
assert_matches!(
Infohash::load(&input),
Err(Error::MetainfoValidate{path, source: MetainfoError::InfoMissing})
if path == input
);
}
#[test]
fn load_info_type() {
let tempdir = temptree! {
foo: "d4:infoi0ee",
};
let input = tempdir.path().join("foo");
assert_matches!(
Infohash::load(&input),
Err(Error::MetainfoValidate{path, source: MetainfoError::InfoType})
if path == input
);
}
}
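
`Infohash::load` deliberately re-encodes the raw `info` value found in the bencoded metainfo rather than round-tripping it through the typed `Info` struct: the hash must cover the exact bytes of the dictionary, including any nonstandard keys the struct does not model (see the `infohash_correct_with_nonstandard_info_dict` test added later in this commit). A standalone sketch of why that matters, assuming the `sha1` crate:

    // An extra key in the bencoded info dictionary changes the infohash.
    fn main() {
      let standard = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
      let nonstandard = "d1:ai0e6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
      let a = sha1::Sha1::from(standard).digest().to_string();
      let b = sha1::Sha1::from(nonstandard).digest().to_string();
      assert_ne!(a, b);
    }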


@ -2,18 +2,24 @@ use crate::common::*;
#[derive(Eq, PartialEq, Debug, Copy, Clone, Ord, PartialOrd)]
pub(crate) enum Lint {
UnevenPieceLength,
PrivateTrackerless,
SmallPieceLength,
UnevenPieceLength,
}
impl Lint {
const PRIVATE_TRACKERLESS: &'static str = "private-trackerless";
const SMALL_PIECE_LENGTH: &'static str = "small-piece-length";
const UNEVEN_PIECE_LENGTH: &'static str = "uneven-piece-length";
pub(crate) const VALUES: &'static [&'static str] =
&[Self::SMALL_PIECE_LENGTH, Self::UNEVEN_PIECE_LENGTH];
pub(crate) const VALUES: &'static [&'static str] = &[
Self::PRIVATE_TRACKERLESS,
Self::SMALL_PIECE_LENGTH,
Self::UNEVEN_PIECE_LENGTH,
];
pub(crate) fn name(self) -> &'static str {
match self {
Self::PrivateTrackerless => Self::PRIVATE_TRACKERLESS,
Self::SmallPieceLength => Self::SMALL_PIECE_LENGTH,
Self::UnevenPieceLength => Self::UNEVEN_PIECE_LENGTH,
}
@ -25,6 +31,7 @@ impl FromStr for Lint {
fn from_str(text: &str) -> Result<Self, Self::Err> {
match text.replace('_', "-").to_lowercase().as_str() {
Self::PRIVATE_TRACKERLESS => Ok(Self::PrivateTrackerless),
Self::SMALL_PIECE_LENGTH => Ok(Self::SmallPieceLength),
Self::UNEVEN_PIECE_LENGTH => Ok(Self::UnevenPieceLength),
_ => Err(Error::LintUnknown {
@ -62,6 +69,18 @@ mod tests {
);
}
#[test]
fn convert() {
fn case(text: &str, value: Lint) {
assert_eq!(value, text.parse().unwrap());
assert_eq!(value.name(), text);
}
case("private-trackerless", Lint::PrivateTrackerless);
case("small-piece-length", Lint::SmallPieceLength);
case("uneven-piece-length", Lint::UnevenPieceLength);
}
#[test]
fn from_str_err() {
assert_matches!(

src/magnet_link.rs

@ -0,0 +1,125 @@
use crate::common::*;
pub(crate) struct MagnetLink {
infohash: Infohash,
name: Option<String>,
peers: Vec<HostPort>,
trackers: Vec<Url>,
}
impl MagnetLink {
pub(crate) fn with_infohash(infohash: Infohash) -> MagnetLink {
MagnetLink {
infohash,
name: None,
peers: Vec::new(),
trackers: Vec::new(),
}
}
#[allow(dead_code)]
pub(crate) fn set_name(&mut self, name: impl Into<String>) {
self.name = Some(name.into());
}
#[allow(dead_code)]
pub(crate) fn add_peer(&mut self, peer: HostPort) {
self.peers.push(peer);
}
pub(crate) fn add_tracker(&mut self, tracker: Url) {
self.trackers.push(tracker);
}
pub(crate) fn to_url(&self) -> Url {
let mut url = Url::parse("magnet:").unwrap();
let mut query = format!("xt=urn:btih:{}", self.infohash);
if let Some(name) = &self.name {
query.push_str("&dn=");
query.push_str(&name);
}
for tracker in &self.trackers {
query.push_str("&tr=");
query.push_str(tracker.as_str());
}
for peer in &self.peers {
query.push_str("&x.pe=");
query.push_str(&peer.to_string());
}
url.set_query(Some(&query));
url
}
}
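
The query string assembled by `to_url` uses the conventional magnet parameters: `xt` (exact topic, here the `urn:btih:` infohash), `dn` (display name), `tr` (tracker announce URL, repeatable), and `x.pe` (peer address, repeatable). Schematically, the generated links look like:

    magnet:?xt=urn:btih:<infohash>[&dn=<name>][&tr=<tracker>]...[&x.pe=<host>:<port>]...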
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn basic() {
let link = MagnetLink::with_infohash(Infohash::from_bencoded_info_dict("".as_bytes()));
assert_eq!(
link.to_url().as_str(),
"magnet:?xt=urn:btih:da39a3ee5e6b4b0d3255bfef95601890afd80709"
);
}
#[test]
fn with_name() {
let mut link = MagnetLink::with_infohash(Infohash::from_bencoded_info_dict("".as_bytes()));
link.set_name("foo");
assert_eq!(
link.to_url().as_str(),
"magnet:?xt=urn:btih:da39a3ee5e6b4b0d3255bfef95601890afd80709&dn=foo"
);
}
#[test]
fn with_peer() {
let mut link = MagnetLink::with_infohash(Infohash::from_bencoded_info_dict("".as_bytes()));
link.add_peer("foo.com:1337".parse().unwrap());
assert_eq!(
link.to_url().as_str(),
"magnet:?xt=urn:btih:da39a3ee5e6b4b0d3255bfef95601890afd80709&x.pe=foo.com:1337"
);
}
#[test]
fn with_tracker() {
let mut link = MagnetLink::with_infohash(Infohash::from_bencoded_info_dict("".as_bytes()));
link.add_tracker(Url::parse("http://foo.com/announce").unwrap());
assert_eq!(
link.to_url().as_str(),
"magnet:?xt=urn:btih:da39a3ee5e6b4b0d3255bfef95601890afd80709&tr=http://foo.com/announce"
);
}
#[test]
fn complex() {
let mut link = MagnetLink::with_infohash(Infohash::from_bencoded_info_dict("".as_bytes()));
link.set_name("foo");
link.add_tracker(Url::parse("http://foo.com/announce").unwrap());
link.add_tracker(Url::parse("http://bar.net/announce").unwrap());
link.add_peer("foo.com:1337".parse().unwrap());
link.add_peer("bar.net:666".parse().unwrap());
assert_eq!(
link.to_url().as_str(),
concat!(
"magnet:?xt=urn:btih:da39a3ee5e6b4b0d3255bfef95601890afd80709",
"&dn=foo",
"&tr=http://foo.com/announce",
"&tr=http://bar.net/announce",
"&x.pe=foo.com:1337",
"&x.pe=bar.net:666",
),
);
}
}


@ -60,15 +60,19 @@ mod file_path;
mod file_status;
mod files;
mod hasher;
mod host_port;
mod host_port_parse_error;
mod info;
mod infohash;
mod into_u64;
mod into_usize;
mod lint;
mod linter;
mod magnet_link;
mod md5_digest;
mod metainfo;
mod metainfo_error;
mod mode;
mod node;
mod options;
mod output_stream;
mod output_target;


@ -37,7 +37,7 @@ impl From<md5::Digest> for Md5Digest {
impl Display for Md5Digest {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
for byte in &self.bytes {
write!(f, "{:x}", byte)?;
write!(f, "{:02x}", byte)?;
}
Ok(())
@ -65,4 +65,13 @@ mod tests {
assert_eq!(bytes, string_bytes);
}
#[test]
fn display() {
let digest = Md5Digest {
bytes: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
};
assert_eq!(digest.to_string(), "000102030405060708090a0b0c0d0e0f");
}
}
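
The switch from `{:x}` to `{:02x}` above fixes digests that silently lost leading zeros. A standalone illustration using only `std` formatting:

    // Without a zero-padded width, bytes below 0x10 shrink to one hex character.
    fn main() {
      assert_eq!(format!("{:x}", 0x0Au8), "a");    // old, truncating behavior
      assert_eq!(format!("{:02x}", 0x0Au8), "0a"); // fixed, fixed-width output
    }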


@ -2,7 +2,12 @@ use crate::common::*;
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
pub(crate) struct Metainfo {
pub(crate) announce: String,
#[serde(
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) announce: Option<String>,
#[serde(
rename = "announce-list",
skip_serializing_if = "Option::is_none",
@ -42,7 +47,7 @@ pub(crate) struct Metainfo {
default,
with = "unwrap_or_skip"
)]
pub(crate) nodes: Option<Vec<Node>>,
pub(crate) nodes: Option<Vec<HostPort>>,
}
impl Metainfo {
@ -54,7 +59,8 @@ impl Metainfo {
pub(crate) fn deserialize(path: impl AsRef<Path>, bytes: &[u8]) -> Result<Metainfo, Error> {
let path = path.as_ref();
let metainfo = bendy::serde::de::from_bytes(&bytes).context(error::MetainfoLoad { path })?;
let metainfo =
bendy::serde::de::from_bytes(&bytes).context(error::MetainfoDeserialize { path })?;
Ok(metainfo)
}
@ -82,6 +88,17 @@ impl Metainfo {
pub(crate) fn content_size(&self) -> Bytes {
self.info.content_size()
}
pub(crate) fn trackers<'a>(&'a self) -> impl Iterator<Item = Result<Url>> + 'a {
iter::once(&self.announce)
.flatten()
.chain(self.announce_list.iter().flatten().flatten())
.map(|text| text.parse().context(error::AnnounceUrlParse))
}
pub(crate) fn infohash(&self) -> Result<Infohash> {
self.info.infohash()
}
}
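
`Metainfo::trackers` folds the optional primary `announce` URL and the optional tiered `announce-list` into one iterator. A standalone sketch of the same flattening pattern, using plain `String`s instead of project types:

    use std::iter;

    // An Option<String> and an Option<Vec<Vec<String>>> collapse into a single
    // iterator; None values simply contribute nothing.
    fn trackers<'a>(
      announce: &'a Option<String>,
      announce_list: &'a Option<Vec<Vec<String>>>,
    ) -> impl Iterator<Item = &'a String> + 'a {
      iter::once(announce)
        .flatten()
        .chain(announce_list.iter().flatten().flatten())
    }

    fn main() {
      let announce = Some("https://a.example/announce".to_string());
      let announce_list = Some(vec![
        vec!["https://b.example/announce".to_string()],
        vec!["https://c.example/announce".to_string()],
      ]);
      assert_eq!(trackers(&announce, &announce_list).count(), 3);
      assert_eq!(trackers(&None, &None).count(), 0);
    }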
#[cfg(test)]
@ -91,7 +108,7 @@ mod tests {
#[test]
fn round_trip_single() {
let value = Metainfo {
announce: "announce".into(),
announce: Some("announce".into()),
announce_list: Some(vec![vec!["announce".into(), "b".into()], vec!["c".into()]]),
comment: Some("comment".into()),
created_by: Some("created by".into()),
@ -121,7 +138,7 @@ mod tests {
#[test]
fn round_trip_multiple() {
let value = Metainfo {
announce: "announce".into(),
announce: Some("announce".into()),
announce_list: Some(vec![vec!["announce".into(), "b".into()], vec!["c".into()]]),
nodes: Some(vec!["x:12".parse().unwrap(), "1.1.1.1:16".parse().unwrap()]),
comment: Some("comment".into()),
@ -166,7 +183,7 @@ mod tests {
#[test]
fn bencode_representation_single_some() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce: Some("ANNOUNCE".into()),
announce_list: Some(vec![vec!["A".into(), "B".into()], vec!["C".into()]]),
nodes: Some(vec![
"domain:1".parse().unwrap(),
@ -227,7 +244,7 @@ mod tests {
#[test]
fn bencode_representation_single_none() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce: Some("ANNOUNCE".into()),
announce_list: None,
nodes: None,
comment: None,
@ -266,7 +283,7 @@ mod tests {
#[test]
fn bencode_representation_multiple_some() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce: Some("ANNOUNCE".into()),
announce_list: None,
nodes: None,
comment: None,
@ -314,7 +331,7 @@ mod tests {
#[test]
fn bencode_representation_multiple_none() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce: Some("ANNOUNCE".into()),
announce_list: None,
nodes: None,
comment: None,
@ -361,7 +378,7 @@ mod tests {
#[test]
fn private_false() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce: Some("ANNOUNCE".into()),
announce_list: None,
nodes: None,
comment: None,

src/metainfo_error.rs

@ -0,0 +1,49 @@
use crate::common::*;
#[derive(Debug, Copy, Clone)]
pub(crate) enum MetainfoError {
Type,
InfoMissing,
InfoType,
}
impl MetainfoError {
fn message(self) -> &'static str {
match self {
Self::Type => "Top-level value not dictionary",
Self::InfoMissing => "Dictionary missing info key",
Self::InfoType => "Info value not dictionary",
}
}
}
impl Display for MetainfoError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.message())
}
}
impl std::error::Error for MetainfoError {}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn display() {
assert_eq!(
MetainfoError::Type.to_string(),
"Top-level value not dictionary"
);
assert_eq!(
MetainfoError::InfoMissing.to_string(),
"Dictionary missing info key",
);
assert_eq!(
MetainfoError::InfoType.to_string(),
"Info value not dictionary",
);
}
}


@ -15,6 +15,10 @@ impl Sha1Digest {
pub(crate) fn bytes(self) -> [u8; Self::LENGTH] {
self.bytes
}
pub(crate) fn from_data(data: impl AsRef<[u8]>) -> Self {
Sha1::from(data).digest().into()
}
}
impl From<sha1::Digest> for Sha1Digest {
@ -24,3 +28,32 @@ impl From<sha1::Digest> for Sha1Digest {
}
}
}
impl Display for Sha1Digest {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
for byte in &self.bytes {
write!(f, "{:02x}", byte)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn display() {
let digest = Sha1Digest {
bytes: [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
};
assert_eq!(
digest.to_string(),
"000102030405060708090a0b0c0d0e0f10111213"
);
}
}


@ -1,6 +1,7 @@
use crate::common::*;
mod create;
mod link;
mod piece_length;
mod show;
mod stats;
@ -14,6 +15,7 @@ mod verify;
)]
pub(crate) enum Torrent {
Create(create::Create),
Link(link::Link),
#[structopt(alias = "piece-size")]
PieceLength(piece_length::PieceLength),
Show(show::Show),
@ -25,6 +27,7 @@ impl Torrent {
pub(crate) fn run(self, env: &mut Env, options: &Options) -> Result<(), Error> {
match self {
Self::Create(create) => create.run(env),
Self::Link(link) => link.run(env),
Self::PieceLength(piece_length) => piece_length.run(env),
Self::Show(show) => show.run(env),
Self::Stats(stats) => stats.run(env, options),


@ -14,16 +14,16 @@ pub(crate) struct Create {
long = "announce",
short = "a",
value_name = "URL",
required(true),
help = "Use `URL` as the primary tracker announce URL. To supply multiple announce URLs, also \
use `--announce-tier`."
)]
announce: Url,
announce: Option<Url>,
#[structopt(
long = "allow",
short = "A",
value_name = "LINT",
possible_values = Lint::VALUES,
set(ArgSettings::CaseInsensitive),
help = "Allow `LINT`. Lints check for conditions which, although permitted, are not usually \
desirable. For example, piece length can be any non-zero value, but probably \
shouldn't be below 16 KiB. The lint `small-piece-length` checks for this, and \
@ -68,7 +68,7 @@ pub(crate) struct Create {
`--node 203.0.113.0:2290`
`--node [2001:db8:4275:7920:6269:7463:6f69:6e21]:8832`"
)]
dht_nodes: Vec<Node>,
dht_nodes: Vec<HostPort>,
#[structopt(
long = "follow-symlinks",
short = "F",
@ -193,6 +193,9 @@ impl Create {
pub(crate) fn run(self, env: &mut Env) -> Result<(), Error> {
let input = env.resolve(&self.input);
let mut linter = Linter::new();
linter.allow(self.allowed_lints.iter().cloned());
let mut announce_list = Vec::new();
for tier in &self.announce_tiers {
let tier = tier.split(',').map(str::to_string).collect::<Vec<String>>();
@ -206,6 +209,10 @@ impl Create {
announce_list.push(tier);
}
if linter.is_denied(Lint::PrivateTrackerless) && self.private && self.announce.is_none() {
return Err(Error::PrivateTrackerless);
}
CreateStep::Searching.print(env)?;
let spinner = if env.err().is_styled_term() {
@ -230,9 +237,6 @@ impl Create {
.piece_length
.unwrap_or_else(|| PieceLengthPicker::from_content_size(files.total_size()));
let mut linter = Linter::new();
linter.allow(self.allowed_lints.iter().cloned());
if piece_length.count() == 0 {
return Err(Error::PieceLengthZero);
}
@ -335,7 +339,7 @@ impl Create {
let metainfo = Metainfo {
comment: self.comment,
encoding: Some(consts::ENCODING_UTF8.to_string()),
announce: self.announce.to_string(),
announce: self.announce.map(|url| url.to_string()),
announce_list: if announce_list.is_empty() {
None
} else {
@ -428,6 +432,23 @@ mod tests {
assert!(matches!(env.run(), Err(Error::Filesystem { .. })));
}
#[test]
fn announce_is_optional() {
let mut env = test_env! {
args: [
"torrent",
"create",
"--input",
"foo",
],
tree: {
foo: "",
},
};
assert_matches!(env.run(), Ok(()));
}
#[test]
fn torrent_file_is_bencode_dict() {
let mut env = test_env! {
@ -547,7 +568,7 @@ mod tests {
};
env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.announce, "http://bar/");
assert_eq!(metainfo.announce, Some("http://bar/".into()));
assert!(metainfo.announce_list.is_none());
}
@ -569,8 +590,8 @@ mod tests {
env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(
metainfo.announce,
"udp://tracker.opentrackr.org:1337/announce"
metainfo.announce.as_deref(),
Some("udp://tracker.opentrackr.org:1337/announce")
);
assert!(metainfo.announce_list.is_none());
}
@ -592,7 +613,10 @@ mod tests {
};
env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.announce, "wss://tracker.btorrent.xyz/");
assert_eq!(
metainfo.announce.as_deref(),
Some("wss://tracker.btorrent.xyz/")
);
assert!(metainfo.announce_list.is_none());
}
@ -615,7 +639,7 @@ mod tests {
};
env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.announce, "http://bar/");
assert_eq!(metainfo.announce.as_deref(), Some("http://bar/"));
assert_eq!(
metainfo.announce_list,
Some(vec![vec!["http://bar".into(), "http://baz".into()]]),
@ -643,7 +667,7 @@ mod tests {
};
env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.announce, "http://bar/");
assert_eq!(metainfo.announce.as_deref(), Some("http://bar/"));
assert_eq!(
metainfo.announce_list,
Some(vec![
@ -2222,4 +2246,46 @@ Content Size 9 bytes
assert_eq!(env.err(), want);
}
#[test]
fn private_requires_announce() {
let mut env = test_env! {
args: [
"torrent",
"create",
"--input",
"foo",
"--private",
],
tree: {
foo: "",
},
};
assert_matches!(
env.run(),
Err(error @ Error::PrivateTrackerless)
if error.lint() == Some(Lint::PrivateTrackerless)
);
}
#[test]
fn private_trackerless_announce() {
let mut env = test_env! {
args: [
"torrent",
"create",
"--input",
"foo",
"--private",
"--allow",
"private-trackerLESS",
],
tree: {
foo: "",
},
};
assert_matches!(env.run(), Ok(()));
}
}
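
In CLI terms (file names here are made up), the new lint makes a private torrent without an announce URL an error unless explicitly allowed:

    imdl torrent create --input foo --private                              # rejected: private torrent with no tracker
    imdl torrent create --input foo --private --allow private-trackerless  # accepted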


@ -0,0 +1,147 @@
use crate::common::*;
#[derive(StructOpt)]
#[structopt(
help_message(consts::HELP_MESSAGE),
version_message(consts::VERSION_MESSAGE),
about("Generate a magnet link from a `.torrent` file.")
)]
pub(crate) struct Link {
#[structopt(
long = "input",
short = "i",
value_name = "METAINFO",
help = "Generate magnet link from metainfo at `PATH`.",
parse(from_os_str)
)]
input: PathBuf,
}
impl Link {
pub(crate) fn run(self, env: &mut Env) -> Result<(), Error> {
let input = env.resolve(&self.input);
let infohash = Infohash::load(&input)?;
let metainfo = Metainfo::load(&input)?;
let mut link = MagnetLink::with_infohash(infohash);
let mut trackers = HashSet::new();
for result in metainfo.trackers() {
let tracker = result?;
if !trackers.contains(&tracker) {
trackers.insert(tracker.clone());
link.add_tracker(tracker);
}
}
outln!(env, "{}", link.to_url())?;
Ok(())
}
}
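
`Link::run` deduplicates tracker URLs while preserving their first-seen order, pairing a `HashSet` with the `MagnetLink` being built. The same pattern in a standalone sketch:

    use std::collections::HashSet;

    // Keep the first occurrence of each item; the set only records membership.
    fn dedup(items: Vec<String>) -> Vec<String> {
      let mut seen = HashSet::new();
      let mut out = Vec::new();
      for item in items {
        if seen.insert(item.clone()) {
          out.push(item);
        }
      }
      out
    }

    fn main() {
      let trackers = vec![
        "https://a.example/announce".to_string(),
        "https://b.example/announce".to_string(),
        "https://a.example/announce".to_string(),
      ];
      assert_eq!(dedup(trackers).len(), 2);
    }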
#[cfg(test)]
mod tests {
use super::*;
use claim::assert_ok;
use pretty_assertions::assert_eq;
#[test]
fn no_announce() {
let mut env = test_env! {
args: [
"torrent",
"link",
"--input",
"foo.torrent",
],
tree: {
"foo.torrent": "d4:infod6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:ee",
}
};
assert_ok!(env.run());
const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
let infohash = Sha1Digest::from_data(INFO.as_bytes());
assert_eq!(env.out(), format!("magnet:?xt=urn:btih:{}\n", infohash),);
}
#[test]
fn with_announce() {
let mut env = test_env! {
args: [
"torrent",
"link",
"--input",
"foo.torrent",
],
tree: {
"foo.torrent": "d\
8:announce24:https://foo.com/announce\
4:infod6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e\
e",
}
};
assert_ok!(env.run());
const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
let infohash = Sha1Digest::from_data(INFO.as_bytes());
assert_eq!(
env.out(),
format!(
"magnet:?xt=urn:btih:{}&tr=https://foo.com/announce\n",
infohash
),
);
}
#[test]
fn infohash_correct_with_nonstandard_info_dict() {
let mut env = test_env! {
args: [
"torrent",
"link",
"--input",
"foo.torrent",
],
tree: {
"foo.torrent": "d4:infod1:ai0e6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:ee",
}
};
assert_ok!(env.run());
const INFO: &str = "d1:ai0e6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
let infohash = Sha1Digest::from_data(INFO.as_bytes());
assert_eq!(env.out(), format!("magnet:?xt=urn:btih:{}\n", infohash),);
}
#[test]
fn bad_metainfo_error() {
let mut env = test_env! {
args: [
"torrent",
"link",
"--input",
"foo.torrent",
],
tree: {
"foo.torrent": "i0e",
}
};
assert_matches!(
env.run(), Err(Error::MetainfoValidate { path, source: MetainfoError::Type })
if path == env.resolve("foo.torrent")
);
}
}


@ -35,7 +35,7 @@ mod tests {
#[test]
fn output() {
let metainfo = Metainfo {
announce: "announce".into(),
announce: Some("announce".into()),
announce_list: Some(vec![vec!["announce".into(), "b".into()], vec!["c".into()]]),
nodes: Some(vec![
"x:12".parse().unwrap(),
@ -81,7 +81,8 @@ Creation Date 1970-01-01 00:00:01 UTC
Torrent Size 339 bytes
Content Size 20 bytes
Private yes
Trackers Tier 1: announce
Tracker announce
Announce List Tier 1: announce
b
Tier 2: c
DHT Nodes x:12
@ -118,7 +119,8 @@ info hash\te12253978dc6d50db11d05747abcea1ad03b51c5
torrent size\t339
content size\t20
private\tyes
trackers\tannounce\tb\tc
tracker\tannounce
announce list\tannounce\tb\tc
dht nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
piece size\t16384
piece count\t2
@ -133,7 +135,7 @@ files\tfoo
#[test]
fn tier_list_with_main() {
let metainfo = Metainfo {
announce: "a".into(),
announce: Some("a".into()),
announce_list: Some(vec![vec!["x".into()], vec!["y".into()], vec!["z".into()]]),
comment: Some("comment".into()),
created_by: Some("created by".into()),
@ -179,10 +181,10 @@ Creation Date 1970-01-01 00:00:01 UTC
Torrent Size 327 bytes
Content Size 20 bytes
Private yes
Trackers a
x
y
z
Tracker a
Announce List Tier 1: x
Tier 2: y
Tier 3: z
DHT Nodes x:12
1.1.1.1:16
[2001:db8:85a3::8a2e:370]:7334
@ -217,7 +219,8 @@ info hash\te12253978dc6d50db11d05747abcea1ad03b51c5
torrent size\t327
content size\t20
private\tyes
trackers\ta\tx\ty\tz
tracker\ta
announce list\tx\ty\tz
dht nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
piece size\t16384
piece count\t2
@ -232,7 +235,7 @@ files\tfoo
#[test]
fn tier_list_without_main() {
let metainfo = Metainfo {
announce: "a".into(),
announce: Some("a".into()),
announce_list: Some(vec![vec!["b".into()], vec!["c".into()], vec!["a".into()]]),
comment: Some("comment".into()),
nodes: Some(vec![
@ -278,9 +281,10 @@ Creation Date 1970-01-01 00:00:01 UTC
Torrent Size 307 bytes
Content Size 20 bytes
Private yes
Trackers b
c
a
Tracker a
Announce List Tier 1: b
Tier 2: c
Tier 3: a
DHT Nodes x:12
1.1.1.1:16
[2001:db8:85a3::8a2e:370]:7334
@ -315,7 +319,102 @@ info hash\tb9cd9cae5748518c99d00d8ae86c0162510be4d9
torrent size\t307
content size\t20
private\tyes
trackers\tb\tc\ta
tracker\ta
announce list\tb\tc\ta
dht nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
piece size\t16384
piece count\t1
file count\t1
files\tfoo
";
assert_eq!(have, want);
}
}
#[test]
fn trackerless() {
let metainfo = Metainfo {
announce: None,
announce_list: None,
comment: Some("comment".into()),
nodes: Some(vec![
"x:12".parse().unwrap(),
"1.1.1.1:16".parse().unwrap(),
"[2001:0db8:85a3::8a2e:0370]:7334".parse().unwrap(),
]),
created_by: Some("created by".into()),
creation_date: Some(1),
encoding: Some("UTF-8".into()),
info: Info {
private: Some(true),
piece_length: Bytes(16 * 1024),
source: Some("source".into()),
name: "foo".into(),
pieces: PieceList::from_pieces(&["abc"]),
mode: Mode::Single {
length: Bytes(20),
md5sum: None,
},
},
};
{
let mut env = TestEnvBuilder::new()
.arg_slice(&["imdl", "torrent", "show", "--input", "foo.torrent"])
.out_is_term()
.build();
let path = env.resolve("foo.torrent");
metainfo.dump(path).unwrap();
env.run().unwrap();
let have = env.out();
let want = " Name foo
Comment comment
Creation Date 1970-01-01 00:00:01 UTC
Created By created by
Source source
Info Hash b9cd9cae5748518c99d00d8ae86c0162510be4d9
Torrent Size 261 bytes
Content Size 20 bytes
Private yes
DHT Nodes x:12
1.1.1.1:16
[2001:db8:85a3::8a2e:370]:7334
Piece Size 16 KiB
Piece Count 1
File Count 1
Files foo
";
assert_eq!(have, want);
}
{
let mut env = TestEnvBuilder::new()
.arg_slice(&["imdl", "torrent", "show", "--input", "foo.torrent"])
.build();
let path = env.resolve("foo.torrent");
metainfo.dump(path).unwrap();
env.run().unwrap();
let have = env.out();
let want = "\
name\tfoo
comment\tcomment
creation date\t1970-01-01 00:00:01 UTC
created by\tcreated by
source\tsource
info hash\tb9cd9cae5748518c99d00d8ae86c0162510be4d9
torrent size\t261
content size\t20
private\tyes
dht nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
piece size\t16384
piece count\t1


@ -255,7 +255,7 @@ impl Extractor {
} else {
buffer.push('<');
for byte in string {
buffer.push_str(&format!("{:X}", byte));
buffer.push_str(&format!("{:02X}", byte));
}
buffer.push('>');
}


@ -322,8 +322,8 @@ mod tests {
"[2/2] \u{1F9EE} Verifying pieces from `{}`…",
create_env.resolve("foo").display()
),
"a: MD5 checksum mismatch: d16fb36f911f878998c136191af705e (expected \
90150983cd24fb0d6963f7d28e17f72)",
"a: MD5 checksum mismatch: d16fb36f0911f878998c136191af705e (expected \
900150983cd24fb0d6963f7d28e17f72)",
"d: 1 byte too long",
"h: 1 byte too short",
"l: File missing",
@ -431,8 +431,8 @@ mod tests {
&format!(
"{} MD5 checksum mismatch: {} (expected {})",
style.message().paint("a:"),
style.error().paint("d16fb36f911f878998c136191af705e"),
style.good().paint("90150983cd24fb0d6963f7d28e17f72"),
style.error().paint("d16fb36f0911f878998c136191af705e"),
style.good().paint("900150983cd24fb0d6963f7d28e17f72"),
),
&error("d", "1 byte too long"),
&error("h", "1 byte too short"),


@ -1,49 +1,36 @@
use crate::common::*;
pub(crate) struct TorrentSummary {
infohash: Infohash,
metainfo: Metainfo,
infohash: sha1::Digest,
size: Bytes,
}
impl TorrentSummary {
fn new(bytes: &[u8], metainfo: Metainfo) -> Result<Self, Error> {
let value = Value::from_bencode(&bytes).unwrap();
let infohash = if let Value::Dict(items) = value {
let info = items
.iter()
.find(|pair: &(&Cow<[u8]>, &Value)| pair.0.as_ref() == b"info")
.unwrap()
.1
.to_bencode()
.unwrap();
Sha1::from(info).digest()
} else {
unreachable!()
};
Ok(Self {
size: Bytes::from(bytes.len().into_u64()),
fn new(metainfo: Metainfo, infohash: Infohash, size: Bytes) -> Self {
Self {
infohash,
metainfo,
})
size,
}
}
pub(crate) fn from_metainfo(metainfo: Metainfo) -> Result<Self, Error> {
pub(crate) fn from_metainfo(metainfo: Metainfo) -> Result<Self> {
let bytes = metainfo.serialize()?;
Self::new(&bytes, metainfo)
let size = Bytes(bytes.len().into_u64());
let infohash = metainfo.infohash()?;
Ok(Self::new(metainfo, infohash, size))
}
pub(crate) fn load(path: &Path) -> Result<Self, Error> {
pub(crate) fn load(path: &Path) -> Result<Self> {
let bytes = fs::read(path).context(error::Filesystem { path })?;
let metainfo = Metainfo::deserialize(path, &bytes)?;
Self::new(&bytes, metainfo)
Ok(Self::from_metainfo(metainfo)?)
}
pub(crate) fn write(&self, env: &mut Env) -> Result<(), Error> {
pub(crate) fn write(&self, env: &mut Env) -> Result<()> {
let table = self.table();
if env.out().is_term() {
@ -106,40 +93,18 @@ impl TorrentSummary {
},
);
match &self.metainfo.announce_list {
Some(tiers) => {
if tiers.iter().all(|tier| tier.len() == 1) {
let mut list = Vec::new();
if !tiers
.iter()
.any(|tier| tier.contains(&self.metainfo.announce))
{
list.push(self.metainfo.announce.clone());
}
if let Some(announce) = &self.metainfo.announce {
table.row("Tracker", announce);
}
for tier in tiers {
list.push(tier[0].clone());
}
if let Some(tiers) = &self.metainfo.announce_list {
let mut value = Vec::new();
table.list("Trackers", list);
} else {
let mut value = Vec::new();
if !tiers
.iter()
.any(|tier| tier.contains(&self.metainfo.announce))
{
value.push(("Main".to_owned(), vec![self.metainfo.announce.clone()]));
}
for (i, tier) in tiers.iter().enumerate() {
value.push((format!("Tier {}", i + 1), tier.clone()));
}
table.tiers("Trackers", value);
}
for (i, tier) in tiers.iter().enumerate() {
value.push((format!("Tier {}", i + 1), tier.clone()));
}
None => table.row("Tracker", &self.metainfo.announce),
table.tiers("Announce List", value);
}
if let Some(nodes) = &self.metainfo.nodes {