Use list of SHA1 digests for piece list

type: reform
This commit is contained in:
Casey Rodarmor 2020-02-15 18:08:36 -08:00
parent 9787344d9e
commit 1227628306
No known key found for this signature in database
GPG Key ID: 556186B153EC6FE0
12 changed files with 243 additions and 96 deletions

View File

@ -54,9 +54,9 @@ pub(crate) use crate::{
bytes::Bytes, env::Env, error::Error, file_info::FileInfo, file_path::FilePath, bytes::Bytes, env::Env, error::Error, file_info::FileInfo, file_path::FilePath,
file_status::FileStatus, files::Files, hasher::Hasher, info::Info, lint::Lint, linter::Linter, file_status::FileStatus, files::Files, hasher::Hasher, info::Info, lint::Lint, linter::Linter,
md5_digest::Md5Digest, metainfo::Metainfo, mode::Mode, node::Node, opt::Opt, md5_digest::Md5Digest, metainfo::Metainfo, mode::Mode, node::Node, opt::Opt,
piece_length_picker::PieceLengthPicker, platform::Platform, status::Status, style::Style, piece_length_picker::PieceLengthPicker, piece_list::PieceList, platform::Platform,
table::Table, target::Target, torrent_summary::TorrentSummary, use_color::UseColor, sha1_digest::Sha1Digest, status::Status, style::Style, table::Table, target::Target,
verifier::Verifier, walker::Walker, torrent_summary::TorrentSummary, use_color::UseColor, verifier::Verifier, walker::Walker,
}; };
// type aliases // type aliases

View File

@ -6,7 +6,7 @@ pub(crate) struct Hasher {
md5sum: bool, md5sum: bool,
piece_bytes_hashed: usize, piece_bytes_hashed: usize,
piece_length: usize, piece_length: usize,
pieces: Vec<u8>, pieces: PieceList,
sha1: Sha1, sha1: Sha1,
} }
@ -15,7 +15,7 @@ impl Hasher {
files: &Files, files: &Files,
md5sum: bool, md5sum: bool,
piece_length: usize, piece_length: usize,
) -> Result<(Mode, Vec<u8>), Error> { ) -> Result<(Mode, PieceList), Error> {
Self::new(md5sum, piece_length).hash_files(files) Self::new(md5sum, piece_length).hash_files(files)
} }
@ -24,14 +24,14 @@ impl Hasher {
buffer: vec![0; piece_length], buffer: vec![0; piece_length],
length: 0, length: 0,
piece_bytes_hashed: 0, piece_bytes_hashed: 0,
pieces: Vec::new(), pieces: PieceList::new(),
sha1: Sha1::new(), sha1: Sha1::new(),
piece_length, piece_length,
md5sum, md5sum,
} }
} }
fn hash_files(mut self, files: &Files) -> Result<(Mode, Vec<u8>), Error> { fn hash_files(mut self, files: &Files) -> Result<(Mode, PieceList), Error> {
let mode = if let Some(contents) = files.contents() { let mode = if let Some(contents) = files.contents() {
let files = self.hash_contents(&files.root(), contents)?; let files = self.hash_contents(&files.root(), contents)?;
@ -46,7 +46,7 @@ impl Hasher {
}; };
if self.piece_bytes_hashed > 0 { if self.piece_bytes_hashed > 0 {
self.pieces.extend(&self.sha1.digest().bytes()); self.pieces.push(self.sha1.digest().into());
self.sha1.reset(); self.sha1.reset();
self.piece_bytes_hashed = 0; self.piece_bytes_hashed = 0;
} }
@ -111,7 +111,7 @@ impl Hasher {
self.piece_bytes_hashed += 1; self.piece_bytes_hashed += 1;
if self.piece_bytes_hashed == self.piece_length { if self.piece_bytes_hashed == self.piece_length {
self.pieces.extend(&self.sha1.digest().bytes()); self.pieces.push(self.sha1.digest().into());
self.sha1.reset(); self.sha1.reset();
self.piece_bytes_hashed = 0; self.piece_bytes_hashed = 0;
} }

View File

@ -17,8 +17,7 @@ pub(crate) struct Info {
with = "unwrap_or_skip" with = "unwrap_or_skip"
)] )]
pub(crate) source: Option<String>, pub(crate) source: Option<String>,
#[serde(with = "serde_bytes")] pub(crate) pieces: PieceList,
pub(crate) pieces: Vec<u8>,
#[serde(flatten)] #[serde(flatten)]
pub(crate) mode: Mode, pub(crate) mode: Mode,
} }

View File

@ -76,9 +76,11 @@ mod node;
mod opt; mod opt;
mod path_ext; mod path_ext;
mod piece_length_picker; mod piece_length_picker;
mod piece_list;
mod platform; mod platform;
mod platform_interface; mod platform_interface;
mod reckoner; mod reckoner;
mod sha1_digest;
mod status; mod status;
mod style; mod style;
mod table; mod table;

View File

@ -9,7 +9,7 @@ pub(crate) struct Md5Digest {
impl Md5Digest { impl Md5Digest {
#[cfg(test)] #[cfg(test)]
pub(crate) fn from_hex(hex: &str) -> Md5Digest { pub(crate) fn from_hex(hex: &str) -> Self {
assert_eq!(hex.len(), 32); assert_eq!(hex.len(), 32);
let mut bytes: [u8; 16] = [0; 16]; let mut bytes: [u8; 16] = [0; 16];
@ -19,13 +19,13 @@ impl Md5Digest {
bytes[n] = u8::from_str_radix(&hex[i..i + 2], 16).unwrap(); bytes[n] = u8::from_str_radix(&hex[i..i + 2], 16).unwrap();
} }
Md5Digest { bytes } Self { bytes }
} }
} }
impl From<md5::Digest> for Md5Digest { impl From<md5::Digest> for Md5Digest {
fn from(digest: md5::Digest) -> Self { fn from(digest: md5::Digest) -> Self {
Md5Digest { bytes: digest.0 } Self { bytes: digest.0 }
} }
} }

View File

@ -115,9 +115,7 @@ mod tests {
piece_length: Bytes(16 * 1024), piece_length: Bytes(16 * 1024),
source: Some("source".into()), source: Some("source".into()),
name: "foo".into(), name: "foo".into(),
pieces: vec![ pieces: PieceList::from_pieces(&["abc"]),
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
mode: Mode::Single { mode: Mode::Single {
length: Bytes(20), length: Bytes(20),
md5sum: None, md5sum: None,
@ -147,9 +145,7 @@ mod tests {
piece_length: Bytes(16 * 1024), piece_length: Bytes(16 * 1024),
source: Some("source".into()), source: Some("source".into()),
name: "foo".into(), name: "foo".into(),
pieces: vec![ pieces: PieceList::from_pieces(&["abc"]),
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
mode: Mode::Multiple { mode: Mode::Multiple {
files: vec![FileInfo { files: vec![FileInfo {
length: Bytes(10), length: Bytes(10),

View File

@ -307,6 +307,10 @@ impl Create {
#[cfg(test)] #[cfg(test)]
{ {
let deserialized = bendy::serde::de::from_bytes::<Metainfo>(&bytes).unwrap();
assert_eq!(deserialized, metainfo);
let status = metainfo.verify(&input)?; let status = metainfo.verify(&input)?;
if !status.good() { if !status.good() {
@ -683,16 +687,19 @@ mod tests {
#[test] #[test]
fn single_small() { fn single_small() {
let mut env = environment(&["--input", "foo", "--announce", "http://bar"]); let mut env = env! {
let contents = "bar"; args: ["--input", "foo", "--announce", "http://bar"],
fs::write(env.resolve("foo"), contents).unwrap(); tree: {
foo: "bar",
},
};
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces, Sha1::from(contents).digest().bytes()); assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["bar"]));
assert_eq!( assert_eq!(
metainfo.info.mode, metainfo.info.mode,
Mode::Single { Mode::Single {
length: Bytes(contents.len() as u64), length: Bytes(3),
md5sum: None, md5sum: None,
} }
) )
@ -714,16 +721,10 @@ mod tests {
fs::write(env.resolve("foo"), contents).unwrap(); fs::write(env.resolve("foo"), contents).unwrap();
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
let pieces = Sha1::from("b") assert_eq!(
.digest() metainfo.info.pieces,
.bytes() PieceList::from_pieces(&["b", "a", "r"])
.iter() );
.chain(Sha1::from("a").digest().bytes().iter())
.chain(Sha1::from("r").digest().bytes().iter())
.cloned()
.collect::<Vec<u8>>();
assert_eq!(metainfo.info.pieces, pieces);
assert_eq!( assert_eq!(
metainfo.info.mode, metainfo.info.mode,
Mode::Single { Mode::Single {
@ -740,7 +741,7 @@ mod tests {
fs::write(env.resolve("foo"), contents).unwrap(); fs::write(env.resolve("foo"), contents).unwrap();
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces.len(), 0); assert_eq!(metainfo.info.pieces.count(), 0);
assert_eq!( assert_eq!(
metainfo.info.mode, metainfo.info.mode,
Mode::Single { Mode::Single {
@ -757,21 +758,23 @@ mod tests {
fs::create_dir(&dir).unwrap(); fs::create_dir(&dir).unwrap();
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces.len(), 0); assert_eq!(metainfo.info.pieces.count(), 0);
assert_eq!(metainfo.info.mode, Mode::Multiple { files: Vec::new() }) assert_eq!(metainfo.info.mode, Mode::Multiple { files: Vec::new() })
} }
#[test] #[test]
fn multiple_one_file_md5() { fn multiple_one_file_md5() {
let mut env = environment(&["--input", "foo", "--announce", "http://bar", "--md5sum"]); let mut env = env! {
let dir = env.resolve("foo"); args: ["--input", "foo", "--announce", "http://bar", "--md5sum"],
fs::create_dir(&dir).unwrap(); tree: {
let file = dir.join("bar"); foo: {
let contents = "bar"; bar: "bar",
fs::write(file, contents).unwrap(); },
},
};
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces, Sha1::from(contents).digest().bytes()); assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["bar"]));
match metainfo.info.mode { match metainfo.info.mode {
Mode::Multiple { files } => { Mode::Multiple { files } => {
assert_eq!( assert_eq!(
@ -789,15 +792,17 @@ mod tests {
#[test] #[test]
fn multiple_one_file_md5_off() { fn multiple_one_file_md5_off() {
let mut env = environment(&["--input", "foo", "--announce", "http://bar"]); let mut env = env! {
let dir = env.resolve("foo"); args: ["--input", "foo", "--announce", "http://bar"],
fs::create_dir(&dir).unwrap(); tree: {
let file = dir.join("bar"); foo: {
let contents = "bar"; bar: "bar",
fs::write(file, contents).unwrap(); },
},
};
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces, Sha1::from(contents).digest().bytes()); assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["bar"]));
match metainfo.info.mode { match metainfo.info.mode {
Mode::Multiple { files } => { Mode::Multiple { files } => {
assert_eq!( assert_eq!(
@ -823,10 +828,7 @@ mod tests {
fs::write(dir.join("h"), "hij").unwrap(); fs::write(dir.join("h"), "hij").unwrap();
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!( assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["abchijxyz"]));
metainfo.info.pieces,
Sha1::from("abchijxyz").digest().bytes()
);
match metainfo.info.mode { match metainfo.info.mode {
Mode::Multiple { files } => { Mode::Multiple { files } => {
assert_eq!( assert_eq!(
@ -1084,7 +1086,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.is_empty() Mode::Multiple { files } if files.is_empty()
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1106,7 +1108,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 2 Mode::Multiple { files } if files.len() == 2
); );
assert_eq!(metainfo.info.pieces, Sha1::from("abcabc").digest().bytes()); assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["abcabc"]));
} }
#[test] #[test]
@ -1141,7 +1143,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 0 Mode::Multiple { files } if files.len() == 0
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1162,7 +1164,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 1 Mode::Multiple { files } if files.len() == 1
); );
assert_eq!(metainfo.info.pieces, Sha1::from("abc").digest().bytes()); assert_eq!(metainfo.info.pieces, PieceList::from_pieces(&["abc"]));
} }
fn populate_symlinks(env: &Env) { fn populate_symlinks(env: &Env) {
@ -1205,7 +1207,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.is_empty() Mode::Multiple { files } if files.is_empty()
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1222,7 +1224,9 @@ Content Size 9 bytes
populate_symlinks(&env); populate_symlinks(&env);
env.run().unwrap(); env.run().unwrap();
let metainfo = env.load_metainfo("foo.torrent"); let metainfo = env.load_metainfo("foo.torrent");
assert_eq!(metainfo.info.pieces, Sha1::from("barbaz").digest().bytes()); let mut pieces = PieceList::new();
pieces.push(Sha1::from("barbaz").digest().into());
assert_eq!(metainfo.info.pieces, pieces);
match metainfo.info.mode { match metainfo.info.mode {
Mode::Multiple { files } => { Mode::Multiple { files } => {
assert_eq!( assert_eq!(
@ -1273,7 +1277,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.is_empty() Mode::Multiple { files } if files.is_empty()
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1306,7 +1310,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.is_empty() Mode::Multiple { files } if files.is_empty()
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1322,7 +1326,9 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 2 Mode::Multiple { files } if files.len() == 2
); );
assert_eq!(metainfo.info.pieces, Sha1::from("bc").digest().bytes()); let mut pieces = PieceList::new();
pieces.push(Sha1::from("bc").digest().into());
assert_eq!(metainfo.info.pieces, pieces);
} }
#[test] #[test]
@ -1338,7 +1344,9 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 3 Mode::Multiple { files } if files.len() == 3
); );
assert_eq!(metainfo.info.pieces, Sha1::from("abc").digest().bytes()); let mut pieces = PieceList::new();
pieces.push(Sha1::from("abc").digest().into());
assert_eq!(metainfo.info.pieces, pieces);
} }
#[test] #[test]
@ -1361,7 +1369,9 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 2 Mode::Multiple { files } if files.len() == 2
); );
assert_eq!(metainfo.info.pieces, Sha1::from("bc").digest().bytes()); let mut pieces = PieceList::new();
pieces.push(Sha1::from("bc").digest().into());
assert_eq!(metainfo.info.pieces, pieces);
} }
#[test] #[test]
@ -1377,7 +1387,7 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.is_empty() Mode::Multiple { files } if files.is_empty()
); );
assert_eq!(metainfo.info.pieces, &[]); assert_eq!(metainfo.info.pieces, PieceList::new());
} }
#[test] #[test]
@ -1404,7 +1414,9 @@ Content Size 9 bytes
metainfo.info.mode, metainfo.info.mode,
Mode::Multiple { files } if files.len() == 1 Mode::Multiple { files } if files.len() == 1
); );
assert_eq!(metainfo.info.pieces, Sha1::from("a").digest().bytes()); let mut pieces = PieceList::new();
pieces.push(Sha1::from("a").digest().into());
assert_eq!(metainfo.info.pieces, pieces);
} }
#[test] #[test]

View File

@ -50,9 +50,7 @@ mod tests {
piece_length: Bytes(16 * 1024), piece_length: Bytes(16 * 1024),
source: Some("source".into()), source: Some("source".into()),
name: "foo".into(), name: "foo".into(),
pieces: vec![ pieces: PieceList::from_pieces(&["xyz", "abc"]),
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
mode: Mode::Single { mode: Mode::Single {
length: Bytes(20), length: Bytes(20),
md5sum: None, md5sum: None,
@ -78,8 +76,8 @@ mod tests {
Created 1970-01-01 00:00:01 UTC Created 1970-01-01 00:00:01 UTC
Created By created by Created By created by
Source source Source source
Info Hash b7595205a46491b3e8686e10b28efe7144d066cc Info Hash e12253978dc6d50db11d05747abcea1ad03b51c5
Torrent Size 319 bytes Torrent Size 339 bytes
Content Size 20 bytes Content Size 20 bytes
Private yes Private yes
Trackers Tier 1: announce Trackers Tier 1: announce
@ -89,7 +87,7 @@ Content Size 20 bytes
1.1.1.1:16 1.1.1.1:16
[2001:db8:85a3::8a2e:370]:7334 [2001:db8:85a3::8a2e:370]:7334
Piece Size 16 KiB Piece Size 16 KiB
Piece Count 1 Piece Count 2
File Count 1 File Count 1
Files foo Files foo
"; ";
@ -115,14 +113,14 @@ Comment\tcomment
Created\t1970-01-01 00:00:01 UTC Created\t1970-01-01 00:00:01 UTC
Created By\tcreated by Created By\tcreated by
Source\tsource Source\tsource
Info Hash\tb7595205a46491b3e8686e10b28efe7144d066cc Info Hash\te12253978dc6d50db11d05747abcea1ad03b51c5
Torrent Size\t319 Torrent Size\t339
Content Size\t20 Content Size\t20
Private\tyes Private\tyes
Trackers\tannounce\tb\tc Trackers\tannounce\tb\tc
DHT Nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334 DHT Nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
Piece Size\t16384 Piece Size\t16384
Piece Count\t1 Piece Count\t2
File Count\t1 File Count\t1
Files\tfoo Files\tfoo
"; ";
@ -150,9 +148,7 @@ Files\tfoo
piece_length: Bytes(16 * 1024), piece_length: Bytes(16 * 1024),
source: Some("source".into()), source: Some("source".into()),
name: "foo".into(), name: "foo".into(),
pieces: vec![ pieces: PieceList::from_pieces(&["xyz", "abc"]),
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
mode: Mode::Single { mode: Mode::Single {
length: Bytes(20), length: Bytes(20),
md5sum: None, md5sum: None,
@ -178,8 +174,8 @@ Files\tfoo
Created 1970-01-01 00:00:01 UTC Created 1970-01-01 00:00:01 UTC
Created By created by Created By created by
Source source Source source
Info Hash b7595205a46491b3e8686e10b28efe7144d066cc Info Hash e12253978dc6d50db11d05747abcea1ad03b51c5
Torrent Size 307 bytes Torrent Size 327 bytes
Content Size 20 bytes Content Size 20 bytes
Private yes Private yes
Trackers a Trackers a
@ -190,7 +186,7 @@ Content Size 20 bytes
1.1.1.1:16 1.1.1.1:16
[2001:db8:85a3::8a2e:370]:7334 [2001:db8:85a3::8a2e:370]:7334
Piece Size 16 KiB Piece Size 16 KiB
Piece Count 1 Piece Count 2
File Count 1 File Count 1
Files foo Files foo
"; ";
@ -216,14 +212,14 @@ Comment\tcomment
Created\t1970-01-01 00:00:01 UTC Created\t1970-01-01 00:00:01 UTC
Created By\tcreated by Created By\tcreated by
Source\tsource Source\tsource
Info Hash\tb7595205a46491b3e8686e10b28efe7144d066cc Info Hash\te12253978dc6d50db11d05747abcea1ad03b51c5
Torrent Size\t307 Torrent Size\t327
Content Size\t20 Content Size\t20
Private\tyes Private\tyes
Trackers\ta\tx\ty\tz Trackers\ta\tx\ty\tz
DHT Nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334 DHT Nodes\tx:12\t1.1.1.1:16\t[2001:db8:85a3::8a2e:370]:7334
Piece Size\t16384 Piece Size\t16384
Piece Count\t1 Piece Count\t2
File Count\t1 File Count\t1
Files\tfoo Files\tfoo
"; ";
@ -251,9 +247,7 @@ Files\tfoo
piece_length: Bytes(16 * 1024), piece_length: Bytes(16 * 1024),
source: Some("source".into()), source: Some("source".into()),
name: "foo".into(), name: "foo".into(),
pieces: vec![ pieces: PieceList::from_pieces(&["abc"]),
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
],
mode: Mode::Single { mode: Mode::Single {
length: Bytes(20), length: Bytes(20),
md5sum: None, md5sum: None,
@ -279,7 +273,7 @@ Files\tfoo
Created 1970-01-01 00:00:01 UTC Created 1970-01-01 00:00:01 UTC
Created By created by Created By created by
Source source Source source
Info Hash b7595205a46491b3e8686e10b28efe7144d066cc Info Hash b9cd9cae5748518c99d00d8ae86c0162510be4d9
Torrent Size 307 bytes Torrent Size 307 bytes
Content Size 20 bytes Content Size 20 bytes
Private yes Private yes
@ -316,7 +310,7 @@ Comment\tcomment
Created\t1970-01-01 00:00:01 UTC Created\t1970-01-01 00:00:01 UTC
Created By\tcreated by Created By\tcreated by
Source\tsource Source\tsource
Info Hash\tb7595205a46491b3e8686e10b28efe7144d066cc Info Hash\tb9cd9cae5748518c99d00d8ae86c0162510be4d9
Torrent Size\t307 Torrent Size\t307
Content Size\t20 Content Size\t20
Private\tyes Private\tyes

118
src/piece_list.rs Normal file
View File

@ -0,0 +1,118 @@
use crate::common::*;
/// An ordered list of SHA-1 piece digests, corresponding to the `pieces`
/// field of a torrent's info dictionary.
///
/// On the wire this is a single bencoded byte string: the concatenation of
/// all 20-byte digests in piece order (see the `Serialize`/`Deserialize`
/// impls below).
#[derive(Debug, PartialEq, Clone)]
pub(crate) struct PieceList {
  // One digest per piece, in piece order.
  piece_hashes: Vec<Sha1Digest>,
}
impl PieceList {
  /// Creates an empty piece list.
  pub(crate) fn new() -> Self {
    Self {
      piece_hashes: Vec::new(),
    }
  }

  /// Returns the number of piece digests in the list.
  pub(crate) fn count(&self) -> usize {
    self.piece_hashes.len()
  }

  /// Appends `digest` to the end of the list.
  pub(crate) fn push(&mut self, digest: Sha1Digest) {
    self.piece_hashes.push(digest);
  }

  /// Test helper: hashes each element of `pieces` with SHA-1 and collects
  /// the resulting digests into a list.
  #[cfg(test)]
  pub(crate) fn from_pieces<I, B>(pieces: I) -> Self
  where
    I: IntoIterator<Item = B>,
    B: AsRef<[u8]>,
  {
    let mut list = Self::new();

    for piece in pieces {
      list.push(Sha1::from(piece).digest().into());
    }

    list
  }
}
impl Serialize for PieceList {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut bytes = Vec::with_capacity(self.piece_hashes.len() * sha1::DIGEST_LENGTH);
for piece in &self.piece_hashes {
bytes.extend_from_slice(&piece.bytes());
}
serde_bytes::Bytes::new(&bytes).serialize(serializer)
}
}
impl<'de> Deserialize<'de> for PieceList {
  /// Deserializes a byte string into a list of SHA-1 piece digests.
  ///
  /// # Errors
  ///
  /// Fails if the buffer length is not a multiple of `Sha1Digest::LENGTH`.
  fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
  where
    D: Deserializer<'de>,
  {
    let bytes = serde_bytes::ByteBuf::deserialize(deserializer)?.into_vec();

    if bytes.len() % Sha1Digest::LENGTH != 0 {
      // Report the same constant the check above uses — the original message
      // interpolated `sha1::DIGEST_LENGTH`, which would make the diagnostic
      // wrong if the two constants ever diverged.
      return Err(D::Error::custom(format!(
        "buffer length {} is not a multiple of {}",
        bytes.len(),
        Sha1Digest::LENGTH
      )));
    }

    let piece_hashes = bytes
      .chunks_exact(Sha1Digest::LENGTH)
      .map(|chunk| Sha1Digest::from_bytes(chunk.try_into().unwrap()))
      .collect();

    Ok(Self { piece_hashes })
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  /// Round-trip helper: bencodes `pieces` and asserts the bytes equal `want`,
  /// then decodes `want` and asserts it equals `pieces`.
  fn case(pieces: PieceList, want: impl AsRef<[u8]>) {
    let expected = want.as_ref();

    let actual = bendy::serde::to_bytes(&pieces).unwrap();

    assert_eq!(
      actual,
      expected,
      "{} != {}",
      String::from_utf8_lossy(&actual),
      String::from_utf8_lossy(expected)
    );

    let round_tripped = bendy::serde::from_bytes::<PieceList>(expected).unwrap();
    assert_eq!(round_tripped, pieces);
  }

  #[test]
  fn basic() {
    let mut pieces = PieceList::new();
    assert_eq!(pieces.count(), 0);

    pieces.push(Sha1::new().digest().into());
    assert_eq!(pieces.count(), 1);
  }

  #[test]
  fn empty() {
    // An empty list serializes to the empty bencoded byte string.
    case(PieceList::new(), "0:");
  }

  #[test]
  fn single() {
    // A single digest serializes to a 20-byte string (SHA-1 of empty input).
    let mut pieces = PieceList::new();
    pieces.push(Sha1::new().digest().into());

    case(
      pieces,
      b"20:\xda\x39\xa3\xee\x5e\x6b\x4b\x0d\x32\x55\xbf\xef\x95\x60\x18\x90\xaf\xd8\x07\x09",
    );
  }
}

26
src/sha1_digest.rs Normal file
View File

@ -0,0 +1,26 @@
use crate::common::*;
/// A SHA-1 digest: a fixed-size 20-byte hash value.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub(crate) struct Sha1Digest {
  // Raw digest bytes.
  bytes: [u8; Self::LENGTH],
}
impl Sha1Digest {
pub(crate) const LENGTH: usize = 20;
pub(crate) fn from_bytes(bytes: [u8; Self::LENGTH]) -> Self {
Sha1Digest { bytes }
}
pub(crate) fn bytes(self) -> [u8; Self::LENGTH] {
self.bytes
}
}
impl From<sha1::Digest> for Sha1Digest {
fn from(digest: sha1::Digest) -> Self {
Self {
bytes: digest.bytes(),
}
}
}

View File

@ -154,7 +154,7 @@ impl TorrentSummary {
table.size("Piece Size", self.metainfo.info.piece_length); table.size("Piece Size", self.metainfo.info.piece_length);
table.row("Piece Count", self.metainfo.info.pieces.len() / 20); table.row("Piece Count", self.metainfo.info.pieces.count());
match &self.metainfo.info.mode { match &self.metainfo.info.mode {
Mode::Single { .. } => { Mode::Single { .. } => {

View File

@ -3,7 +3,7 @@ use crate::common::*;
pub(crate) struct Verifier { pub(crate) struct Verifier {
buffer: Vec<u8>, buffer: Vec<u8>,
piece_length: usize, piece_length: usize,
pieces: Vec<u8>, pieces: PieceList,
sha1: Sha1, sha1: Sha1,
piece_bytes_hashed: usize, piece_bytes_hashed: usize,
} }
@ -14,7 +14,7 @@ impl Verifier {
buffer: vec![0; piece_length], buffer: vec![0; piece_length],
piece_bytes_hashed: 0, piece_bytes_hashed: 0,
sha1: Sha1::new(), sha1: Sha1::new(),
pieces: Vec::new(), pieces: PieceList::new(),
piece_length, piece_length,
} }
} }
@ -34,7 +34,7 @@ impl Verifier {
} }
if hasher.piece_bytes_hashed > 0 { if hasher.piece_bytes_hashed > 0 {
hasher.pieces.extend(&hasher.sha1.digest().bytes()); hasher.pieces.push(hasher.sha1.digest().into());
hasher.sha1.reset(); hasher.sha1.reset();
hasher.piece_bytes_hashed = 0; hasher.piece_bytes_hashed = 0;
} }
@ -65,7 +65,7 @@ impl Verifier {
self.piece_bytes_hashed += 1; self.piece_bytes_hashed += 1;
if self.piece_bytes_hashed == self.piece_length { if self.piece_bytes_hashed == self.piece_length {
self.pieces.extend(&self.sha1.digest().bytes()); self.pieces.push(self.sha1.digest().into());
self.sha1.reset(); self.sha1.reset();
self.piece_bytes_hashed = 0; self.piece_bytes_hashed = 0;
} }