Configure rustfmt's max_width
ivanjermakov committed Nov 10, 2023
1 parent 000cf68 · commit d2edb81
Showing 12 changed files with 57 additions and 215 deletions.
1 change: 1 addition & 0 deletions rustfmt.toml
@@ -0,0 +1 @@
+max_width = 120
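
For reference, rustfmt's default max_width is 100, so this single setting raises the limit by 20 columns and is what lets the expressions in the hunks below collapse onto one line. A hedged illustration with a made-up call (the function name and error message are hypothetical, not from this repository):

// Under the default max_width = 100, rustfmt would wrap this statement, roughly as:
//     let info = parse_metainfo(&bytes)
//         .context("metainfo dict expected at the top level of the torrent file")?;
// With max_width = 120 it fits on a single line:
let info = parse_metainfo(&bytes).context("metainfo dict expected at the top level of the torrent file")?;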
40 changes: 9 additions & 31 deletions src/bencode.rs
@@ -14,12 +14,8 @@ pub enum BencodeValue {
 impl BencodeValue {
     pub fn encode(&self) -> ByteString {
         match self {
-            BencodeValue::String(s) => {
-                [s.len().to_string().as_bytes(), ":".as_bytes(), s.as_slice()].concat()
-            }
-            BencodeValue::Int(i) => {
-                ["i".as_bytes(), i.to_string().as_bytes(), "e".as_bytes()].concat()
-            }
+            BencodeValue::String(s) => [s.len().to_string().as_bytes(), ":".as_bytes(), s.as_slice()].concat(),
+            BencodeValue::Int(i) => ["i".as_bytes(), i.to_string().as_bytes(), "e".as_bytes()].concat(),
             BencodeValue::List(l) => vec![
                 "l".as_bytes().to_vec(),
                 l.iter().flat_map(|v| v.encode()).collect(),
@@ -33,9 +29,7 @@ impl BencodeValue {
                 d.iter()
                     .flat_map(|(k, v)| {
                         [
-                            BencodeValue::String(k.as_bytes().to_vec())
-                                .encode()
-                                .as_slice(),
+                            BencodeValue::String(k.as_bytes().to_vec()).encode().as_slice(),
                             v.encode().as_slice(),
                         ]
                         .concat()
@@ -153,10 +147,7 @@ pub fn parse_int(bencoded: ByteString) -> (Option<BencodeValue>, ByteString) {
     }
     i += 1;

-    (
-        Some(BencodeValue::Int(int)),
-        bencoded.iter().skip(i).cloned().collect(),
-    )
+    (Some(BencodeValue::Int(int)), bencoded.iter().skip(i).cloned().collect())
 }

 /// Format: l<bencoded values>e
@@ -200,9 +191,7 @@ pub fn parse_dict(bencoded: ByteString) -> (Option<BencodeValue>, ByteString) {
     i += 1;

     while bencoded.get(i).is_some() && bencoded.get(i).filter(|c| (**c as char) == 'e').is_none() {
-        let key = if let (Some(item), left) =
-            parse_bencoded(bencoded.iter().skip(i).cloned().collect())
-        {
+        let key = if let (Some(item), left) = parse_bencoded(bencoded.iter().skip(i).cloned().collect()) {
             i = bencoded.len() - left.len();
             match item {
                 BencodeValue::String(s) => String::from_utf8_lossy(s.as_slice()).to_string(),
@@ -211,9 +200,7 @@ pub fn parse_dict(bencoded: ByteString) -> (Option<BencodeValue>, ByteString) {
         } else {
             return (None, bencoded);
         };
-        let value = if let (Some(item), left) =
-            parse_bencoded(bencoded.iter().skip(i).cloned().collect())
-        {
+        let value = if let (Some(item), left) = parse_bencoded(bencoded.iter().skip(i).cloned().collect()) {
             i = bencoded.len() - left.len();
             item
         } else {
@@ -240,10 +227,7 @@ mod test {
     #[test]
     fn should_parse_string() {
         let (str, left) = parse_bencoded(String::into_bytes("5:hello".into()));
-        assert_eq!(
-            str,
-            Some(BencodeValue::String(String::into_bytes("hello".into())))
-        );
+        assert_eq!(str, Some(BencodeValue::String(String::into_bytes("hello".into()))));
         assert!(left.is_empty());
     }

@@ -281,14 +265,8 @@ mod test {
             str,
             Some(BencodeValue::Dict(
                 [
-                    (
-                        "cow".into(),
-                        BencodeValue::String(String::into_bytes("moo".into()))
-                    ),
-                    (
-                        "spam".into(),
-                        BencodeValue::String(String::into_bytes("eggs".into()))
-                    )
+                    ("cow".into(), BencodeValue::String(String::into_bytes("moo".into()))),
+                    ("spam".into(), BencodeValue::String(String::into_bytes("eggs".into())))
                 ]
                 .into_iter()
                 .collect()
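Since the hunks above touch both the encoder and the parser, a quick round trip is a handy way to confirm that only formatting changed. The sketch below is illustrative, not part of the commit; it assumes parse_bencoded and BencodeValue::encode keep the signatures shown above and that ByteString is the crate's byte-vector alias.

#[test]
fn round_trip_int() {
    // "i42e" is the bencoding of the integer 42.
    let raw = String::into_bytes("i42e".into());
    let (value, rest) = parse_bencoded(raw.clone());
    assert_eq!(value, Some(BencodeValue::Int(42)));
    assert!(rest.is_empty());
    // Re-encoding the parsed value reproduces the original bytes.
    assert_eq!(BencodeValue::Int(42).encode(), raw);
}
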
10 changes: 2 additions & 8 deletions src/dht.rs
@@ -29,9 +29,7 @@ pub async fn find_peers(
     loop {
         debug!("dht queue: {} nodes", queue.len());

-        let chunk = queue
-            .drain(..cmp::min(queue.len(), dht_chunk))
-            .collect::<Vec<_>>();
+        let chunk = queue.drain(..cmp::min(queue.len(), dht_chunk)).collect::<Vec<_>>();
         if chunk.is_empty() {
             break;
         }
@@ -128,11 +126,7 @@ async fn find_peers_single(
     Err(Error::msg("malformed dht response"))
 }

-async fn dht_find_peers(
-    peer: &PeerInfo,
-    peer_id: &ByteString,
-    info_hash: ByteString,
-) -> Result<BencodeValue> {
+async fn dht_find_peers(peer: &PeerInfo, peer_id: &ByteString, info_hash: ByteString) -> Result<BencodeValue> {
     let tx_id = thread_rng()
         .sample_iter(&Alphanumeric)
         .take(2)
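The collapsed let chunk line in find_peers keeps the usual guard for draining a queue in fixed-size batches: cmp::min clamps the range so drain never reaches past the end of the queue. A self-contained sketch of that pattern with stand-in element and size types, not the crate's peer queue:

use std::cmp;

fn next_chunk(queue: &mut Vec<u32>, chunk_size: usize) -> Vec<u32> {
    // Drain at most chunk_size items; clamping with cmp::min keeps the range
    // in bounds when fewer than chunk_size items remain.
    queue.drain(..cmp::min(queue.len(), chunk_size)).collect()
}

For example, next_chunk(&mut vec![1, 2, 3], 5) returns [1, 2, 3] and leaves the queue empty.
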
22 changes: 7 additions & 15 deletions src/main.rs
@@ -46,9 +46,7 @@ async fn main() {
 }

 async fn try_main() -> Result<()> {
-    env_logger::init_from_env(
-        env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"),
-    );
+    env_logger::init_from_env(env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"));

     let arg = match env::args().nth(1) {
         Some(arg) => arg,
@@ -68,13 +66,11 @@ async fn try_main() -> Result<()> {
     };

     let state_path = expanduser("~/.local/state/biter")?;
-    let p_state = PersistState::load(&state_path)
-        .ok()
-        .unwrap_or_else(|| PersistState {
-            path: state_path,
-            peer_id: generate_peer_id(),
-            dht_peers: BTreeSet::new(),
-        });
+    let p_state = PersistState::load(&state_path).ok().unwrap_or_else(|| PersistState {
+        path: state_path,
+        peer_id: generate_peer_id(),
+        dht_peers: BTreeSet::new(),
+    });
     debug!("read persist state from file: {:?}", p_state);
     let p_state = Arc::new(Mutex::new(p_state));

@@ -88,11 +84,7 @@ async fn try_main() -> Result<()> {
             .1
             .to_string();
         trace!("xt: {}", xt);
-        let info_hash = xt
-            .split("urn:btih:")
-            .last()
-            .context("invalid magnet")?
-            .to_lowercase();
+        let info_hash = xt.split("urn:btih:").last().context("invalid magnet")?.to_lowercase();
         info!("magnet info hash: {}", info_hash);
         download_torrent(from_hex(&info_hash), None, &config, p_state).await?;
     } else {
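The collapsed info_hash line is the magnet-link handling: everything after urn:btih: in the xt parameter is taken as the hex info hash and lowercased. A minimal sketch of that step as a hypothetical helper, assuming Result and Context come from anyhow, which the .context(...)? calls above suggest:

use anyhow::{Context, Result};

// Hypothetical helper, not part of this commit.
fn info_hash_from_xt(xt: &str) -> Result<String> {
    // "urn:btih:<40 hex chars>" -> lowercased hex digest
    Ok(xt.split("urn:btih:").last().context("invalid magnet")?.to_lowercase())
}
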
17 changes: 4 additions & 13 deletions src/message.rs
@@ -64,12 +64,8 @@ impl From<Message> for Vec<u8> {
             Message::Unchoke => [u32tb(1).as_slice(), &[1]].concat(),
             Message::Interested => [u32tb(1).as_slice(), &[2]].concat(),
             Message::NotInterested => [u32tb(1).as_slice(), &[3]].concat(),
-            Message::Have { piece_index } => {
-                [u32tb(5).as_slice(), &[4], &u32tb(piece_index)].concat()
-            }
-            Message::Bitfield { bitfield } => {
-                [u32tb(1 + bitfield.len() as u32).as_slice(), &[5], &bitfield].concat()
-            }
+            Message::Have { piece_index } => [u32tb(5).as_slice(), &[4], &u32tb(piece_index)].concat(),
+            Message::Bitfield { bitfield } => [u32tb(1 + bitfield.len() as u32).as_slice(), &[5], &bitfield].concat(),
             Message::Request {
                 piece_index,
                 begin,
@@ -154,10 +150,7 @@ pub async fn read_message(stream: &mut OwnedReadHalf) -> Result<Message> {
     }

     let mut id_p = [0; 1];
-    stream
-        .read_exact(&mut id_p)
-        .await
-        .context("id_p read error")?;
+    stream.read_exact(&mut id_p).await.context("id_p read error")?;
     let id = u8::from_be_bytes(id_p);

     let msg = match id {
@@ -176,9 +169,7 @@ pub async fn read_message(stream: &mut OwnedReadHalf) -> Result<Message> {
         4 if len == 5 => Ok(Message::Have {
             piece_index: u32_from_slice(&payload_p[0..4])?,
         }),
-        5 => Ok(Message::Bitfield {
-            bitfield: payload_p,
-        }),
+        5 => Ok(Message::Bitfield { bitfield: payload_p }),
         6 if len == 13 => Ok(Message::Request {
             piece_index: u32_from_slice(&payload_p[0..4])?,
             begin: u32_from_slice(&payload_p[4..8])?,
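The Have and Bitfield arms above follow the peer wire format: a 4-byte big-endian length prefix, a 1-byte message id, then the payload. A spot check of the Have layout, assuming u32tb converts a u32 to big-endian bytes, which is consistent with the from_be_bytes calls in read_message:

// <len=0005><id=4><piece index=1>, all integers big-endian.
let bytes: Vec<u8> = Message::Have { piece_index: 1 }.into();
assert_eq!(bytes, vec![0, 0, 0, 5, 4, 0, 0, 0, 1]);
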
4 changes: 1 addition & 3 deletions src/metainfo.rs
@@ -179,9 +179,7 @@ fn parse_files_info(value: &BencodeValue) -> Result<Vec<PathInfo>> {
                 },
                 path,
                 md5_sum: match d.get("md5_sum") {
-                    Some(BencodeValue::String(s)) => {
-                        Some(String::from_utf8_lossy(s).to_string())
-                    }
+                    Some(BencodeValue::String(s)) => Some(String::from_utf8_lossy(s).to_string()),
                     _ => None,
                 },
             })