diff --git a/crates/bit_rev/src/file.rs b/crates/bit_rev/src/file.rs
index 83a1a33..f93c995 100644
--- a/crates/bit_rev/src/file.rs
+++ b/crates/bit_rev/src/file.rs
@@ -4,7 +4,9 @@ use serde_bencode::de;
 use serde_bencode::ser;
 use serde_bytes::ByteBuf;
 use std::fmt::Write;
-use std::{error::Error, io::Read};
+use std::io::Read;
+
+use anyhow::Result;
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 pub struct Node(String, i64);
@@ -95,7 +97,7 @@ impl TorrentMeta {
     }
 }
 
-pub fn from_filename(filename: &str) -> Result<TorrentMeta, Box<dyn Error>> {
+pub fn from_filename(filename: &str) -> Result<TorrentMeta> {
     let mut file = std::fs::File::open(filename)?;
     let mut content = Vec::new();
     file.read_to_end(&mut content)?;
@@ -103,7 +105,7 @@ pub fn from_filename(filename: &str) -> Result<TorrentMeta, Box<dyn Error>> {
     Ok(TorrentMeta::new(torrent))
 }
 
-pub fn url_encode_bytes(content: &[u8]) -> Result<String, Box<dyn Error>> {
+pub fn url_encode_bytes(content: &[u8]) -> Result<String> {
    let mut out: String = String::new();
 
     for byte in content.iter() {
@@ -121,21 +123,27 @@ pub fn build_tracker_url(
     peer_id: &[u8],
     port: u16,
     tracker_url: &str,
-) -> String {
+) -> Result<String> {
     // let announce_url = torrent_meta.torrent_file.announce.as_ref().unwrap();
-    let info_hash_encoded = url_encode_bytes(torrent_meta.info_hash.as_ref()).unwrap();
-    let peer_id_encoded = url_encode_bytes(peer_id).unwrap();
+    let info_hash_encoded = url_encode_bytes(torrent_meta.info_hash.as_ref())?;
+    let peer_id_encoded = url_encode_bytes(peer_id)?;
     // let info_hash_encoded = urlencoding::encode_binary(&torrent_meta.info_hash);
     // let peer_id_encoded = urlencoding::encode_binary(&peer_id);
 
-    format!(
+    let total_length = if let Some(length) = torrent_meta.torrent_file.info.length {
+        length
+    } else if let Some(files) = &torrent_meta.torrent_file.info.files {
+        files.iter().map(|f| f.length).sum()
+    } else {
+        return Err(anyhow::anyhow!(
+            "Invalid torrent file: missing length information"
+        ));
+    };
+
+    Ok(format!(
         // "{}?info_hash={}&peer_id={}&port={}&uploaded=0&downloaded=0&compact=1&left={}&event=started?supportcrypto=1&numwant=80&key=DF45C574",
         "{}?info_hash={}&peer_id={}&port={}&uploaded=0&downloaded=0&compact=1&left={}",
-        tracker_url,
-        info_hash_encoded,
-        peer_id_encoded,
-        port,
-        torrent_meta.torrent_file.info.length.as_ref().unwrap()
+        tracker_url, info_hash_encoded, peer_id_encoded, port, total_length
     )
-    .to_string()
+    .to_string())
 }
diff --git a/crates/bit_rev/src/session.rs b/crates/bit_rev/src/session.rs
index b7f5e60..4a7f065 100644
--- a/crates/bit_rev/src/session.rs
+++ b/crates/bit_rev/src/session.rs
@@ -77,7 +77,7 @@ impl Session {
         &self,
         add_torrent: AddTorrentOptions,
     ) -> anyhow::Result {
-        let torrent = Torrent::new(&add_torrent.torrent_meta.clone());
+        let torrent = Torrent::new(&add_torrent.torrent_meta.clone())?;
         let torrent_meta = add_torrent.torrent_meta.clone();
         let (pr_tx, pr_rx) = flume::bounded::(torrent.piece_hashes.len());
         let have_broadcast = Arc::new(tokio::sync::broadcast::channel(128).0);
diff --git a/crates/bit_rev/src/torrent.rs b/crates/bit_rev/src/torrent.rs
index 4899d72..d975e94 100644
--- a/crates/bit_rev/src/torrent.rs
+++ b/crates/bit_rev/src/torrent.rs
@@ -1,20 +1,65 @@
 use crate::file::TorrentMeta;
 
+use anyhow::Result;
+
 #[derive(Debug, Clone, PartialEq)]
 pub struct Torrent {
     pub info_hash: [u8; 20],
     pub piece_hashes: Vec<[u8; 20]>,
     pub piece_length: i64,
     pub length: i64,
+    pub files: Vec<TorrentFileInfo>,
+    pub name: String,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct TorrentFileInfo {
+    pub path: Vec<String>,
+    pub length: i64,
+    pub offset: i64,
 }
 
 impl Torrent {
-    pub fn new(torrent_meta: &TorrentMeta) -> Torrent {
-        Torrent {
+    pub fn new(torrent_meta: &TorrentMeta) -> Result<Torrent> {
+        let info = &torrent_meta.torrent_file.info;
+
+        let (total_length, files) = if let Some(file_list) = &info.files {
+            // Multi-file torrent
+            let mut total = 0i64;
+            let mut torrent_files = Vec::new();
+
+            for file in file_list {
+                torrent_files.push(TorrentFileInfo {
+                    path: file.path.clone(),
+                    length: file.length,
+                    offset: total,
+                });
+                total += file.length;
+            }
+
+            (total, torrent_files)
+        } else if let Some(length) = info.length {
+            // Single-file torrent
+            let single_file = TorrentFileInfo {
+                path: vec![info.name.clone()],
+                length,
+                offset: 0,
+            };
+
+            (length, vec![single_file])
+        } else {
+            return Err(anyhow::anyhow!(
+                "Invalid torrent file: missing length information"
+            ));
+        };
+
+        Ok(Torrent {
             info_hash: torrent_meta.info_hash,
             piece_hashes: torrent_meta.piece_hashes.clone(),
-            piece_length: torrent_meta.torrent_file.info.piece_length,
-            length: torrent_meta.torrent_file.info.length.unwrap(),
-        }
+            piece_length: info.piece_length,
+            length: total_length,
+            files,
+            name: info.name.clone(),
+        })
     }
 }
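A note on the layout `Torrent::new` establishes above: in the multi-file case the files are treated as one contiguous byte stream, so each `TorrentFileInfo.offset` is the running total of the preceding file lengths and `total_length` is their sum. A minimal standalone sketch of that bookkeeping, illustrative only and not part of the patch, with plain integers standing in for the crate's types:

// Mirrors the running-total offset assignment performed in Torrent::new.
fn assign_offsets(lengths: &[i64]) -> (i64, Vec<i64>) {
    let mut total = 0i64;
    let mut offsets = Vec::with_capacity(lengths.len());
    for len in lengths {
        offsets.push(total); // each file starts where the previous one ended
        total += len;
    }
    (total, offsets)
}

fn main() {
    // Three files of 100, 50 and 25 bytes laid out back to back.
    let (total, offsets) = assign_offsets(&[100, 50, 25]);
    assert_eq!(total, 175);
    assert_eq!(offsets, vec![0, 100, 150]);
}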
diff --git a/crates/bit_rev/src/tracker_peers.rs b/crates/bit_rev/src/tracker_peers.rs
index 8ff12d4..e291b34 100644
--- a/crates/bit_rev/src/tracker_peers.rs
+++ b/crates/bit_rev/src/tracker_peers.rs
@@ -92,7 +92,12 @@ impl TrackerPeers {
             let have_broadcast = have_broadcast.clone();
             let torrent_downloaded_state = torrent_downloaded_state.clone();
             tokio::spawn(async move {
-                let url = file::build_tracker_url(&torrent_meta, &peer_id, 6881, &tracker);
+                let url = file::build_tracker_url(&torrent_meta, &peer_id, 6881, &tracker)
+                    .map_err(|e| {
+                        error!("Failed to build tracker URL for {}: {}", tracker, e);
+                        e
+                    })
+                    .unwrap();
 
                 match request_peers(&url).await {
                     Ok(request_peers_res) => {
diff --git a/crates/bit_rev/src/utils.rs b/crates/bit_rev/src/utils.rs
index b42d5b8..bee94f8 100644
--- a/crates/bit_rev/src/utils.rs
+++ b/crates/bit_rev/src/utils.rs
@@ -1,4 +1,4 @@
-use crate::torrent::Torrent;
+use crate::torrent::{Torrent, TorrentFileInfo};
 use rand::Rng;
 
 const BLOCK_SIZE: u32 = 16384;
@@ -42,3 +42,44 @@ pub fn generate_peer_id() -> [u8; 20] {
         .try_into()
         .unwrap()
 }
+
+#[derive(Debug, Clone)]
+pub struct PieceFileMapping {
+    pub file_index: usize,
+    pub file_offset: usize,
+    pub length: usize,
+}
+
+pub fn map_piece_to_files(torrent: &Torrent, piece_index: usize) -> Vec<PieceFileMapping> {
+    let (piece_start, piece_end) = calculate_bounds_for_piece(torrent, piece_index);
+    let mut mappings = Vec::new();
+
+    for (file_index, file) in torrent.files.iter().enumerate() {
+        let file_start = file.offset as usize;
+        let file_end = file_start + file.length as usize;
+
+        // Check if piece overlaps with this file
+        if piece_start < file_end && piece_end > file_start {
+            let overlap_start = piece_start.max(file_start);
+            let overlap_end = piece_end.min(file_end);
+            let file_offset = overlap_start - file_start;
+            let length = overlap_end - overlap_start;
+
+            mappings.push(PieceFileMapping {
+                file_index,
+                file_offset,
+                length,
+            });
+        }
+    }
+
+    mappings
+}
+
+pub fn get_full_file_path(torrent: &Torrent, file_info: &TorrentFileInfo) -> std::path::PathBuf {
+    let mut path = std::path::PathBuf::from(&torrent.name);
+    for component in &file_info.path {
+        path.push(component);
+    }
+    path
+}
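The piece-to-file mapping added in utils.rs above is the core of the multi-file support: a piece is a byte range over the concatenated payload, and each file claims the part of that range intersecting its own [offset, offset + length) span. Below is a standalone sketch of the same overlap arithmetic, exercising a piece that straddles a file boundary. It is illustrative only and not part of the patch; `FileSpan` and `Mapping` are simplified stand-ins for the crate's `TorrentFileInfo` and `PieceFileMapping`, and the piece bounds are passed in directly instead of coming from `calculate_bounds_for_piece`:

#[derive(Debug, Clone)]
struct FileSpan {
    length: i64,
    offset: i64, // byte offset of this file within the concatenated payload
}

#[derive(Debug, PartialEq)]
struct Mapping {
    file_index: usize,
    file_offset: usize,
    length: usize,
}

// Same overlap rule as map_piece_to_files: half-open byte ranges intersect
// when piece_start < file_end && piece_end > file_start.
fn map_piece(files: &[FileSpan], piece_start: usize, piece_end: usize) -> Vec<Mapping> {
    let mut mappings = Vec::new();
    for (file_index, file) in files.iter().enumerate() {
        let file_start = file.offset as usize;
        let file_end = file_start + file.length as usize;
        if piece_start < file_end && piece_end > file_start {
            let overlap_start = piece_start.max(file_start);
            let overlap_end = piece_end.min(file_end);
            mappings.push(Mapping {
                file_index,
                file_offset: overlap_start - file_start,
                length: overlap_end - overlap_start,
            });
        }
    }
    mappings
}

fn main() {
    // Two files of 100 and 50 bytes; a 64-byte piece starting at byte 80
    // splits into 20 bytes for file 0 and 44 bytes for file 1.
    let files = vec![
        FileSpan { length: 100, offset: 0 },
        FileSpan { length: 50, offset: 100 },
    ];
    let mappings = map_piece(&files, 80, 144);
    assert_eq!(
        mappings,
        vec![
            Mapping { file_index: 0, file_offset: 80, length: 20 },
            Mapping { file_index: 1, file_offset: 0, length: 44 },
        ]
    );
}

The same slices drive the write loop in crates/cli/src/main.rs below: `file_offset` is the seek target inside the destination file, and the mapping lengths partition the piece buffer in order.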
diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs
index 42ddb75..73da317 100644
--- a/crates/cli/src/main.rs
+++ b/crates/cli/src/main.rs
@@ -1,11 +1,12 @@
 use indicatif::{ProgressBar, ProgressState, ProgressStyle};
 use std::{
+    collections::HashMap,
     fmt::Write,
     io::SeekFrom,
     sync::{atomic::AtomicU64, Arc},
 };
 use tokio::{
-    fs::File,
+    fs::{create_dir_all, File},
     io::{AsyncSeekExt, AsyncWriteExt},
 };
 use tracing::trace;
@@ -33,7 +34,7 @@ pub async fn download_file(filename: &str, out_file: Option<String>) -> anyhow::
     let add_torrent_result = session.add_torrent(filename.into()).await?;
 
     let torrent = add_torrent_result.torrent.clone();
-    let torrent_meta = add_torrent_result.torrent_meta;
+    let _torrent_meta = add_torrent_result.torrent_meta;
 
     let total_size = torrent.length as u64;
     let pb = ProgressBar::new(total_size);
@@ -47,13 +48,41 @@ pub async fn download_file(filename: &str, out_file: Option<String>) -> anyhow::
         ).progress_chars("#>-")
     );
 
-    let out_filename = match out_file {
+    // Determine the output directory
+    let output_dir = match out_file {
         Some(name) => name,
-        None => torrent_meta.clone().torrent_file.info.name.clone(),
+        None => torrent.name.clone(),
     };
 
-    let mut file = File::create(out_filename).await?;
-    // File
+    // Create output directory and prepare file handles
+    let mut file_handles: HashMap<usize, File> = HashMap::new();
+
+    // Create directories and prepare files for multi-file torrents
+    for (file_index, file_info) in torrent.files.iter().enumerate() {
+        let file_path = if torrent.files.len() == 1 {
+            // Single file torrent - use output_dir as filename
+            std::path::PathBuf::from(&output_dir)
+        } else {
+            // Multi-file torrent - create subdirectory structure
+            let mut path = std::path::PathBuf::from(&output_dir);
+            for component in &file_info.path {
+                path.push(component);
+            }
+            path
+        };
+
+        // Create parent directories if needed
+        if let Some(parent) = file_path.parent() {
+            create_dir_all(parent).await?;
+        }
+
+        // Create the file
+        let file = File::create(&file_path).await?;
+        file_handles.insert(file_index, file);
+
+        trace!("Created file: {:?}", file_path);
+    }
+
     let total_downloaded = Arc::new(AtomicU64::new(0));
     let total_downloaded_clone = total_downloaded.clone();
 
@@ -71,21 +100,41 @@ pub async fn download_file(filename: &str, out_file: Option<String>) -> anyhow::
 
         let pr = add_torrent_result.pr_rx.recv_async().await?;
         hashset.insert(pr.index);
-        let (start, end) = utils::calculate_bounds_for_piece(&torrent, pr.index as usize);
-        trace!(
-            "index: {}, start: {}, end: {} len {}",
-            pr.index,
-            start,
-            end,
-            pr.length
-        );
-        file.seek(SeekFrom::Start(start as u64)).await?;
-        file.write_all(pr.buf.as_slice()).await?;
+
+        // Map piece to files and write data accordingly
+        let file_mappings = utils::map_piece_to_files(&torrent, pr.index as usize);
+        let mut piece_offset = 0;
+
+        for mapping in file_mappings {
+            let file = file_handles.get_mut(&mapping.file_index).ok_or_else(|| {
+                anyhow::anyhow!("File handle not found for index {}", mapping.file_index)
+            })?;
+
+            // Seek to correct position in file
+            file.seek(SeekFrom::Start(mapping.file_offset as u64))
+                .await?;
+
+            // Write the portion of the piece that belongs to this file
+            let piece_data = &pr.buf[piece_offset..piece_offset + mapping.length];
+            file.write_all(piece_data).await?;
+
+            piece_offset += mapping.length;
+
+            trace!(
+                "Wrote {} bytes to file {} at offset {}",
+                mapping.length,
+                mapping.file_index,
+                mapping.file_offset
+            );
+        }
 
         total_downloaded.fetch_add(pr.length as u64, std::sync::atomic::Ordering::Relaxed);
     }
 
-    file.sync_all().await?;
+    // Sync all files
+    for (_, file) in file_handles {
+        file.sync_all().await?;
+    }
 
     Ok(())
 }
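Where the downloaded data ends up, given the cli/main.rs changes above: a single-file torrent writes directly to `out_file` (or the torrent name), while a multi-file torrent becomes a directory tree rooted at that name, with each `file_info.path` appended component by component. A small sketch of the multi-file case; the torrent and file names here are made up for illustration, not taken from the patch:

use std::path::PathBuf;

fn main() {
    // Hypothetical multi-file torrent named "my-torrent" containing a file at
    // ["season1", "episode1.mkv"]; mirrors the path assembly in the CLI loop
    // and in get_full_file_path.
    let output_dir = "my-torrent";
    let file_path_components = ["season1", "episode1.mkv"];

    let mut path = PathBuf::from(output_dir);
    for component in file_path_components {
        path.push(component);
    }

    // PathBuf comparison is component-wise, so the separator style does not matter.
    assert_eq!(path, PathBuf::from("my-torrent/season1/episode1.mkv"));
}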