diff options
Diffstat (limited to 'src/net/i2p_package.rs')
| -rw-r--r-- | src/net/i2p_package.rs | 310 |
1 file changed, 232 insertions(+), 78 deletions(-)
diff --git a/src/net/i2p_package.rs b/src/net/i2p_package.rs index d149454..970f719 100644 --- a/src/net/i2p_package.rs +++ b/src/net/i2p_package.rs @@ -1,6 +1,8 @@ use crate::cfg::config::Config; - +use crate::pkgtoolkit::pkgtools::Package; +use serde::Deserialize; use tokio; + /* use emissary_core::runtime::{ AsyncRead, @@ -8,10 +10,9 @@ use emissary_core::runtime::{ }; */ -use std::{fs::File, io::Write, path::Path}; -// use emissary_core::Profile; -// use emissary_core::i2np::Message; +use std::{collections::HashMap, path::Path}; use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader}; +use indicatif::{ProgressBar, ProgressStyle}; use url; use yosemite::Session; @@ -19,6 +20,12 @@ use yosemite::SessionOptions; pub struct I2PPackage { config: Config, + index_packages: Option<HashMap<String, Package>>, +} + +#[derive(Deserialize, Debug)] +struct IndexData { + packages: Vec<Package>, } impl I2PPackage { @@ -28,7 +35,10 @@ impl I2PPackage { /// /// A new I2P object with the given configuration. The session is initially set to None and the connected status is set to false. pub fn new(config: Config) -> Self { - I2PPackage { config: config } + I2PPackage { + config: config, + index_packages: None + } } /// Downloads the INDEX.tar.gz file from the configured repository @@ -37,17 +47,12 @@ impl I2PPackage { /// # Errors /// /// Returns an error if the request fails, if the response status is not successful, or if there's an issue while reading or writing the file. 
- pub async fn fetch_index(&mut self) -> Result<bool, std::io::Error> { + pub async fn fetch_index(&mut self) -> Result<bool, Box<dyn std::error::Error>> { let repo_url_str = &self.config.repo.repo_url; let cache_dir = &self.config.paths.cache_dir; - let url = url::Url::parse(repo_url_str).map_err(|_| { - std::io::Error::new(std::io::ErrorKind::InvalidInput, "Invalid repo URL") - })?; - - let host = url.host_str().ok_or_else(|| { - std::io::Error::new(std::io::ErrorKind::InvalidInput, "No host in URL") - })?; + let url = url::Url::parse(repo_url_str)?; + let host = url.host_str().ok_or("No host in URL")?; let request_path = url.path(); let request_path = if request_path.ends_with(".tar.gz") { @@ -57,91 +62,240 @@ impl I2PPackage { }; let session_options = SessionOptions::default(); - let mut session = Session::new(session_options).await.map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to create SAM session: {}", e), - ) - })?; - - let mut stream = session.connect(host).await.map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::ConnectionAborted, - format!("Failed to connect: {}", e), - ) - })?; + let mut session = Session::new(session_options).await?; + let mut stream = session.connect(host).await?; let request = format!( "GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n", request_path, host ); - - stream.write_all(request.as_bytes()).await.map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to write request: {}", e), - ) - })?; + stream.write_all(request.as_bytes()).await?; let mut reader = BufReader::new(stream); let mut response_buffer = Vec::new(); - reader - .read_to_end(&mut response_buffer) - .await - .map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to read response: {}", e), - ) - })?; + reader.read_to_end(&mut response_buffer).await?; let headers_end = response_buffer .windows(4) .position(|window| window == b"\r\n\r\n") - .ok_or_else(|| { - 
std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid response: no headers end", - ) - })?; - - let headers_str = std::str::from_utf8(&response_buffer[..headers_end]).map_err(|_| { - std::io::Error::new(std::io::ErrorKind::InvalidData, "Invalid header encoding") - })?; + .ok_or("Invalid response: no headers end")?; + let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?; if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - format!( - "HTTP Error: {}", - headers_str.lines().next().unwrap_or("Unknown") - ), - )); + return Err(format!( + "HTTP Error: {}", + headers_str.lines().next().unwrap_or("Unknown") + ).into()); } + let content_length = headers_str + .lines() + .find(|line| line.to_lowercase().starts_with("content-length:")) + .and_then(|line| line.split_at(15).1.trim().parse::<u64>().ok()) + .unwrap_or(0); + let body_start = headers_end + 4; + let mut body_reader = std::io::Cursor::new(&response_buffer[body_start..]); let file_path = Path::new(cache_dir).join("INDEX.tar.gz"); - let mut file = File::create(&file_path).map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to create file: {}", e), - ) - })?; - file.write_all(&response_buffer[body_start..]) - .map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to write file: {}", e), - ) - })?; - file.flush().map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::Other, - format!("Failed to flush file: {}", e), - ) - })?; + let pb = if content_length > 0 { + let pb = ProgressBar::new(content_length); + pb.set_style(ProgressStyle::default_bar() + .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")? 
+ .progress_chars("#>-")); + pb + } else { + let pb = ProgressBar::new_spinner(); + pb.set_style(ProgressStyle::default_spinner() + .template("{spinner:.green} [{elapsed_precise}] Fetching INDEX.tar.gz...")? + ); + pb + }; + + let mut file = tokio::fs::File::create(&file_path).await?; + + let chunk_size = 8192u64; + let mut buffer = vec![0; chunk_size as usize]; + + loop { + + let bytes_read_result = std::io::Read::read(&mut body_reader, &mut buffer); + let bytes_read = match bytes_read_result { + Ok(n) => n, + Err(e) => return Err(e.into()), + }; + + if bytes_read == 0 { + break; + } + file.write_all(&buffer[..bytes_read]).await?; + + pb.inc(bytes_read as u64); + + if bytes_read < chunk_size as usize { + break; + } + } + + pb.finish_with_message("INDEX.tar.gz download finished"); + + // --- НОВОЕ: Извлечение и парсинг INDEX.toml --- + log::info!("Extracting INDEX.tar.gz to cache directory..."); + Package::extract_archive(&file_path.to_string_lossy())?; // Используем существующую функцию из pkgtoolkit + + let index_toml_path = Path::new(cache_dir).join("INDEX.toml"); // Предполагаем, что INDEX.toml внутри архива + if !index_toml_path.exists() { + log::warn!("INDEX.toml not found in INDEX.tar.gz. 
Proceeding without index data."); + self.index_packages = Some(HashMap::new()); // Или None, если это ошибка + return Ok(true); + } + + let index_content = std::fs::read_to_string(&index_toml_path)?; + let index_data: IndexData = toml::from_str(&index_content)?; + + let mut package_map = HashMap::new(); + for pkg in index_data.packages { + // PKG_URL = /repo/package.mesk + // FULL URL = "http://mesk.anthrill.i2p/i2p/repo/pkg.mesk" + let base_url = url::Url::parse(&self.config.repo.repo_url)?; + let full_url = base_url.join(&pkg.url)?; + let mut pkg_clone = pkg.clone(); + pkg_clone.url = full_url.to_string(); + + package_map.insert(pkg_clone.name.clone(), pkg_clone); + } + + self.index_packages = Some(package_map.clone()); + log::info!("Index loaded successfully, {} packages found.", package_map.len()); + + Ok(true) + } + + + /// An internal auxiliary function for downloading data and writing it to a file with a progress display. + /// + /// # Arguments + /// * `data' - Byte slice (&[u8]) with data to write. + /// * `file_path' is the path to the file to write the data to. + /// * `content_length' is the expected data size (for the progress bar). Maybe 0. + /// * `description' - Description of the operation for the progress bar. + /// + /// # Errors + /// + /// Returns an error if there is a problem when creating or writing to a file. + async fn download_and_write_file_with_progress( + data: &[u8], + file_path: &Path, + content_length: u64, + description: &str, + ) -> Result<(), Box<dyn std::error::Error>> { + // Прогресс-бар для загрузки + let pb = if content_length > 0 { + let pb = ProgressBar::new(content_length); + pb.set_style(ProgressStyle::default_bar() + .template(&format!("{{spinner:.green}} [{{elapsed_precise}}] [{{bar:40.cyan/blue}}] {{bytes}}/{{total_bytes}} ({} {{eta}})", description))? 
+ .progress_chars("#>-")); + pb + } else { + let pb = ProgressBar::new_spinner(); + pb.set_style(ProgressStyle::default_spinner() + .template(&format!("{{spinner:.green}} [{{elapsed_precise}}] {}...", description))? + ); + pb + }; + + let mut file = tokio::fs::File::create(&file_path).await?; + + let chunk_size = 8192usize; + let mut pos = 0; + + while pos < data.len() { + let end = std::cmp::min(pos + chunk_size, data.len()); + let chunk = &data[pos..end]; + file.write_all(chunk).await?; + pb.inc(chunk.len() as u64); + pos = end; + } + + pb.finish_with_message(format!("{} download finished", description)); + Ok(()) + } + + /// Fetches a specific package identified by `index` (likely the package name). + /// Assumes `fetch_index` has been called and `self.index_packages` is populated. + pub fn fetch_package_info(&self, package_name: &str) -> Result<&Package, Box<dyn std::error::Error>> { + let packages = self.index_packages.as_ref() + .ok_or("Index not loaded. Call fetch_index first.")?; + let pkg_info = packages.get(package_name) + .ok_or(format!("Package '{}' not found in index.", package_name))?; + Ok(pkg_info) + } + + /// Fetches a specific package identified by `package_name`. + /// Assumes `fetch_index` has been called and `self.index_packages` is populated to get the URL. + /// Downloads the package file (.mesk) to the cache directory. + /// + /// # Errors + /// + /// Returns an error if the index is not loaded, the package is not found in the index, + /// the package URL is invalid, the request fails, or if there's an issue writing the file. + /// Why didn't I just abstract the download functionality into a separate function initially? + /// Yes, I'm scared to work with fetch_index, even I don't often write such shit code. 
+ pub async fn fetch_package(&self, package_name: &str) -> Result<bool, Box<dyn std::error::Error>> { + let package_info = self.fetch_package_info(package_name)?; + let url = url::Url::parse(&package_info.url)?; + let host = url.host_str().ok_or("No host in package URL")?; + let request_path = url.path(); + + let session_options = SessionOptions::default(); + let mut session = Session::new(session_options).await?; + let mut stream = session.connect(host).await?; + + let request = format!( + "GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n", + request_path, host + ); + stream.write_all(request.as_bytes()).await?; + + let mut reader = BufReader::new(stream); + let mut response_buffer = Vec::new(); + reader.read_to_end(&mut response_buffer).await?; + + let headers_end = response_buffer + .windows(4) + .position(|window| window == b"\r\n\r\n") + .ok_or("Invalid response: no headers end")?; + + let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?; + if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") { + return Err(format!( + "HTTP Error: {}", + headers_str.lines().next().unwrap_or("Unknown") + ).into()); + } + + let content_length = headers_str + .lines() + .find(|line| line.to_lowercase().starts_with("content-length:")) + .and_then(|line| line.split_at(15).1.trim().parse::<u64>().ok()) + .unwrap_or(0); + + let body_start = headers_end + 4; + let body_bytes = &response_buffer[body_start..]; + + let file_name = Path::new(request_path) + .file_name() + .ok_or("Could not determine filename from URL path")? + .to_str() + .ok_or("Filename is not valid UTF-8")?; + let cache_dir = &self.config.paths.cache_dir; + let file_path = Path::new(cache_dir).join(file_name); + + Self::download_and_write_file_with_progress(body_bytes, &file_path, content_length, file_name).await?; + + log::info!("Package '{}' downloaded successfully to {:?}", package_name, file_path); Ok(true) } } + |
