use crate::cfg::config::Config;
use crate::pkgtoolkit::pkgtools::Package;
use serde::Deserialize;
use std::{collections::HashMap, path::Path};
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use indicatif::{ProgressBar, ProgressStyle};
use url;
use yosemite::Session;
use yosemite::SessionOptions;

/// Client for an I2P-hosted package repository, reached over a
/// yosemite streaming session with plain HTTP/1.1 on top.
pub struct I2PPackage {
    // Application configuration (repository URL, cache directory, ...).
    config: Config,
    // Package index keyed by package name; `None` until `fetch_index`
    // has been called successfully.
    index_packages: Option<HashMap<String, Package>>,
}

/// Deserialized shape of the `INDEX.toml` file extracted from
/// the repository's `INDEX.tar.gz` archive.
#[derive(Deserialize, Debug)]
struct IndexData {
    packages: Vec<Package>,
}

impl I2PPackage {
    /// Creates a new `I2PPackage` with the given configuration.
    ///
    /// # Returns
    ///
    /// A new `I2PPackage`. The package index is initially `None`;
    /// call `fetch_index` to populate it before querying or
    /// downloading packages.
    pub fn new(config: Config) -> Self {
        I2PPackage { config, index_packages: None }
    }

    /// Downloads the INDEX.tar.gz file from the configured repository
    /// and stores it in the configured cache directory.
    ///
    /// # Errors
    ///
    /// Returns an error if the request fails, if the response status is not
    /// successful, or if there's an issue while reading or writing the file.
pub async fn fetch_index(&mut self) -> Result> { let repo_url_str = &self.config.repo.repo_url; let cache_dir = &self.config.paths.cache_dir; let url = url::Url::parse(repo_url_str)?; let host = url.host_str().ok_or("No host in URL")?; let request_path = url.path(); let request_path = if request_path.ends_with(".tar.gz") { request_path.to_string() } else { format!("{}/INDEX.tar.gz", request_path.trim_end_matches('/')) }; let session_options = SessionOptions::default(); let mut session = Session::new(session_options).await?; let mut stream = session.connect(host).await?; let request = format!( "GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n", request_path, host ); stream.write_all(request.as_bytes()).await?; let mut reader = BufReader::new(stream); let mut response_buffer = Vec::new(); reader.read_to_end(&mut response_buffer).await?; let headers_end = response_buffer .windows(4) .position(|window| window == b"\r\n\r\n") .ok_or("Invalid response: no headers end")?; let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?; if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") { return Err(format!( "HTTP Error: {}", headers_str.lines().next().unwrap_or("Unknown") ).into()); } let content_length = headers_str .lines() .find(|line| line.to_lowercase().starts_with("content-length:")) .and_then(|line| line.split_at(15).1.trim().parse::().ok()) .unwrap_or(0); let body_start = headers_end + 4; let mut body_reader = std::io::Cursor::new(&response_buffer[body_start..]); let file_path = Path::new(cache_dir).join("INDEX.tar.gz"); let pb = if content_length > 0 { let pb = ProgressBar::new(content_length); pb.set_style(ProgressStyle::default_bar() .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")? 
.progress_chars("#>-")); pb } else { let pb = ProgressBar::new_spinner(); pb.set_style(ProgressStyle::default_spinner() .template("{spinner:.green} [{elapsed_precise}] Fetching INDEX.tar.gz...")? ); pb }; let mut file = tokio::fs::File::create(&file_path).await?; let chunk_size = 8192u64; let mut buffer = vec![0; chunk_size as usize]; loop { let bytes_read_result = std::io::Read::read(&mut body_reader, &mut buffer); let bytes_read = match bytes_read_result { Ok(n) => n, Err(e) => return Err(e.into()), }; if bytes_read == 0 { break; } file.write_all(&buffer[..bytes_read]).await?; pb.inc(bytes_read as u64); if bytes_read < chunk_size as usize { break; } } pb.finish_with_message("INDEX.tar.gz download finished"); log::info!("Extracting INDEX.tar.gz to cache directory..."); Package::extract_archive(&file_path.to_string_lossy())?; let index_toml_path = Path::new(cache_dir).join("INDEX.toml"); if !index_toml_path.exists() { log::warn!("INDEX.toml not found in INDEX.tar.gz. Proceeding without index data."); self.index_packages = Some(HashMap::new()); return Ok(true); } let index_content = std::fs::read_to_string(&index_toml_path)?; let index_data: IndexData = toml::from_str(&index_content)?; let mut package_map = HashMap::new(); for pkg in index_data.packages { // PKG_URL = /repo/package.mesk // FULL URL = "http://mesk.anthrill.i2p/i2p/repo/pkg.mesk" let base_url = url::Url::parse(&self.config.repo.repo_url)?; let full_url = base_url.join(&pkg.url)?; let mut pkg_clone = pkg.clone(); pkg_clone.url = full_url.to_string(); package_map.insert(pkg_clone.name.clone(), pkg_clone); } self.index_packages = Some(package_map.clone()); log::info!("Index loaded successfully, {} packages found.", package_map.len()); Ok(true) } /// An internal auxiliary function for downloading data and writing it to a file with a progress display. /// /// # Arguments /// * `data' - Byte slice (&[u8]) with data to write. /// * `file_path' is the path to the file to write the data to. 
/// * `content_length` - The expected data size (for the progress bar). May be 0.
/// * `description` - Description of the operation for the progress bar.
///
/// # Errors
///
/// Returns an error if there is a problem when creating or writing to a file.
async fn download_and_write_file_with_progress(
    data: &[u8],
    file_path: &Path,
    content_length: u64,
    description: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    // Progress bar for the download: a real bar when the size is known,
    // otherwise an indeterminate spinner.
    let pb = if content_length > 0 {
        let pb = ProgressBar::new(content_length);
        pb.set_style(
            ProgressStyle::default_bar()
                .template(&format!(
                    "{{spinner:.green}} [{{elapsed_precise}}] [{{bar:40.cyan/blue}}] {{bytes}}/{{total_bytes}} ({} {{eta}})",
                    description
                ))?
                .progress_chars("#>-"),
        );
        pb
    } else {
        let pb = ProgressBar::new_spinner();
        pb.set_style(ProgressStyle::default_spinner().template(&format!(
            "{{spinner:.green}} [{{elapsed_precise}}] {}...",
            description
        ))?);
        pb
    };

    let mut file = tokio::fs::File::create(&file_path).await?;

    // The data is already fully in memory; write it in fixed-size chunks
    // purely so the progress bar advances smoothly.
    const CHUNK_SIZE: usize = 8192;
    for chunk in data.chunks(CHUNK_SIZE) {
        file.write_all(chunk).await?;
        pb.inc(chunk.len() as u64);
    }

    pb.finish_with_message(format!("{} download finished", description));
    Ok(())
}

/// Looks up a package by name in the loaded index.
///
/// Assumes `fetch_index` has been called and `self.index_packages` is
/// populated.
///
/// # Errors
///
/// Returns an error if the index has not been loaded yet or if the
/// package is not present in the index.
pub fn fetch_package_info(&self, package_name: &str) -> Result<&Package, Box<dyn std::error::Error>> {
    let packages = self
        .index_packages
        .as_ref()
        .ok_or("Index not loaded. Call fetch_index first.")?;
    // `ok_or_else` defers the format! allocation to the error path.
    let pkg_info = packages
        .get(package_name)
        .ok_or_else(|| format!("Package '{}' not found in index.", package_name))?;
    Ok(pkg_info)
}

/// Fetches a specific package identified by `package_name`.
/// Assumes `fetch_index` has been called and `self.index_packages` is populated to get the URL.
/// Downloads the package file (.mesk) to the cache directory.
/// /// # Errors /// /// Returns an error if the index is not loaded, the package is not found in the index, /// the package URL is invalid, the request fails, or if there's an issue writing the file. /// Why didn't I just abstract the download functionality into a separate function initially? /// Yes, I'm scared to work with fetch_index, even I don't often write such shit code. pub async fn fetch_package(&self, package_name: &str) -> Result> { let package_info = self.fetch_package_info(package_name)?; let url = url::Url::parse(&package_info.url)?; let host = url.host_str().ok_or("No host in package URL")?; let request_path = url.path(); let session_options = SessionOptions::default(); let mut session = Session::new(session_options).await?; let mut stream = session.connect(host).await?; let request = format!( "GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n", request_path, host ); stream.write_all(request.as_bytes()).await?; let mut reader = BufReader::new(stream); let mut response_buffer = Vec::new(); reader.read_to_end(&mut response_buffer).await?; let headers_end = response_buffer .windows(4) .position(|window| window == b"\r\n\r\n") .ok_or("Invalid response: no headers end")?; let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?; if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") { return Err(format!( "HTTP Error: {}", headers_str.lines().next().unwrap_or("Unknown") ).into()); } let content_length = headers_str .lines() .find(|line| line.to_lowercase().starts_with("content-length:")) .and_then(|line| line.split_at(15).1.trim().parse::().ok()) .unwrap_or(0); let body_start = headers_end + 4; let body_bytes = &response_buffer[body_start..]; let file_name = Path::new(request_path) .file_name() .ok_or("Could not determine filename from URL path")? 
.to_str() .ok_or("Filename is not valid UTF-8")?; let cache_dir = &self.config.paths.cache_dir; let file_path = Path::new(cache_dir).join(file_name); Self::download_and_write_file_with_progress(body_bytes, &file_path, content_length, file_name).await?; log::info!("Package '{}' downloaded successfully to {:?}", package_name, file_path); Ok(true) } }