summaryrefslogtreecommitdiff
path: root/src/net
diff options
context:
space:
mode:
authorNamilskyy <alive6863@gmail.com>2025-11-30 16:26:39 +0300
committerNamilskyy <alive6863@gmail.com>2025-11-30 16:31:25 +0300
commit4bf685f951217b0b5b4df067b8dfeec8bae01357 (patch)
tree7e0065d13a3eb88b783e9468e785856784d8734e /src/net
parentf756b0d1c97193a0c1ad440977fb279c88c4e66a (diff)
Major updates in network sector, implemented package fetch by index, some fixes in pkgtoolkit
Diffstat (limited to 'src/net')
-rw-r--r--src/net/http_packages.rs269
-rw-r--r--src/net/i2p_package.rs310
2 files changed, 434 insertions, 145 deletions
diff --git a/src/net/http_packages.rs b/src/net/http_packages.rs
index fe1833d..84b4444 100644
--- a/src/net/http_packages.rs
+++ b/src/net/http_packages.rs
@@ -1,53 +1,44 @@
use crate::cfg::config::Config;
+use crate::pkgtoolkit::pkgtools::Package;
+use indicatif::{ProgressBar, ProgressStyle};
use reqwest;
-use std::fs::File;
-use std::io::Write;
+use serde::Deserialize;
+use std::collections::HashMap;
use std::path::Path;
+use tokio::fs::File;
+use tokio::io::AsyncWriteExt;
+use futures_util::stream::TryStreamExt;
-#[allow(dead_code)]
pub struct HTTPPackage {
config: Config,
+ index_packages: Option<HashMap<String, Package>>,
+}
+
+#[derive(Deserialize, Debug)]
+struct IndexData {
+ packages: Vec<Package>,
}
-#[allow(dead_code)]
impl HTTPPackage {
- /// Creates a new Downloader object with the given configuration.
- ///
- /// # Arguments
- ///
- /// * `config` - The full mesk
+ /// Creates a new HTTPPackage object with the given configuration.
///
/// # Returns
///
- /// A new Downloader object with the given configuration.
+ /// A new HTTPPackage object with the given configuration.
pub fn new(config: Config) -> Self {
- Self { config }
- }
-
- /// Parse the mesk configuration file and return the Config object.
- ///
- /// This function reads the file at `config_path`, parses it and returns the Config object.
- ///
- /// # Arguments
- ///
- /// * `config_path` - A string representing the path to the mesk configuration file.
- ///
- /// # Returns
- ///
- /// A new Config object with the parsed configuration. If there's an error while reading or parsing the file, returns an Err containing a Box<dyn std::error::Error>.
- pub fn parse_config() -> Result<Config, Box<dyn std::error::Error>> {
- let config = Config::parse()?;
- Ok(config)
+ HTTPPackage {
+ config,
+ index_packages: None,
+ }
}
/// Downloads the INDEX.tar.gz file from the configured repository
- /// and stores it in the configured cache directory.
+ /// and stores it in the configured cache directory with a progress bar.
///
/// # Errors
///
/// Returns an error if the request fails, if the response status is not successful, or if there's an issue while reading or writing the file.
- ///
- pub async fn fetch_index_http(&mut self) -> Result<bool, std::io::Error> {
+ pub async fn fetch_index_http(&mut self) -> Result<bool, Box<dyn std::error::Error>> {
let repo_url_str = &self.config.repo.repo_url;
let cache_dir = &self.config.paths.cache_dir;
@@ -59,49 +50,193 @@ impl HTTPPackage {
let client = reqwest::Client::new();
- let response = client.get(&index_url).send().await.map_err(|e| {
- std::io::Error::new(std::io::ErrorKind::Other, format!("Request failed: {}", e))
- })?;
+ // Make a HEAD request to get the content length for the progress bar
+ let head_response = client.head(&index_url).send().await?;
+ let content_length: u64 = head_response
+ .headers()
+ .get(reqwest::header::CONTENT_LENGTH)
+ .and_then(|ct_len| ct_len.to_str().ok())
+ .and_then(|ct_len| ct_len.parse().ok())
+ .unwrap_or(0);
+ // Create progress bar
+ let pb = if content_length > 0 {
+ let pb = ProgressBar::new(content_length);
+ pb.set_style(
+ ProgressStyle::default_bar()
+ .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")?
+ .progress_chars("#>-"),
+ );
+ pb
+ } else {
+ let pb = ProgressBar::new_spinner();
+ pb.set_style(
+ ProgressStyle::default_spinner()
+ .template("{spinner:.green} [{elapsed_precise}] Fetching INDEX.tar.gz...")?
+ );
+ pb
+ };
+
+ // Send GET request and stream the response body
+ let response = client.get(&index_url).send().await?;
if !response.status().is_success() {
- return Err(std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("HTTP Error: {}", response.status()),
- ));
+ return Err(format!("HTTP Error: {}", response.status()).into());
}
- let index_data = response
- .bytes()
- .await
- .map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to read response body: {}", e),
- )
- })?
- .to_vec();
-
+ let mut stream = response.bytes_stream();
let file_path = Path::new(cache_dir).join("INDEX.tar.gz");
- let mut file = File::create(&file_path).map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to create file: {}", e),
- )
- })?;
- file.write_all(&index_data).map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to write file: {}", e),
- )
- })?;
- file.flush().map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to flush file: {}", e),
- )
- })?;
+ let mut file = File::create(&file_path).await?;
+ let mut downloaded: u64 = 0;
+
+ while let Some(chunk) = stream.try_next().await? {
+ file.write_all(&chunk).await?;
+ let chunk_len = chunk.len() as u64;
+ downloaded += chunk_len;
+ pb.set_position(downloaded);
+ }
+
+ pb.finish_with_message("INDEX.tar.gz download finished");
+
+    // --- Extract and parse INDEX.toml ---
+ log::info!("Extracting INDEX.tar.gz to cache directory...");
+    Package::extract_archive(&file_path.to_string_lossy())?; // Reuse the existing extraction helper from pkgtoolkit
+
+ let index_toml_path = Path::new(cache_dir).join("INDEX.toml");
+ if !index_toml_path.exists() {
+ log::warn!("INDEX.toml not found in INDEX.tar.gz. Proceeding without index data.");
+ self.index_packages = Some(HashMap::new());
+ return Ok(true);
+ }
+
+ let index_content = tokio::fs::read_to_string(&index_toml_path).await?;
+ let index_data: IndexData = toml::from_str(&index_content)?;
+
+ let mut package_map = HashMap::new();
+ for pkg in index_data.packages {
+ // PKG_URL = /repo/package.mesk
+ // FULL URL = "http://mesk.anthrill.i2p/i2p/repo/pkg.mesk"
+ let base_url = url::Url::parse(&self.config.repo.repo_url)?;
+ let full_url = base_url.join(&pkg.url)?;
+ let mut pkg_clone = pkg.clone();
+ pkg_clone.url = full_url.to_string();
+
+ package_map.insert(pkg_clone.name.clone(), pkg_clone);
+ }
+
+ self.index_packages = Some(package_map.clone());
+ log::info!("Index loaded successfully, {} packages found.", package_map.len());
Ok(true)
}
-}
+
+ /// An internal auxiliary function for downloading data and writing it to a file with a progress display.
+ ///
+ /// # Arguments
+ /// * `url` - The URL to download from.
+ /// * `file_path` - The path to the file to write the data to.
+ /// * `description` - Description of the operation for the progress bar.
+ ///
+ /// # Errors
+ ///
+ /// Returns an error if the request fails, if the response status is not successful, or if there's an issue while writing the file.
+ async fn download_file_with_progress(
+ client: &reqwest::Client,
+ url: &str,
+ file_path: &Path,
+ description: &str,
+ ) -> Result<(), Box<dyn std::error::Error>> {
+ // Make a HEAD request to get the content length for the progress bar
+ let head_response = client.head(url).send().await?;
+ let content_length: u64 = head_response
+ .headers()
+ .get(reqwest::header::CONTENT_LENGTH)
+ .and_then(|ct_len| ct_len.to_str().ok())
+ .and_then(|ct_len| ct_len.parse().ok())
+ .unwrap_or(0);
+
+ // Create progress bar
+ let pb = if content_length > 0 {
+ let pb = ProgressBar::new(content_length);
+ pb.set_style(
+ ProgressStyle::default_bar()
+ .template(&format!(
+ "{{spinner:.green}} [{{elapsed_precise}}] [{{bar:40.cyan/blue}}] {{bytes}}/{{total_bytes}} ({} {{eta}})",
+ description
+ ))?
+ .progress_chars("#>-"),
+ );
+ pb
+ } else {
+ let pb = ProgressBar::new_spinner();
+ pb.set_style(
+ ProgressStyle::default_spinner()
+ .template(&format!("{{spinner:.green}} [{{elapsed_precise}}] {}...", description))?
+ );
+ pb
+ };
+
+ // Send GET request and stream the response body
+ let response = client.get(url).send().await?;
+ if !response.status().is_success() {
+ return Err(format!("HTTP Error: {}", response.status()).into());
+ }
+
+ let mut stream = response.bytes_stream();
+ let mut file = File::create(&file_path).await?;
+ let mut downloaded: u64 = 0;
+
+ while let Some(chunk) = stream.try_next().await? {
+ file.write_all(&chunk).await?;
+ let chunk_len = chunk.len() as u64;
+ downloaded += chunk_len;
+ pb.set_position(downloaded);
+ }
+
+ pb.finish_with_message(format!("{} download finished", description));
+ Ok(())
+ }
+
+ /// Fetches a specific package identified by `package_name`.
+ /// Assumes `fetch_index_http` has been called and `self.index_packages` is populated to get the URL.
+ /// Downloads the package file (.mesk) to the cache directory with a progress bar.
+ ///
+ /// # Errors
+ ///
+ /// Returns an error if the index is not loaded, the package is not found in the index,
+ /// the package URL is invalid, the request fails, or if there's an issue writing the file.
+ pub async fn fetch_package_http(&self, package_name: &str) -> Result<bool, Box<dyn std::error::Error>> {
+ let package_info = self.fetch_package_info(package_name)?;
+ let url = &package_info.url;
+
+ let client = reqwest::Client::new();
+
+ let file_name = Path::new(url)
+ .file_name()
+ .ok_or("Could not determine filename from URL")?
+ .to_str()
+ .ok_or("Filename is not valid UTF-8")?;
+ let cache_dir = &self.config.paths.cache_dir;
+ let file_path = Path::new(cache_dir).join(file_name);
+
+ Self::download_file_with_progress(&client, url, &file_path, file_name).await?;
+
+ log::info!(
+ "Package '{}' downloaded successfully to {:?}",
+ package_name,
+ file_path
+ );
+ Ok(true)
+ }
+
+    /// Fetches the index entry for the package identified by `package_name`.
+ /// Assumes `fetch_index_http` has been called and `self.index_packages` is populated.
+ pub fn fetch_package_info(&self, package_name: &str) -> Result<&Package, Box<dyn std::error::Error>> {
+ let packages = self.index_packages.as_ref()
+ .ok_or("Index not loaded. Call fetch_index_http first.")?;
+ let pkg_info = packages
+ .get(package_name)
+ .ok_or(format!("Package '{}' not found in index.", package_name))?;
+ Ok(pkg_info)
+ }
+} \ No newline at end of file
diff --git a/src/net/i2p_package.rs b/src/net/i2p_package.rs
index d149454..970f719 100644
--- a/src/net/i2p_package.rs
+++ b/src/net/i2p_package.rs
@@ -1,6 +1,8 @@
use crate::cfg::config::Config;
-
+use crate::pkgtoolkit::pkgtools::Package;
+use serde::Deserialize;
use tokio;
+
/*
use emissary_core::runtime::{
AsyncRead,
@@ -8,10 +10,9 @@ use emissary_core::runtime::{
};
*/
-use std::{fs::File, io::Write, path::Path};
-// use emissary_core::Profile;
-// use emissary_core::i2np::Message;
+use std::{collections::HashMap, path::Path};
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
+use indicatif::{ProgressBar, ProgressStyle};
use url;
use yosemite::Session;
@@ -19,6 +20,12 @@ use yosemite::SessionOptions;
pub struct I2PPackage {
config: Config,
+ index_packages: Option<HashMap<String, Package>>,
+}
+
+#[derive(Deserialize, Debug)]
+struct IndexData {
+ packages: Vec<Package>,
}
impl I2PPackage {
@@ -28,7 +35,10 @@ impl I2PPackage {
///
/// A new I2P object with the given configuration. The session is initially set to None and the connected status is set to false.
pub fn new(config: Config) -> Self {
- I2PPackage { config: config }
+ I2PPackage {
+ config: config,
+ index_packages: None
+ }
}
/// Downloads the INDEX.tar.gz file from the configured repository
@@ -37,17 +47,12 @@ impl I2PPackage {
/// # Errors
///
/// Returns an error if the request fails, if the response status is not successful, or if there's an issue while reading or writing the file.
- pub async fn fetch_index(&mut self) -> Result<bool, std::io::Error> {
+ pub async fn fetch_index(&mut self) -> Result<bool, Box<dyn std::error::Error>> {
let repo_url_str = &self.config.repo.repo_url;
let cache_dir = &self.config.paths.cache_dir;
- let url = url::Url::parse(repo_url_str).map_err(|_| {
- std::io::Error::new(std::io::ErrorKind::InvalidInput, "Invalid repo URL")
- })?;
-
- let host = url.host_str().ok_or_else(|| {
- std::io::Error::new(std::io::ErrorKind::InvalidInput, "No host in URL")
- })?;
+ let url = url::Url::parse(repo_url_str)?;
+ let host = url.host_str().ok_or("No host in URL")?;
let request_path = url.path();
let request_path = if request_path.ends_with(".tar.gz") {
@@ -57,91 +62,240 @@ impl I2PPackage {
};
let session_options = SessionOptions::default();
- let mut session = Session::new(session_options).await.map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to create SAM session: {}", e),
- )
- })?;
-
- let mut stream = session.connect(host).await.map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::ConnectionAborted,
- format!("Failed to connect: {}", e),
- )
- })?;
+ let mut session = Session::new(session_options).await?;
+ let mut stream = session.connect(host).await?;
let request = format!(
"GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n",
request_path, host
);
-
- stream.write_all(request.as_bytes()).await.map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to write request: {}", e),
- )
- })?;
+ stream.write_all(request.as_bytes()).await?;
let mut reader = BufReader::new(stream);
let mut response_buffer = Vec::new();
- reader
- .read_to_end(&mut response_buffer)
- .await
- .map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to read response: {}", e),
- )
- })?;
+ reader.read_to_end(&mut response_buffer).await?;
let headers_end = response_buffer
.windows(4)
.position(|window| window == b"\r\n\r\n")
- .ok_or_else(|| {
- std::io::Error::new(
- std::io::ErrorKind::InvalidData,
- "Invalid response: no headers end",
- )
- })?;
-
- let headers_str = std::str::from_utf8(&response_buffer[..headers_end]).map_err(|_| {
- std::io::Error::new(std::io::ErrorKind::InvalidData, "Invalid header encoding")
- })?;
+ .ok_or("Invalid response: no headers end")?;
+ let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?;
if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") {
- return Err(std::io::Error::new(
- std::io::ErrorKind::Other,
- format!(
- "HTTP Error: {}",
- headers_str.lines().next().unwrap_or("Unknown")
- ),
- ));
+ return Err(format!(
+ "HTTP Error: {}",
+ headers_str.lines().next().unwrap_or("Unknown")
+ ).into());
}
+ let content_length = headers_str
+ .lines()
+ .find(|line| line.to_lowercase().starts_with("content-length:"))
+ .and_then(|line| line.split_at(15).1.trim().parse::<u64>().ok())
+ .unwrap_or(0);
+
let body_start = headers_end + 4;
+ let mut body_reader = std::io::Cursor::new(&response_buffer[body_start..]);
let file_path = Path::new(cache_dir).join("INDEX.tar.gz");
- let mut file = File::create(&file_path).map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to create file: {}", e),
- )
- })?;
- file.write_all(&response_buffer[body_start..])
- .map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to write file: {}", e),
- )
- })?;
- file.flush().map_err(|e| {
- std::io::Error::new(
- std::io::ErrorKind::Other,
- format!("Failed to flush file: {}", e),
- )
- })?;
+ let pb = if content_length > 0 {
+ let pb = ProgressBar::new(content_length);
+ pb.set_style(ProgressStyle::default_bar()
+ .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")?
+ .progress_chars("#>-"));
+ pb
+ } else {
+ let pb = ProgressBar::new_spinner();
+ pb.set_style(ProgressStyle::default_spinner()
+ .template("{spinner:.green} [{elapsed_precise}] Fetching INDEX.tar.gz...")?
+ );
+ pb
+ };
+
+ let mut file = tokio::fs::File::create(&file_path).await?;
+
+ let chunk_size = 8192u64;
+ let mut buffer = vec![0; chunk_size as usize];
+
+ loop {
+
+ let bytes_read_result = std::io::Read::read(&mut body_reader, &mut buffer);
+ let bytes_read = match bytes_read_result {
+ Ok(n) => n,
+ Err(e) => return Err(e.into()),
+ };
+
+ if bytes_read == 0 {
+ break;
+ }
+ file.write_all(&buffer[..bytes_read]).await?;
+
+ pb.inc(bytes_read as u64);
+
+ if bytes_read < chunk_size as usize {
+ break;
+ }
+ }
+
+ pb.finish_with_message("INDEX.tar.gz download finished");
+
+        // --- NEW: Extract and parse INDEX.toml ---
+ log::info!("Extracting INDEX.tar.gz to cache directory...");
+        Package::extract_archive(&file_path.to_string_lossy())?; // Reuse the existing extraction helper from pkgtoolkit
+
+        let index_toml_path = Path::new(cache_dir).join("INDEX.toml"); // Assumes INDEX.toml sits at the archive root
+ if !index_toml_path.exists() {
+ log::warn!("INDEX.toml not found in INDEX.tar.gz. Proceeding without index data.");
+            self.index_packages = Some(HashMap::new()); // Or None, if a missing index should be treated as an error
+ return Ok(true);
+ }
+
+ let index_content = std::fs::read_to_string(&index_toml_path)?;
+ let index_data: IndexData = toml::from_str(&index_content)?;
+
+ let mut package_map = HashMap::new();
+ for pkg in index_data.packages {
+ // PKG_URL = /repo/package.mesk
+ // FULL URL = "http://mesk.anthrill.i2p/i2p/repo/pkg.mesk"
+ let base_url = url::Url::parse(&self.config.repo.repo_url)?;
+ let full_url = base_url.join(&pkg.url)?;
+ let mut pkg_clone = pkg.clone();
+ pkg_clone.url = full_url.to_string();
+
+ package_map.insert(pkg_clone.name.clone(), pkg_clone);
+ }
+
+ self.index_packages = Some(package_map.clone());
+ log::info!("Index loaded successfully, {} packages found.", package_map.len());
+
+ Ok(true)
+ }
+
+
+ /// An internal auxiliary function for downloading data and writing it to a file with a progress display.
+ ///
+ /// # Arguments
+    /// * `data` - Byte slice (&[u8]) with the data to write.
+    /// * `file_path` - The path to the file to write the data to.
+    /// * `content_length` - The expected data size (for the progress bar). May be 0.
+    /// * `description` - Description of the operation for the progress bar.
+ ///
+ /// # Errors
+ ///
+ /// Returns an error if there is a problem when creating or writing to a file.
+ async fn download_and_write_file_with_progress(
+ data: &[u8],
+ file_path: &Path,
+ content_length: u64,
+ description: &str,
+ ) -> Result<(), Box<dyn std::error::Error>> {
+        // Progress bar for the download
+ let pb = if content_length > 0 {
+ let pb = ProgressBar::new(content_length);
+ pb.set_style(ProgressStyle::default_bar()
+ .template(&format!("{{spinner:.green}} [{{elapsed_precise}}] [{{bar:40.cyan/blue}}] {{bytes}}/{{total_bytes}} ({} {{eta}})", description))?
+ .progress_chars("#>-"));
+ pb
+ } else {
+ let pb = ProgressBar::new_spinner();
+ pb.set_style(ProgressStyle::default_spinner()
+ .template(&format!("{{spinner:.green}} [{{elapsed_precise}}] {}...", description))?
+ );
+ pb
+ };
+
+ let mut file = tokio::fs::File::create(&file_path).await?;
+
+ let chunk_size = 8192usize;
+ let mut pos = 0;
+
+ while pos < data.len() {
+ let end = std::cmp::min(pos + chunk_size, data.len());
+ let chunk = &data[pos..end];
+ file.write_all(chunk).await?;
+ pb.inc(chunk.len() as u64);
+ pos = end;
+ }
+
+ pb.finish_with_message(format!("{} download finished", description));
+ Ok(())
+ }
+
+    /// Fetches the index entry for the package identified by `package_name`.
+ /// Assumes `fetch_index` has been called and `self.index_packages` is populated.
+ pub fn fetch_package_info(&self, package_name: &str) -> Result<&Package, Box<dyn std::error::Error>> {
+ let packages = self.index_packages.as_ref()
+ .ok_or("Index not loaded. Call fetch_index first.")?;
+ let pkg_info = packages.get(package_name)
+ .ok_or(format!("Package '{}' not found in index.", package_name))?;
+ Ok(pkg_info)
+ }
+
+ /// Fetches a specific package identified by `package_name`.
+ /// Assumes `fetch_index` has been called and `self.index_packages` is populated to get the URL.
+ /// Downloads the package file (.mesk) to the cache directory.
+ ///
+ /// # Errors
+ ///
+ /// Returns an error if the index is not loaded, the package is not found in the index,
+ /// the package URL is invalid, the request fails, or if there's an issue writing the file.
+    /// TODO: the download logic below duplicates `fetch_index`; extract it into a
+    /// shared helper once `fetch_index` is safe to refactor.
+ pub async fn fetch_package(&self, package_name: &str) -> Result<bool, Box<dyn std::error::Error>> {
+ let package_info = self.fetch_package_info(package_name)?;
+ let url = url::Url::parse(&package_info.url)?;
+ let host = url.host_str().ok_or("No host in package URL")?;
+ let request_path = url.path();
+
+ let session_options = SessionOptions::default();
+ let mut session = Session::new(session_options).await?;
+ let mut stream = session.connect(host).await?;
+
+ let request = format!(
+ "GET {} HTTP/1.1\r\nHost: {}\r\nConnection: close\r\n\r\n",
+ request_path, host
+ );
+ stream.write_all(request.as_bytes()).await?;
+
+ let mut reader = BufReader::new(stream);
+ let mut response_buffer = Vec::new();
+ reader.read_to_end(&mut response_buffer).await?;
+
+ let headers_end = response_buffer
+ .windows(4)
+ .position(|window| window == b"\r\n\r\n")
+ .ok_or("Invalid response: no headers end")?;
+
+ let headers_str = std::str::from_utf8(&response_buffer[..headers_end])?;
+ if !headers_str.starts_with("HTTP/1.1 200") && !headers_str.starts_with("HTTP/1.0 200") {
+ return Err(format!(
+ "HTTP Error: {}",
+ headers_str.lines().next().unwrap_or("Unknown")
+ ).into());
+ }
+
+ let content_length = headers_str
+ .lines()
+ .find(|line| line.to_lowercase().starts_with("content-length:"))
+ .and_then(|line| line.split_at(15).1.trim().parse::<u64>().ok())
+ .unwrap_or(0);
+
+ let body_start = headers_end + 4;
+ let body_bytes = &response_buffer[body_start..];
+
+ let file_name = Path::new(request_path)
+ .file_name()
+ .ok_or("Could not determine filename from URL path")?
+ .to_str()
+ .ok_or("Filename is not valid UTF-8")?;
+ let cache_dir = &self.config.paths.cache_dir;
+ let file_path = Path::new(cache_dir).join(file_name);
+
+ Self::download_and_write_file_with_progress(body_bytes, &file_path, content_length, file_name).await?;
+
+ log::info!("Package '{}' downloaded successfully to {:?}", package_name, file_path);
Ok(true)
}
}
+