summaryrefslogtreecommitdiff
path: root/src/net/http_package.rs
diff options
context:
space:
mode:
authorNamilskyy <alive6863@gmail.com>2025-12-01 14:06:10 +0300
committerNamilskyy <alive6863@gmail.com>2025-12-01 14:06:10 +0300
commit068eea55e0612184151461e64633b3dc18e53490 (patch)
treed3e20aea860a88e2b50ad3d35ddbd8792a1e531e /src/net/http_package.rs
parent2029c2c09284d0228c0bce2977fdfa0ca8a8db06 (diff)
Implemented test suite, fixed some issues and added more modular structure into .woodpecker.yaml
Diffstat (limited to 'src/net/http_package.rs')
-rw-r--r--src/net/http_package.rs43
1 file changed, 30 insertions, 13 deletions
diff --git a/src/net/http_package.rs b/src/net/http_package.rs
index ec7c318..ade4ee1 100644
--- a/src/net/http_package.rs
+++ b/src/net/http_package.rs
@@ -4,14 +4,15 @@ use futures_util::stream::TryStreamExt;
use indicatif::{ProgressBar, ProgressStyle};
use reqwest;
use serde::Deserialize;
-use std::collections::HashMap;
-use std::path::Path;
-use tokio::fs::File;
-use tokio::io::AsyncWriteExt;
+use std::{ collections::HashMap, path::Path };
+use tokio::{ fs::File, io::AsyncWriteExt};
+use std::fs::File as StdFile;
+use flate2::read::GzDecoder;
+use tar::Archive;
pub struct HTTPPackage {
- config: Config,
- index_packages: Option<HashMap<String, Package>>,
+ pub config: Config,
+ pub index_packages: Option<HashMap<String, Package>>,
}
#[derive(Deserialize, Debug)]
@@ -42,6 +43,8 @@ impl HTTPPackage {
let repo_url_str = &self.config.repo.repo_url;
let cache_dir = &self.config.paths.cache_dir;
+ log::debug!("Cache directory: {:?}", cache_dir);
+
let index_url = if repo_url_str.ends_with(".tar.gz") {
repo_url_str.clone()
} else {
@@ -86,6 +89,10 @@ impl HTTPPackage {
let mut stream = response.bytes_stream();
let file_path = Path::new(cache_dir).join("INDEX.tar.gz");
+ // Ensure cache_dir exists
+ tokio::fs::create_dir_all(cache_dir).await
+ .map_err(|e| format!("Failed to create cache dir: {}", e))?;
+
let mut file = File::create(&file_path).await?;
let mut downloaded: u64 = 0;
@@ -98,9 +105,16 @@ impl HTTPPackage {
pb.finish_with_message("INDEX.tar.gz download finished");
- // --- Извлечение и парсинг INDEX.toml ---
log::info!("Extracting INDEX.tar.gz to cache directory...");
- Package::extract_archive(&file_path.to_string_lossy())?; // Используем существующую функцию из pkgtoolkit
+ // Package::extract_archive(&file_path.to_string_lossy())?;
+
+
+ let archive_file = StdFile::open(&file_path)
+ .map_err(|e| format!("Failed to open archive: {}", e))?;
+ let gz_decoder = GzDecoder::new(archive_file);
+ let mut archive = Archive::new(gz_decoder);
+ archive.unpack(cache_dir)
+ .map_err(|e| format!("Failed to unpack archive: {}", e))?;
let index_toml_path = Path::new(cache_dir).join("INDEX.toml");
if !index_toml_path.exists() {
@@ -110,12 +124,13 @@ impl HTTPPackage {
}
let index_content = tokio::fs::read_to_string(&index_toml_path).await?;
- let index_data: IndexData = toml::from_str(&index_content)?;
+ log::debug!("Content of INDEX.toml:\n{}", index_content);
+
+ let index_data: IndexData = toml::from_str(&index_content)
+ .map_err(|e| format!("Failed to parse INDEX.toml: {}", e))?;
let mut package_map = HashMap::new();
for pkg in index_data.packages {
- // PKG_URL = /repo/package.mesk
- // FULL URL = "http://mesk.anthrill.i2p/i2p/repo/pkg.mesk"
let base_url = url::Url::parse(&self.config.repo.repo_url)?;
let full_url = base_url.join(&pkg.url)?;
let mut pkg_clone = pkg.clone();
@@ -131,7 +146,7 @@ impl HTTPPackage {
);
Ok(true)
- }
+}
/// An internal auxiliary function for downloading data and writing it to a file with a progress display.
///
@@ -212,7 +227,7 @@ impl HTTPPackage {
) -> Result<bool, Box<dyn std::error::Error>> {
let package_info = self.fetch_package_info(package_name)?;
let url = &package_info.url;
-
+
let client = reqwest::Client::new();
let file_name = Path::new(url)
@@ -223,6 +238,8 @@ impl HTTPPackage {
let cache_dir = &self.config.paths.cache_dir;
let file_path = Path::new(cache_dir).join(file_name);
+ tokio::fs::create_dir_all(&cache_dir).await?;
+
Self::download_file_with_progress(&client, url, &file_path, file_name).await?;
log::info!(