Add detailed error handling for file and directory operations

- Introduced specific error variants (e.g., `FileOpenError`, `FileWriteError`, `DirectoryCreateError`) in `RepositoryError` for better error diagnostics.
- Applied `.map_err()` handling for operations like file creation, copying, renaming, and directory manipulation.
- Replaced direct `HashMap` usage with `Mutex<HashMap>` for thread-safe access to catalog managers in `RestBackend`.
- Refactored `list_packages` to use `list_packages_from_catalog`, removing reliance on the search API.
- Added temporary directory management for non-local-cache scenarios in `RestBackend`.
- Removed `.http2_prior_knowledge()` from the HTTP client builder so the client can negotiate HTTP/1.1 with servers that do not speak HTTP/2.
- Stopped forcing the `INFO` level into the tracing `EnvFilter`, letting `RUST_LOG` fully control log verbosity.
This commit is contained in:
Till Wegmueller 2026-02-05 23:16:02 +01:00
parent dfc24725b8
commit 7a3373a17d
No known key found for this signature in database
4 changed files with 319 additions and 128 deletions

View file

@ -571,15 +571,30 @@ impl Transaction {
// Save the JSON manifest to the transaction directory // Save the JSON manifest to the transaction directory
let manifest_json_path = self.path.join("manifest.json"); let manifest_json_path = self.path.join("manifest.json");
let manifest_json = serde_json::to_string_pretty(&self.manifest)?; let manifest_json = serde_json::to_string_pretty(&self.manifest)?;
fs::write(&manifest_json_path, &manifest_json)?; fs::write(&manifest_json_path, &manifest_json).map_err(|e| {
RepositoryError::FileWriteError {
path: manifest_json_path.clone(),
source: e,
}
})?;
// Save the legacy manifest to the transaction directory // Save the legacy manifest to the transaction directory
let manifest_legacy_path = self.path.join("manifest"); let manifest_legacy_path = self.path.join("manifest");
if let Some(content) = &self.legacy_manifest_content { if let Some(content) = &self.legacy_manifest_content {
fs::write(&manifest_legacy_path, content)?; fs::write(&manifest_legacy_path, content).map_err(|e| {
RepositoryError::FileWriteError {
path: manifest_legacy_path.clone(),
source: e,
}
})?;
} else { } else {
// Fallback: write JSON as legacy content if none provided (status quo) // Fallback: write JSON as legacy content if none provided (status quo)
fs::write(&manifest_legacy_path, &manifest_json)?; fs::write(&manifest_legacy_path, &manifest_json).map_err(|e| {
RepositoryError::FileWriteError {
path: manifest_legacy_path.clone(),
source: e,
}
})?;
} }
// Determine the publisher to use // Determine the publisher to use
@ -625,12 +640,19 @@ impl Transaction {
// Create parent directories if they don't exist // Create parent directories if they don't exist
if let Some(parent) = dest_path.parent() { if let Some(parent) = dest_path.parent() {
fs::create_dir_all(parent)?; fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
path: parent.to_path_buf(),
source: e,
})?;
} }
// Copy the file if it doesn't already exist // Copy the file if it doesn't already exist
if !dest_path.exists() { if !dest_path.exists() {
fs::copy(source_path, &dest_path)?; fs::copy(&source_path, &dest_path).map_err(|e| RepositoryError::FileCopyError {
from: source_path.clone(),
to: dest_path,
source: e,
})?;
} }
} }
@ -689,7 +711,10 @@ impl Transaction {
debug!("Package directory: {}", pkg_dir.display()); debug!("Package directory: {}", pkg_dir.display());
if !pkg_dir.exists() { if !pkg_dir.exists() {
debug!("Creating package directory"); debug!("Creating package directory");
fs::create_dir_all(&pkg_dir)?; fs::create_dir_all(&pkg_dir).map_err(|e| RepositoryError::DirectoryCreateError {
path: pkg_dir.clone(),
source: e,
})?;
} }
// Construct the manifest path using the helper method // Construct the manifest path using the helper method
@ -710,7 +735,10 @@ impl Transaction {
// Create parent directories if they don't exist // Create parent directories if they don't exist
if let Some(parent) = pkg_manifest_path.parent() { if let Some(parent) = pkg_manifest_path.parent() {
debug!("Creating parent directories: {}", parent.display()); debug!("Creating parent directories: {}", parent.display());
fs::create_dir_all(parent)?; fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
path: parent.to_path_buf(),
source: e,
})?;
} }
// Copy to pkg directory // Copy to pkg directory
@ -721,7 +749,13 @@ impl Transaction {
manifest_json_path.display(), manifest_json_path.display(),
pkg_manifest_json_path.display() pkg_manifest_json_path.display()
); );
fs::copy(&manifest_json_path, &pkg_manifest_json_path)?; fs::copy(&manifest_json_path, &pkg_manifest_json_path).map_err(|e| {
RepositoryError::FileCopyError {
from: manifest_json_path,
to: pkg_manifest_json_path,
source: e,
}
})?;
// 2. Copy legacy manifest // 2. Copy legacy manifest
debug!( debug!(
@ -729,7 +763,13 @@ impl Transaction {
manifest_legacy_path.display(), manifest_legacy_path.display(),
pkg_manifest_path.display() pkg_manifest_path.display()
); );
fs::copy(&manifest_legacy_path, &pkg_manifest_path)?; fs::copy(&manifest_legacy_path, &pkg_manifest_path).map_err(|e| {
RepositoryError::FileCopyError {
from: manifest_legacy_path,
to: pkg_manifest_path,
source: e,
}
})?;
// Check if we need to create a pub.p5i file for the publisher // Check if we need to create a pub.p5i file for the publisher
let config_path = self.repo.join(REPOSITORY_CONFIG_FILENAME); let config_path = self.repo.join(REPOSITORY_CONFIG_FILENAME);
@ -757,7 +797,10 @@ impl Transaction {
} }
// Clean up the transaction directory // Clean up the transaction directory
fs::remove_dir_all(self.path)?; fs::remove_dir_all(&self.path).map_err(|e| RepositoryError::DirectoryRemoveError {
path: self.path.clone(),
source: e,
})?;
Ok(()) Ok(())
} }
@ -1498,7 +1541,10 @@ impl ReadableRepository for FileBackend {
// Ensure destination directory exists // Ensure destination directory exists
if let Some(parent) = dest.parent() { if let Some(parent) = dest.parent() {
fs::create_dir_all(parent)?; fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
path: parent.to_path_buf(),
source: e,
})?;
} }
// If destination already exists and matches digest, do nothing // If destination already exists and matches digest, do nothing
@ -1541,10 +1587,20 @@ impl ReadableRepository for FileBackend {
// Write atomically // Write atomically
let tmp = dest.with_extension("tmp"); let tmp = dest.with_extension("tmp");
{ {
let mut f = File::create(&tmp)?; let mut f = File::create(&tmp).map_err(|e| RepositoryError::FileCreateError {
f.write_all(&bytes)?; path: tmp.clone(),
source: e,
})?;
f.write_all(&bytes).map_err(|e| RepositoryError::FileWriteError {
path: tmp.clone(),
source: e,
})?;
} }
fs::rename(&tmp, dest)?; fs::rename(&tmp, dest).map_err(|e| RepositoryError::FileRenameError {
from: tmp,
to: dest.to_path_buf(),
source: e,
})?;
Ok(()) Ok(())
} }

View file

@ -69,6 +69,17 @@ pub enum RepositoryError {
source: io::Error, source: io::Error,
}, },
#[error("failed to open file {path}: {source}")]
#[diagnostic(
code(ips::repository_error::file_open),
help("Check that the file exists and is accessible")
)]
FileOpenError {
path: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to read file {path}: {source}")] #[error("failed to read file {path}: {source}")]
#[diagnostic( #[diagnostic(
code(ips::repository_error::file_read), code(ips::repository_error::file_read),
@ -91,6 +102,63 @@ pub enum RepositoryError {
source: io::Error, source: io::Error,
}, },
#[error("failed to create file {path}: {source}")]
#[diagnostic(
code(ips::repository_error::file_create),
help("Check that the directory is writable and has enough space")
)]
FileCreateError {
path: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to rename {from} to {to}: {source}")]
#[diagnostic(
code(ips::repository_error::file_rename),
help("Check that the files are on the same filesystem and not in use")
)]
FileRenameError {
from: PathBuf,
to: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to remove file {path}: {source}")]
#[diagnostic(
code(ips::repository_error::file_remove),
help("Check that the file exists and you have permissions to remove it")
)]
FileRemoveError {
path: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to remove directory {path}: {source}")]
#[diagnostic(
code(ips::repository_error::directory_remove),
help("Check that the directory exists and you have permissions to remove it")
)]
DirectoryRemoveError {
path: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to copy {from} to {to}: {source}")]
#[diagnostic(
code(ips::repository_error::file_copy),
help("Check that both paths are valid and you have necessary permissions")
)]
FileCopyError {
from: PathBuf,
to: PathBuf,
#[source]
source: io::Error,
},
#[error("failed to parse JSON: {0}")] #[error("failed to parse JSON: {0}")]
#[diagnostic( #[diagnostic(
code(ips::repository_error::json_parse), code(ips::repository_error::json_parse),

View file

@ -5,9 +5,10 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{BufRead, BufReader, Write}; use std::io::Write;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::FromStr; use std::str::FromStr;
use std::sync::Mutex;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use reqwest::blocking::Client; use reqwest::blocking::Client;
@ -55,8 +56,10 @@ pub struct RestBackend {
pub local_cache_path: Option<PathBuf>, pub local_cache_path: Option<PathBuf>,
/// HTTP client for making requests to the repository /// HTTP client for making requests to the repository
client: Client, client: Client,
/// Catalog managers for each publisher /// Catalog managers for each publisher (using internal mutability)
catalog_managers: HashMap<String, CatalogManager>, catalog_managers: Mutex<HashMap<String, CatalogManager>>,
/// Temporary directory for catalogs (using internal mutability)
temp_cache_dir: Mutex<Option<tempfile::TempDir>>,
} }
impl WritableRepository for RestBackend { impl WritableRepository for RestBackend {
@ -83,7 +86,8 @@ impl WritableRepository for RestBackend {
config, config,
local_cache_path: None, local_cache_path: None,
client: Self::create_optimized_client(), client: Self::create_optimized_client(),
catalog_managers: HashMap::new(), catalog_managers: Mutex::new(HashMap::new()),
temp_cache_dir: Mutex::new(None),
}; };
// In a real implementation, we would make a REST API call to create the repository structure // In a real implementation, we would make a REST API call to create the repository structure
@ -260,12 +264,13 @@ impl WritableRepository for RestBackend {
/// Refresh repository metadata /// Refresh repository metadata
fn refresh(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()> { fn refresh(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()> {
// We need to clone self to avoid borrowing issues // We need to clone self to avoid borrowing issues
let mut cloned_self = RestBackend { let cloned_self = RestBackend {
uri: self.uri.clone(), uri: self.uri.clone(),
config: self.config.clone(), config: self.config.clone(),
local_cache_path: self.local_cache_path.clone(), local_cache_path: self.local_cache_path.clone(),
client: Self::create_optimized_client(), client: Self::create_optimized_client(),
catalog_managers: HashMap::new(), catalog_managers: Mutex::new(HashMap::new()),
temp_cache_dir: Mutex::new(None),
}; };
// Check if we have a local cache path // Check if we have a local cache path
@ -396,7 +401,8 @@ impl ReadableRepository for RestBackend {
config, config,
local_cache_path: None, local_cache_path: None,
client, client,
catalog_managers: HashMap::new(), catalog_managers: Mutex::new(HashMap::new()),
temp_cache_dir: Mutex::new(None),
}) })
} }
@ -435,71 +441,7 @@ impl ReadableRepository for RestBackend {
publisher: Option<&str>, publisher: Option<&str>,
pattern: Option<&str>, pattern: Option<&str>,
) -> Result<Vec<PackageInfo>> { ) -> Result<Vec<PackageInfo>> {
let pattern = pattern.unwrap_or("*"); self.list_packages_from_catalog(publisher, pattern)
// Use search API to find packages
// URL: /search/0/<pattern>
let url = format!("{}/search/0/{}", self.uri, pattern);
debug!("Listing packages via search: {}", url);
let mut packages = Vec::new();
let mut seen_fmris = HashSet::new();
match self.client.get(&url).send() {
Ok(resp) => {
let resp = match resp.error_for_status() {
Ok(r) => r,
Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => {
return Ok(Vec::new());
}
Err(e) => {
return Err(RepositoryError::Other(format!(
"Search API error: {} for {}",
e, url
)));
}
};
let reader = BufReader::new(resp);
for line in reader.lines() {
let line = line.map_err(|e| {
RepositoryError::Other(format!(
"Failed to read search response line: {}",
e
))
})?;
// Line format: <attr> <fmri> <value_type> <value>
// Example: pkg.fmri pkg:/system/rsyslog@8.2508.0,5.11-151056.0:20251023T180542Z set omnios/system/rsyslog
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == "pkg.fmri" {
if let Ok(fmri) = crate::fmri::Fmri::parse(parts[1]) {
// Filter by publisher if requested
if let Some(pub_name) = publisher {
if let Some(fmri_pub) = fmri.publisher.as_deref() {
if fmri_pub != pub_name {
continue;
}
}
// If FMRI has no publisher, we assume it matches the requested publisher
// as it's being served by this repository.
}
if seen_fmris.insert(fmri.to_string()) {
packages.push(PackageInfo { fmri });
}
}
}
}
}
Err(e) => {
return Err(RepositoryError::Other(format!(
"Failed to connect to search API: {} for {}",
e, url
)));
}
}
Ok(packages)
} }
/// Show contents of packages /// Show contents of packages
@ -625,7 +567,10 @@ impl ReadableRepository for RestBackend {
// Ensure destination directory exists // Ensure destination directory exists
if let Some(parent) = dest.parent() { if let Some(parent) = dest.parent() {
fs::create_dir_all(parent)?; fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
path: parent.to_path_buf(),
source: e,
})?;
} }
let mut last_err: Option<String> = None; let mut last_err: Option<String> = None;
@ -635,7 +580,11 @@ impl ReadableRepository for RestBackend {
let mut resp = resp; let mut resp = resp;
// Write atomically // Write atomically
let tmp_path = dest.with_extension("tmp"); let tmp_path = dest.with_extension("tmp");
let mut tmp_file = File::create(&tmp_path)?; let mut tmp_file =
File::create(&tmp_path).map_err(|e| RepositoryError::FileCreateError {
path: tmp_path.clone(),
source: e,
})?;
std::io::copy(&mut resp, &mut tmp_file).map_err(|e| { std::io::copy(&mut resp, &mut tmp_file).map_err(|e| {
RepositoryError::Other(format!("Failed to download payload: {}", e)) RepositoryError::Other(format!("Failed to download payload: {}", e))
@ -644,7 +593,10 @@ impl ReadableRepository for RestBackend {
// Verify digest if algorithm is known // Verify digest if algorithm is known
if let Some(alg) = algo.clone() { if let Some(alg) = algo.clone() {
let f = File::open(&tmp_path)?; let f = File::open(&tmp_path).map_err(|e| RepositoryError::FileOpenError {
path: tmp_path.clone(),
source: e,
})?;
let comp = crate::digest::Digest::from_reader( let comp = crate::digest::Digest::from_reader(
f, f,
alg, alg,
@ -661,7 +613,11 @@ impl ReadableRepository for RestBackend {
} }
} }
fs::rename(&tmp_path, dest)?; fs::rename(&tmp_path, dest).map_err(|e| RepositoryError::FileRenameError {
from: tmp_path,
to: dest.to_path_buf(),
source: e,
})?;
return Ok(()); return Ok(());
} }
Ok(resp) => { Ok(resp) => {
@ -769,7 +725,6 @@ impl RestBackend {
.connect_timeout(Duration::from_secs(30)) .connect_timeout(Duration::from_secs(30))
.timeout(Duration::from_secs(300)) .timeout(Duration::from_secs(300))
.tcp_keepalive(Some(Duration::from_secs(60))) .tcp_keepalive(Some(Duration::from_secs(60)))
.http2_prior_knowledge()
.build() .build()
.unwrap_or_else(|_| Client::new()) .unwrap_or_else(|_| Client::new())
} }
@ -835,29 +790,31 @@ impl RestBackend {
} }
/// Get the catalog manager for a publisher /// Get the catalog manager for a publisher
fn get_catalog_manager(&mut self, publisher: &str) -> Result<&mut CatalogManager> { fn get_catalog_manager(&self, publisher: &str) -> Result<CatalogManager> {
// Check if we have a local cache path // Check if we have a local cache path, otherwise use temporary directory
let cache_path = match &self.local_cache_path { if self.local_cache_path.is_none() && self.temp_cache_dir.lock().unwrap().is_none() {
Some(path) => path, let temp_dir = tempfile::tempdir().map_err(RepositoryError::IoError)?;
None => { *self.temp_cache_dir.lock().unwrap() = Some(temp_dir);
return Err(RepositoryError::Other(
"No local cache path set".to_string(),
));
}
};
// The local cache path is expected to already point to the per-publisher directory
// Ensure the directory exists
fs::create_dir_all(cache_path)?;
// Get or create the catalog manager pointing at the per-publisher directory directly
if !self.catalog_managers.contains_key(publisher) {
let catalog_manager = CatalogManager::new(cache_path, publisher)?;
self.catalog_managers
.insert(publisher.to_string(), catalog_manager);
} }
Ok(self.catalog_managers.get_mut(publisher).unwrap()) let cache_path = if let Some(path) = &self.local_cache_path {
path.clone()
} else {
self.temp_cache_dir
.lock()
.unwrap()
.as_ref()
.unwrap()
.path()
.join(publisher)
};
// Ensure the directory exists
fs::create_dir_all(&cache_path)?;
// Return a new catalog manager pointing at the directory
let manager = CatalogManager::new(&cache_path, publisher)?;
Ok(manager)
} }
/// Downloads a catalog file from the remote server. /// Downloads a catalog file from the remote server.
@ -986,23 +943,31 @@ impl RestBackend {
/// - Failed to download the catalog file /// - Failed to download the catalog file
/// - Failed to create or write to the file /// - Failed to create or write to the file
fn download_and_store_catalog_file( fn download_and_store_catalog_file(
&mut self, &self,
publisher: &str, publisher: &str,
file_name: &str, file_name: &str,
progress: Option<&dyn ProgressReporter>, progress: Option<&dyn ProgressReporter>,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
// Check if we have a local cache path // Check if we have a local cache path, otherwise use temporary directory
let cache_path = match &self.local_cache_path { if self.local_cache_path.is_none() && self.temp_cache_dir.lock().unwrap().is_none() {
Some(path) => path, let temp_dir = tempfile::tempdir().map_err(RepositoryError::IoError)?;
None => { *self.temp_cache_dir.lock().unwrap() = Some(temp_dir);
return Err(RepositoryError::Other( }
"No local cache path set".to_string(),
)); let cache_path = if let Some(path) = &self.local_cache_path {
} path.clone()
} else {
self.temp_cache_dir
.lock()
.unwrap()
.as_ref()
.unwrap()
.path()
.join(publisher)
}; };
// Ensure the per-publisher directory (local cache path) exists // Ensure the directory exists
fs::create_dir_all(cache_path)?; fs::create_dir_all(&cache_path)?;
// Download the catalog file // Download the catalog file
let content = self.download_catalog_file(publisher, file_name, progress)?; let content = self.download_catalog_file(publisher, file_name, progress)?;
@ -1073,7 +1038,7 @@ impl RestBackend {
/// - Failed to download any catalog part /// - Failed to download any catalog part
/// - Failed to load any catalog part into the catalog manager /// - Failed to load any catalog part into the catalog manager
pub fn download_catalog( pub fn download_catalog(
&mut self, &self,
publisher: &str, publisher: &str,
progress: Option<&dyn ProgressReporter>, progress: Option<&dyn ProgressReporter>,
) -> Result<()> { ) -> Result<()> {
@ -1130,7 +1095,7 @@ impl RestBackend {
} }
// Get the catalog manager for this publisher // Get the catalog manager for this publisher
let catalog_manager = self.get_catalog_manager(publisher)?; let mut catalog_manager = self.get_catalog_manager(publisher)?;
// Update progress for loading parts // Update progress for loading parts
overall_progress = overall_progress.with_context("Loading catalog parts".to_string()); overall_progress = overall_progress.with_context("Loading catalog parts".to_string());
@ -1141,6 +1106,12 @@ impl RestBackend {
catalog_manager.load_part(part_name)?; catalog_manager.load_part(part_name)?;
} }
// Store the catalog manager back if we want to cache it
self.catalog_managers
.lock()
.unwrap()
.insert(publisher.to_string(), catalog_manager);
// Report completion // Report completion
overall_progress = overall_progress.with_current(total_parts); overall_progress = overall_progress.with_current(total_parts);
progress_reporter.finish(&overall_progress); progress_reporter.finish(&overall_progress);
@ -1159,7 +1130,7 @@ impl RestBackend {
/// # Returns /// # Returns
/// ///
/// * `Result<()>` - Ok if all catalogs were downloaded successfully, Err otherwise /// * `Result<()>` - Ok if all catalogs were downloaded successfully, Err otherwise
pub fn download_all_catalogs(&mut self, progress: Option<&dyn ProgressReporter>) -> Result<()> { pub fn download_all_catalogs(&self, progress: Option<&dyn ProgressReporter>) -> Result<()> {
// Use a no-op reporter if none was provided // Use a no-op reporter if none was provided
let progress_reporter = progress.unwrap_or(&NoopProgressReporter); let progress_reporter = progress.unwrap_or(&NoopProgressReporter);
@ -1208,10 +1179,106 @@ impl RestBackend {
/// ///
/// * `Result<()>` - Ok if the catalog was refreshed successfully, Err otherwise /// * `Result<()>` - Ok if the catalog was refreshed successfully, Err otherwise
pub fn refresh_catalog( pub fn refresh_catalog(
&mut self, &self,
publisher: &str, publisher: &str,
progress: Option<&dyn ProgressReporter>, progress: Option<&dyn ProgressReporter>,
) -> Result<()> { ) -> Result<()> {
self.download_catalog(publisher, progress) self.download_catalog(publisher, progress)
} }
/// List packages using the catalog instead of the search API
pub fn list_packages_from_catalog(
&self,
publisher: Option<&str>,
pattern: Option<&str>,
) -> Result<Vec<PackageInfo>> {
let pattern = pattern.unwrap_or("*");
let mut packages = Vec::new();
// Get publishers to check
let publishers = if let Some(pub_name) = publisher {
vec![pub_name.to_string()]
} else {
self.config.publishers.clone()
};
for pub_name in publishers {
// Refresh catalog for each publisher
self.refresh_catalog(&pub_name, None)?;
let cache_path = if let Some(path) = &self.local_cache_path {
path.clone()
} else {
self.temp_cache_dir
.lock()
.unwrap()
.as_ref()
.unwrap()
.path()
.join(&pub_name)
};
let catalog_manager = self.get_catalog_manager(&pub_name)?;
let attrs_path = cache_path.join("catalog.attrs");
let attrs_content = fs::read_to_string(&attrs_path).map_err(|e| {
RepositoryError::FileReadError {
path: attrs_path.clone(),
source: e,
}
})?;
let attrs: Value = serde_json::from_str(&attrs_content).map_err(|e| {
RepositoryError::JsonParseError(format!("Failed to parse catalog.attrs: {}", e))
})?;
let parts = attrs["parts"].as_object().ok_or_else(|| {
RepositoryError::JsonParseError("Missing 'parts' field in catalog.attrs".to_string())
})?;
let mut seen_fmris = HashSet::new();
for part_name in parts.keys() {
if let Some(part) = catalog_manager.get_part(part_name) {
// Match stems against pattern
for (publisher_in_catalog, stems) in &part.packages {
if publisher_in_catalog != &pub_name {
continue;
}
for (stem, versions) in stems {
let matches = if pattern == "*" {
true
} else if pattern.contains('*') {
// Basic glob matching (stem matching pattern)
let re_pattern = pattern.replace('*', ".*");
if let Ok(re) = regex::Regex::new(&format!("^{}$", re_pattern)) {
re.is_match(stem)
} else {
stem == pattern
}
} else {
stem == pattern
};
if matches {
for v_entry in versions {
let fmri_str = format!(
"pkg://{}/{}@{}",
pub_name, stem, v_entry.version
);
if seen_fmris.insert(fmri_str.clone()) {
if let Ok(fmri) = crate::fmri::Fmri::parse(&fmri_str) {
packages.push(PackageInfo { fmri });
}
}
}
}
}
}
}
}
}
Ok(packages)
}
} }

View file

@ -50,7 +50,7 @@ struct Cli {
fn main() -> Result<()> { fn main() -> Result<()> {
// Initialize tracing // Initialize tracing
fmt() fmt()
.with_env_filter(EnvFilter::from_default_env().add_directive(tracing::Level::INFO.into())) .with_env_filter(EnvFilter::from_default_env())
.init(); .init();
let cli = Cli::parse(); let cli = Cli::parse();