Introduce progress reporting infrastructure and error management refinements

- Add `ProgressReporter` trait with `start`, `update`, and `finish` methods for customizable progress tracking during operations.
- Implement `ProgressInfo` struct for detailed progress state reporting, including operation, progress, and context information.
- Create a no-op implementation `NoopProgressReporter` for cases where progress reporting is not needed.
- Enhance `pkg6` with structured error handling using `Pkg6Error` and new diagnostic variants for I/O, JSON, FMRI, and logging errors.
- Update `Image` functionality to leverage progress reporting during catalog downloads and include robust publisher management.
- Introduce `TestProgressReporter` for unit tests to validate progress reporting integration.
This commit is contained in:
Till Wegmueller 2025-08-03 14:28:36 +02:00
parent f7f017f7b9
commit 2777f153c9
No known key found for this signature in database
10 changed files with 2117 additions and 130 deletions

8
Cargo.lock generated
View file

@ -1022,6 +1022,7 @@ dependencies = [
"pest_derive",
"redb",
"regex",
"reqwest",
"rust-ini",
"semver",
"serde",
@ -1463,9 +1464,14 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
name = "pkg6"
version = "0.5.1"
dependencies = [
"diff-struct",
"clap 4.5.41",
"libips",
"miette",
"serde",
"serde_json",
"thiserror 2.0.12",
"tracing",
"tracing-subscriber",
]
[[package]]

View file

@ -40,6 +40,7 @@ walkdir = "2.4.0"
redb = "1.5.0"
bincode = "1.3.3"
rust-ini = "0.21.2"
reqwest = { version = "0.11", features = ["blocking", "json"] }
[features]
default = ["redb-index"]

View file

@ -9,6 +9,9 @@ use std::collections::HashMap;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use thiserror::Error;
use redb::{Database, ReadableTable, TableDefinition};
use crate::repository::{RestBackend, ReadableRepository, RepositoryError};
#[derive(Debug, Error, Diagnostic)]
pub enum ImageError {
@ -32,6 +35,34 @@ pub enum ImageError {
help("Provide a valid path for the image")
)]
InvalidPath(String),
#[error("Repository error: {0}")]
#[diagnostic(
code(ips::image_error::repository),
help("Check the repository configuration and try again")
)]
Repository(#[from] RepositoryError),
#[error("Database error: {0}")]
#[diagnostic(
code(ips::image_error::database),
help("Check the database configuration and try again")
)]
Database(String),
#[error("Publisher not found: {0}")]
#[diagnostic(
code(ips::image_error::publisher_not_found),
help("Check the publisher name and try again")
)]
PublisherNotFound(String),
#[error("No publishers configured")]
#[diagnostic(
code(ips::image_error::no_publishers),
help("Configure at least one publisher before performing this operation")
)]
NoPublishers,
}
pub type Result<T> = std::result::Result<T, ImageError>;
@ -45,6 +76,19 @@ pub enum ImageType {
Partial,
}
/// Represents a publisher configuration in an image.
///
/// A publisher names a package source (`origin`, the base URL handed to the
/// REST backend) plus optional `mirrors`. At most one publisher per image is
/// expected to be the default; the `Image` publisher-management methods
/// maintain that invariant.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Publisher {
    /// Publisher name
    pub name: String,
    /// Publisher origin URL (base URL of the package repository)
    pub origin: String,
    /// Publisher mirror URLs
    pub mirrors: Vec<String>,
    /// Whether this is the default publisher
    pub is_default: bool,
}
/// Represents an IPS image, which can be either a Full image or a Partial image
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Image {
@ -60,6 +104,8 @@ pub struct Image {
variants: HashMap<String, String>,
/// Mediators
mediators: HashMap<String, String>,
/// Publishers
publishers: Vec<Publisher>,
}
impl Image {
@ -72,6 +118,7 @@ impl Image {
variants: HashMap::new(),
mediators: HashMap::new(),
props: vec![],
publishers: vec![],
}
}
@ -84,9 +131,111 @@ impl Image {
variants: HashMap::new(),
mediators: HashMap::new(),
props: vec![],
publishers: vec![],
}
}
/// Add a publisher to the image, or update it in place if a publisher with
/// the same name already exists.
///
/// If `is_default` is true, every other publisher is demoted first so that
/// at most one default publisher exists. The image is saved afterwards to
/// persist the change.
///
/// # Arguments
///
/// * `name` - Publisher name
/// * `origin` - Publisher origin URL
/// * `mirrors` - Publisher mirror URLs
/// * `is_default` - Whether this publisher becomes the default
///
/// # Errors
///
/// Returns an error if persisting the image via `save()` fails.
pub fn add_publisher(&mut self, name: &str, origin: &str, mirrors: Vec<String>, is_default: bool) -> Result<()> {
    // Demote all publishers up front. The previous version attempted this
    // from inside a loop that already held a mutable borrow of
    // `self.publishers` (a nested `for … in &mut self.publishers`), which
    // is a double mutable borrow and does not compile; a single pass here
    // achieves the same invariant.
    if is_default {
        for publisher in &mut self.publishers {
            publisher.is_default = false;
        }
    }

    if let Some(existing) = self.publishers.iter_mut().find(|p| p.name == name) {
        // Update the existing publisher in place.
        existing.origin = origin.to_string();
        existing.mirrors = mirrors;
        existing.is_default = is_default;
    } else {
        // Add a new publisher entry.
        self.publishers.push(Publisher {
            name: name.to_string(),
            origin: origin.to_string(),
            mirrors,
            is_default,
        });
    }

    // Save the image to persist the changes
    self.save()?;

    Ok(())
}
/// Remove a publisher from the image by name.
///
/// If the removed publisher was the default, the first remaining publisher
/// (if any) is promoted to default. The image is saved to persist the
/// change.
///
/// # Errors
///
/// Returns `ImageError::PublisherNotFound` if no publisher with `name`
/// exists, or an error if `save()` fails.
pub fn remove_publisher(&mut self, name: &str) -> Result<()> {
    let before = self.publishers.len();
    self.publishers.retain(|p| p.name != name);

    // Nothing was removed: the name was unknown.
    if self.publishers.len() == before {
        return Err(ImageError::PublisherNotFound(name.to_string()));
    }

    // Promote the first remaining publisher if the default was removed.
    let has_default = self.publishers.iter().any(|p| p.is_default);
    if !has_default {
        if let Some(first) = self.publishers.first_mut() {
            first.is_default = true;
        }
    }

    // Persist the updated publisher list.
    self.save()?;

    Ok(())
}
/// Get the default publisher.
///
/// Returns the publisher marked as default; if none is marked, falls back
/// to the first configured publisher.
///
/// # Errors
///
/// Returns `ImageError::NoPublishers` if no publishers are configured.
pub fn default_publisher(&self) -> Result<&Publisher> {
    self.publishers
        .iter()
        .find(|p| p.is_default)
        .or_else(|| self.publishers.first())
        .ok_or(ImageError::NoPublishers)
}
/// Get a publisher by name.
///
/// # Errors
///
/// Returns `ImageError::PublisherNotFound` if no publisher with `name`
/// is configured.
pub fn get_publisher(&self, name: &str) -> Result<&Publisher> {
    self.publishers
        .iter()
        .find(|p| p.name == name)
        .ok_or_else(|| ImageError::PublisherNotFound(name.to_string()))
}
/// Get all configured publishers as a slice.
pub fn publishers(&self) -> &[Publisher] {
    self.publishers.as_slice()
}
/// Returns the path to the image
pub fn path(&self) -> &Path {
&self.path
@ -110,6 +259,21 @@ impl Image {
self.metadata_dir().join("pkg6.image.json")
}
/// Returns the path to the installed packages database
/// (`<metadata_dir>/installed.redb`).
pub fn installed_db_path(&self) -> PathBuf {
    self.metadata_dir().join("installed.redb")
}
/// Returns the path to the manifest directory
/// (`<metadata_dir>/manifests`).
pub fn manifest_dir(&self) -> PathBuf {
    self.metadata_dir().join("manifests")
}
/// Returns the path to the catalog directory
/// (`<metadata_dir>/catalog`).
pub fn catalog_dir(&self) -> PathBuf {
    self.metadata_dir().join("catalog")
}
/// Creates the metadata directory if it doesn't exist
pub fn create_metadata_dir(&self) -> Result<()> {
let metadata_dir = self.metadata_dir();
@ -121,6 +285,110 @@ impl Image {
})
}
/// Creates the manifest directory if it doesn't exist
pub fn create_manifest_dir(&self) -> Result<()> {
let manifest_dir = self.manifest_dir();
fs::create_dir_all(&manifest_dir).map_err(|e| {
ImageError::IO(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to create manifest directory at {:?}: {}", manifest_dir, e),
))
})
}
/// Creates the catalog directory if it doesn't exist
pub fn create_catalog_dir(&self) -> Result<()> {
let catalog_dir = self.catalog_dir();
fs::create_dir_all(&catalog_dir).map_err(|e| {
ImageError::IO(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to create catalog directory at {:?}: {}", catalog_dir, e),
))
})
}
/// Initialize the installed packages database.
///
/// Creates (or opens) the redb database at `installed_db_path()` and
/// ensures the "packages" table exists by opening it inside a write
/// transaction.
///
/// # Errors
///
/// Returns `ImageError::Database` if the database cannot be created, the
/// transaction cannot be started, the table cannot be opened, or the
/// commit fails.
pub fn init_installed_db(&self) -> Result<()> {
    let db_path = self.installed_db_path();

    // `Database::create` builds the file if it does not exist yet.
    let db = Database::create(&db_path).map_err(|e| {
        ImageError::Database(format!("Failed to create installed packages database: {}", e))
    })?;

    // Table of &str keys to raw byte values; presumably package data —
    // TODO confirm the serialization format against the callers.
    let packages_table = TableDefinition::<&str, &[u8]>::new("packages");

    // Opening the table inside a write transaction creates it on demand.
    let tx = db.begin_write().map_err(|e| {
        ImageError::Database(format!("Failed to begin transaction: {}", e))
    })?;
    tx.open_table(packages_table).map_err(|e| {
        ImageError::Database(format!("Failed to create packages table: {}", e))
    })?;
    tx.commit().map_err(|e| {
        ImageError::Database(format!("Failed to commit transaction: {}", e))
    })?;

    Ok(())
}
/// Download catalogs from every configured publisher.
///
/// Ensures the catalog directory exists, then downloads each publisher's
/// catalog in turn, stopping at the first failure.
///
/// # Errors
///
/// Returns an error if the catalog directory cannot be created or any
/// publisher's catalog download fails.
pub fn download_catalogs(&self) -> Result<()> {
    self.create_catalog_dir()?;

    self.publishers
        .iter()
        .try_for_each(|p| self.download_publisher_catalog(&p.name))
}
/// Download the catalog for a single named publisher.
///
/// Opens a REST backend against the publisher's origin, points its local
/// cache at `<catalog_dir>/<publisher>`, and fetches the catalog without
/// progress reporting.
///
/// # Errors
///
/// Returns an error if the publisher is unknown, the cache directory
/// cannot be created, or the download fails.
pub fn download_publisher_catalog(&self, publisher_name: &str) -> Result<()> {
    let publisher = self.get_publisher(publisher_name)?;

    // Open a REST backend pointed at the publisher's origin URL.
    let mut backend = RestBackend::open(&publisher.origin)?;

    // Cache downloaded catalog files under the per-publisher directory.
    let cache_dir = self.catalog_dir().join(&publisher.name);
    fs::create_dir_all(&cache_dir)?;
    backend.set_local_cache_path(&cache_dir)?;

    // Fetch the catalog; `None` means no progress reporting.
    backend.download_catalog(&publisher.name, None)?;

    Ok(())
}
/// Create a new image with the specified publisher.
///
/// Builds the full on-disk layout (metadata, manifest and catalog
/// directories plus the installed-packages database), registers
/// `publisher_name` as the default publisher with no mirrors, and then
/// downloads that publisher's catalog.
///
/// # Errors
///
/// Fails if any directory or database cannot be created, if the publisher
/// cannot be persisted, or if the catalog download fails.
pub fn create_image<P: AsRef<Path>>(path: P, publisher_name: &str, origin: &str) -> Result<Self> {
    // Create a new image
    let mut image = Image::new_full(path.as_ref().to_path_buf());

    // Create the directory structure
    image.create_metadata_dir()?;
    image.create_manifest_dir()?;
    image.create_catalog_dir()?;

    // Initialize the installed packages database
    image.init_installed_db()?;

    // Add the publisher (becomes the default; also persists the image)
    image.add_publisher(publisher_name, origin, vec![], true)?;

    // Download the catalog
    image.download_publisher_catalog(publisher_name)?;

    Ok(image)
}
/// Saves the image data to the metadata directory
pub fn save(&self) -> Result<()> {
self.create_metadata_dir()?;

View file

@ -201,6 +201,7 @@ impl From<bincode::Error> for RepositoryError {
mod catalog;
mod file_backend;
mod obsoleted;
pub mod progress;
mod rest_backend;
#[cfg(test)]
mod tests;
@ -212,6 +213,7 @@ pub use catalog::{
};
pub use file_backend::FileBackend;
pub use obsoleted::{ObsoletedPackageManager, ObsoletedPackageMetadata};
pub use progress::{ProgressInfo, ProgressReporter, NoopProgressReporter};
pub use rest_backend::RestBackend;
/// Repository configuration filename

View file

@ -0,0 +1,168 @@
// This Source Code Form is subject to the terms of
// the Mozilla Public License, v. 2.0. If a copy of the
// MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use std::fmt;
/// Trait for reporting progress during long-running operations like catalog downloads.
///
/// Implementors of this trait can be passed to methods that support progress reporting,
/// such as `download_catalog` in the `RestBackend`. This allows for flexible progress
/// reporting in different UI contexts (CLI, GUI, etc.).
///
/// NOTE(review): callers in this crate also invoke `finish` on failure
/// paths (after a failed download), so implementations should not assume
/// `finish` implies success.
///
/// # Examples
///
/// ```
/// use libips::repository::progress::{ProgressReporter, ProgressInfo};
///
/// struct SimpleProgressReporter;
///
/// impl ProgressReporter for SimpleProgressReporter {
///     fn start(&self, info: &ProgressInfo) {
///         println!("Starting: {}", info.operation);
///     }
///
///     fn update(&self, info: &ProgressInfo) {
///         if let (Some(current), Some(total)) = (info.current, info.total) {
///             let percentage = (current as f64 / total as f64) * 100.0;
///             println!("{}: {:.1}% ({}/{})", info.operation, percentage, current, total);
///         }
///     }
///
///     fn finish(&self, info: &ProgressInfo) {
///         println!("Finished: {}", info.operation);
///     }
/// }
/// ```
pub trait ProgressReporter {
    /// Called once when an operation starts.
    ///
    /// # Arguments
    ///
    /// * `info` - Information about the operation
    fn start(&self, info: &ProgressInfo);

    /// Called when progress is made during an operation. May be called
    /// zero or more times between `start` and `finish`.
    ///
    /// # Arguments
    ///
    /// * `info` - Information about the operation and current progress
    fn update(&self, info: &ProgressInfo);

    /// Called when an operation terminates (successfully or not).
    ///
    /// # Arguments
    ///
    /// * `info` - Information about the completed operation
    fn finish(&self, info: &ProgressInfo);
}
/// Snapshot of the state of a progress-reporting operation.
#[derive(Debug, Clone)]
pub struct ProgressInfo {
    /// The name of the operation being performed
    pub operation: String,
    /// The current progress value (e.g., bytes downloaded, files processed)
    pub current: Option<u64>,
    /// The total expected value (e.g., total bytes, total files)
    pub total: Option<u64>,
    /// Additional context about the operation (e.g., current file name)
    pub context: Option<String>,
}

impl ProgressInfo {
    /// Create a `ProgressInfo` describing `operation`, with no progress
    /// values or context attached yet.
    pub fn new(operation: impl Into<String>) -> Self {
        Self {
            operation: operation.into(),
            current: None,
            total: None,
            context: None,
        }
    }

    /// Builder-style setter for the current progress value.
    pub fn with_current(self, current: u64) -> Self {
        Self { current: Some(current), ..self }
    }

    /// Builder-style setter for the total expected value.
    pub fn with_total(self, total: u64) -> Self {
        Self { total: Some(total), ..self }
    }

    /// Builder-style setter for the free-form context string
    /// (e.g., the current file name).
    pub fn with_context(self, context: impl Into<String>) -> Self {
        Self { context: Some(context.into()), ..self }
    }
}

impl fmt::Display for ProgressInfo {
    /// Renders as `"<op>"`, `"<op> <current>"`, or
    /// `"<op> <pct>% (<current>/<total>)"`, optionally followed by
    /// `" - <context>"`. A total without a current value prints nothing
    /// beyond the operation name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.operation)?;

        match (self.current, self.total) {
            (Some(current), Some(total)) => {
                let percentage = (current as f64 / total as f64) * 100.0;
                write!(f, " {:.1}% ({}/{})", percentage, current, total)?;
            }
            (Some(current), None) => {
                write!(f, " {}", current)?;
            }
            _ => {}
        }

        if let Some(context) = self.context.as_deref() {
            write!(f, " - {}", context)?;
        }

        Ok(())
    }
}
/// A no-op implementation of ProgressReporter that does nothing.
///
/// This is useful as a default when progress reporting is not needed; the
/// REST backend substitutes it whenever `None` is passed for an optional
/// reporter.
#[derive(Debug, Clone, Copy)]
pub struct NoopProgressReporter;

impl ProgressReporter for NoopProgressReporter {
    // All callbacks intentionally ignore their arguments.
    fn start(&self, _info: &ProgressInfo) {}
    fn update(&self, _info: &ProgressInfo) {}
    fn finish(&self, _info: &ProgressInfo) {}
}

View file

@ -3,18 +3,57 @@
// MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::Write;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
use reqwest::blocking::Client;
use serde_json::Value;
use super::{
PackageContents, PackageInfo, PublisherInfo, ReadableRepository, RepositoryConfig,
RepositoryError, RepositoryInfo, RepositoryVersion, Result, WritableRepository,
NoopProgressReporter, PackageContents, PackageInfo, ProgressInfo, ProgressReporter,
PublisherInfo, ReadableRepository, RepositoryConfig, RepositoryError, RepositoryInfo,
RepositoryVersion, Result, WritableRepository,
};
use super::catalog::CatalogManager;
/// Repository implementation that uses a REST API
/// Repository implementation that uses a REST API to interact with a remote repository.
///
/// This implementation allows downloading catalog files from a remote repository
/// and storing them locally for use by the client. It uses the existing `CatalogAttrs`
/// structure from catalog.rs to parse the downloaded catalog files.
///
/// # Example
///
/// ```no_run
/// use libips::repository::RestBackend;
/// use std::path::Path;
///
/// // Open a connection to a remote repository
/// let mut repo = RestBackend::open("http://pkg.opensolaris.org/release").unwrap();
///
/// // Set a local cache path for downloaded catalog files
/// repo.set_local_cache_path(Path::new("/tmp/pkg_cache")).unwrap();
///
/// // Add a publisher
/// repo.add_publisher("openindiana.org").unwrap();
///
/// // Download catalog files for the publisher (no progress reporting)
/// repo.download_catalog("openindiana.org", None).unwrap();
/// ```
pub struct RestBackend {
    /// The base URI of the repository
    pub uri: String,
    /// The repository configuration
    pub config: RepositoryConfig,
    /// The local path where catalog files are cached
    pub local_cache_path: Option<PathBuf>,
    /// HTTP client for making requests to the repository
    client: Client,
    /// Catalog managers for each publisher, keyed by publisher name
    catalog_managers: HashMap<String, CatalogManager>,
}
impl WritableRepository for RestBackend {
@ -36,6 +75,8 @@ impl WritableRepository for RestBackend {
uri: uri_str,
config,
local_cache_path: None,
client: Client::new(),
catalog_managers: HashMap::new(),
};
// In a real implementation, we would make a REST API call to create the repository structure
@ -57,13 +98,55 @@ impl WritableRepository for RestBackend {
// This is a stub implementation
// In a real implementation, we would make a REST API call to add the publisher
println!("add_publisher called with publisher: {}", publisher);
println!("Current publishers: {:?}", self.config.publishers);
println!("Local cache path: {:?}", self.local_cache_path);
// Add the publisher to the config if it doesn't exist
if !self.config.publishers.contains(&publisher.to_string()) {
self.config.publishers.push(publisher.to_string());
// In a real implementation, we would make a REST API call to create publisher-specific resources
println!("Publisher added to config: {:?}", self.config.publishers);
// Save the updated configuration
self.save_config()?;
println!("Saving configuration...");
match self.save_config() {
Ok(_) => println!("Successfully saved configuration"),
Err(e) => println!("Failed to save configuration: {}", e),
}
} else {
println!("Publisher already exists in config, skipping addition to config");
}
// Always create the publisher directory if we have a local cache path
// This ensures the directory exists even if the publisher was already in the config
if let Some(cache_path) = &self.local_cache_path {
println!("Creating publisher directory...");
let publisher_dir = cache_path.join("publisher").join(publisher);
println!("Publisher directory path: {}", publisher_dir.display());
match fs::create_dir_all(&publisher_dir) {
Ok(_) => println!("Successfully created publisher directory"),
Err(e) => println!("Failed to create publisher directory: {}", e),
}
// Check if the directory was created
println!("Publisher directory exists after creation: {}", publisher_dir.exists());
// Create catalog directory
let catalog_dir = publisher_dir.join("catalog");
println!("Catalog directory path: {}", catalog_dir.display());
match fs::create_dir_all(&catalog_dir) {
Ok(_) => println!("Successfully created catalog directory"),
Err(e) => println!("Failed to create catalog directory: {}", e),
}
// Check if the directory was created
println!("Catalog directory exists after creation: {}", catalog_dir.exists());
debug!("Created publisher directory: {}", publisher_dir.display());
} else {
println!("No local cache path set, skipping directory creation");
}
Ok(())
@ -163,8 +246,19 @@ impl WritableRepository for RestBackend {
/// Refresh repository metadata
fn refresh(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()> {
// This is a stub implementation
// In a real implementation, we would make a REST API call to refresh metadata
// We need to clone self to avoid borrowing issues
let mut cloned_self = RestBackend {
uri: self.uri.clone(),
config: self.config.clone(),
local_cache_path: self.local_cache_path.clone(),
client: Client::new(),
catalog_managers: HashMap::new(),
};
// Check if we have a local cache path
if cloned_self.local_cache_path.is_none() {
return Err(RepositoryError::Other("No local cache path set".to_string()));
}
// Filter publishers if specified
let publishers = if let Some(pub_name) = publisher {
@ -178,16 +272,18 @@ impl WritableRepository for RestBackend {
// For each publisher, refresh metadata
for pub_name in publishers {
println!("Refreshing metadata for publisher: {}", pub_name);
info!("Refreshing metadata for publisher: {}", pub_name);
if !no_catalog {
println!("Refreshing catalog...");
// In a real implementation, we would make a REST API call to refresh the catalog
info!("Refreshing catalog...");
// Download the catalog files
cloned_self.download_catalog(&pub_name, None)?;
}
if !no_index {
println!("Refreshing search index...");
// In a real implementation, we would make a REST API call to refresh the search index
info!("Refreshing search index...");
// In a real implementation, we would refresh the search index
// This would typically involve parsing the catalog files and building an index
}
}
@ -217,19 +313,59 @@ impl WritableRepository for RestBackend {
impl ReadableRepository for RestBackend {
/// Open an existing repository.
///
/// Attempts to fetch publisher information from `<uri>/publisher/0` and
/// seeds the configuration with any publisher names found in the JSON
/// response's `publishers` object. Network and parse failures are logged
/// as warnings rather than returned as errors; if no publishers could be
/// discovered, a default publisher ("openindiana.org") is assumed.
fn open<P: AsRef<Path>>(uri: P) -> Result<Self> {
    let uri_str = uri.as_ref().to_string_lossy().to_string();

    // Create an HTTP client
    let client = Client::new();

    // Fetch the repository configuration from the remote server
    // We'll try to get the publisher information using the publisher endpoint
    let url = format!("{}/publisher/0", uri_str);
    debug!("Fetching repository configuration from: {}", url);

    let mut config = RepositoryConfig::default();

    // Try to fetch publisher information; any failure falls through with
    // an empty publisher list.
    match client.get(&url).send() {
        Ok(response) => {
            if response.status().is_success() {
                // Try to parse the response as JSON
                match response.json::<Value>() {
                    Ok(json) => {
                        // Extract publisher information
                        if let Some(publishers) = json.get("publishers").and_then(|p| p.as_object()) {
                            for (name, _) in publishers {
                                debug!("Found publisher: {}", name);
                                config.publishers.push(name.clone());
                            }
                        }
                    },
                    Err(e) => {
                        warn!("Failed to parse publisher information: {}", e);
                    }
                }
            } else {
                warn!("Failed to fetch publisher information: HTTP status {}", response.status());
            }
        },
        Err(e) => {
            warn!("Failed to connect to repository: {}", e);
        }
    }

    // If we couldn't get any publishers, add a default one
    if config.publishers.is_empty() {
        config.publishers.push("openindiana.org".to_string());
    }

    // Create the repository instance
    Ok(RestBackend {
        uri: uri_str,
        config,
        local_cache_path: None,
        client,
        catalog_managers: HashMap::new(),
    })
}
@ -409,9 +545,408 @@ impl ReadableRepository for RestBackend {
}
impl RestBackend {
/// Sets the local path where catalog files will be cached.
///
/// The directory is created first; the cache path is only recorded once
/// creation succeeds, so a failed call leaves the backend unchanged.
/// (The previous version recorded the path before attempting creation,
/// leaving a dangling cache path on failure, and carried a duplicate
/// summary doc line.)
///
/// # Arguments
///
/// * `path` - The path where catalog files will be stored
///
/// # Returns
///
/// * `Result<()>` - Ok if the path was set successfully, Err otherwise
///
/// # Errors
///
/// Returns an error if the directory could not be created.
pub fn set_local_cache_path<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
    let path = path.as_ref().to_path_buf();

    // Create the directory up front so the recorded path is always usable.
    fs::create_dir_all(&path)?;

    self.local_cache_path = Some(path);
    Ok(())
}
/// Initializes the repository by downloading catalog files for all publishers.
///
/// Call this after setting the local cache path with
/// `set_local_cache_path`; it downloads the catalog files for every
/// publisher in the repository configuration.
///
/// # Arguments
///
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<()>` - Ok if initialization was successful, Err otherwise
///
/// # Errors
///
/// Returns an error if:
/// - No local cache path has been set
/// - Failed to download catalog files for any publisher
pub fn initialize(&mut self, progress: Option<&dyn ProgressReporter>) -> Result<()> {
    // A local cache path is mandatory before any downloads can happen.
    if self.local_cache_path.is_none() {
        return Err(RepositoryError::Other("No local cache path set".to_string()));
    }

    self.download_all_catalogs(progress)
}
/// Get (lazily creating) the catalog manager for a publisher.
///
/// Ensures the per-publisher `publisher/<name>/catalog` directory exists
/// under the local cache path, then returns a cached `CatalogManager`,
/// constructing one on first use.
///
/// # Errors
///
/// Returns an error if no local cache path is set, the directories cannot
/// be created, or the catalog manager cannot be constructed.
fn get_catalog_manager(&mut self, publisher: &str) -> Result<&mut CatalogManager> {
    // A local cache path is required to know where catalogs live on disk.
    let cache_path = self
        .local_cache_path
        .as_ref()
        .ok_or_else(|| RepositoryError::Other("No local cache path set".to_string()))?;

    // Ensure <cache>/publisher/<name>/catalog exists.
    let publisher_dir = cache_path.join("publisher").join(publisher);
    let catalog_dir = publisher_dir.join("catalog");
    fs::create_dir_all(&publisher_dir)?;
    fs::create_dir_all(&catalog_dir)?;

    // Construct the manager on first access, then hand out the cached one.
    if !self.catalog_managers.contains_key(publisher) {
        let manager = CatalogManager::new(&catalog_dir, publisher)?;
        self.catalog_managers.insert(publisher.to_string(), manager);
    }

    Ok(self.catalog_managers.get_mut(publisher).unwrap())
}
/// Downloads a catalog file from the remote server.
///
/// The file is fetched from `<uri>/catalog/1/<file_name>` and returned as
/// raw bytes; nothing is written to disk here (see
/// `download_and_store_catalog_file` for the persisting variant). The
/// progress reporter's `finish` is invoked on both success and failure.
///
/// # Arguments
///
/// * `publisher` - The name of the publisher (used only for progress context)
/// * `file_name` - The name of the catalog file to download
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<Vec<u8>>` - The content of the downloaded file if successful
///
/// # Errors
///
/// Returns an error if:
/// - Failed to connect to the remote server
/// - The HTTP request was not successful
/// - Failed to read the response body
fn download_catalog_file(
    &self,
    publisher: &str,
    file_name: &str,
    progress: Option<&dyn ProgressReporter>,
) -> Result<Vec<u8>> {
    // Use a no-op reporter if none was provided
    let progress = progress.unwrap_or(&NoopProgressReporter);

    // Construct the URL for the catalog file
    let url = format!("{}/catalog/1/{}", self.uri, file_name);
    debug!("Downloading catalog file: {}", url);

    // Create progress info for this operation
    let mut progress_info = ProgressInfo::new(format!("Downloading {}", file_name))
        .with_context(format!("Publisher: {}", publisher));

    // Notify that we're starting the download
    progress.start(&progress_info);

    // Make the HTTP request (blocking client)
    let response = self.client.get(&url)
        .send()
        .map_err(|e| {
            // Report failure — `finish` is also the terminal callback on errors
            progress.finish(&progress_info);
            RepositoryError::Other(format!("Failed to download catalog file: {}", e))
        })?;

    // Check if the request was successful
    if !response.status().is_success() {
        // Report failure
        progress.finish(&progress_info);
        return Err(RepositoryError::Other(format!(
            "Failed to download catalog file: HTTP status {}",
            response.status()
        )));
    }

    // Get the content length if available (servers may omit it)
    if let Some(content_length) = response.content_length() {
        progress_info = progress_info.with_total(content_length);
        progress.update(&progress_info);
    }

    // Read the response body (buffers the whole body in memory)
    let body = response.bytes()
        .map_err(|e| {
            // Report failure
            progress.finish(&progress_info);
            RepositoryError::Other(format!("Failed to read response body: {}", e))
        })?;

    // Update progress with the final size
    progress_info = progress_info.with_current(body.len() as u64);
    if progress_info.total.is_none() {
        progress_info = progress_info.with_total(body.len() as u64);
    }

    // Report completion
    progress.finish(&progress_info);

    Ok(body.to_vec())
}
/// Download a catalog file and store it under the local cache.
///
/// The file is fetched via `download_catalog_file` and written to
/// `<cache>/publisher/<publisher>/catalog/<file_name>`. Storage progress
/// is reported separately from download progress; `finish` is invoked on
/// both success and failure of the store step.
///
/// # Arguments
///
/// * `publisher` - The name of the publisher
/// * `file_name` - The name of the catalog file to download
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<PathBuf>` - The path to the stored file if successful
///
/// # Errors
///
/// Returns an error if:
/// - No local cache path has been set
/// - Failed to create the publisher or catalog directory
/// - Failed to download the catalog file
/// - Failed to create or write to the file
fn download_and_store_catalog_file(
    &mut self,
    publisher: &str,
    file_name: &str,
    progress: Option<&dyn ProgressReporter>,
) -> Result<PathBuf> {
    // Check if we have a local cache path
    let cache_path = match &self.local_cache_path {
        Some(path) => path,
        None => return Err(RepositoryError::Other("No local cache path set".to_string())),
    };

    // Create publisher directory if it doesn't exist
    let publisher_dir = cache_path.join("publisher").join(publisher);
    fs::create_dir_all(&publisher_dir)?;

    // Create catalog directory if it doesn't exist
    let catalog_dir = publisher_dir.join("catalog");
    fs::create_dir_all(&catalog_dir)?;

    // Download the catalog file (progress is forwarded to the download step)
    let content = self.download_catalog_file(publisher, file_name, progress)?;

    // Use a no-op reporter if none was provided
    let progress = progress.unwrap_or(&NoopProgressReporter);

    // Create progress info for storing the file
    let progress_info = ProgressInfo::new(format!("Storing {}", file_name))
        .with_context(format!("Publisher: {}", publisher))
        .with_current(0)
        .with_total(content.len() as u64);

    // Notify that we're starting to store the file
    progress.start(&progress_info);

    // Store the file
    let file_path = catalog_dir.join(file_name);
    let mut file = File::create(&file_path)
        .map_err(|e| {
            // Report failure
            progress.finish(&progress_info);
            RepositoryError::FileWriteError(format!("Failed to create file: {}", e))
        })?;

    file.write_all(&content)
        .map_err(|e| {
            // Report failure
            progress.finish(&progress_info);
            RepositoryError::FileWriteError(format!("Failed to write file: {}", e))
        })?;

    debug!("Stored catalog file: {}", file_path.display());

    // Report completion (current == total == content length)
    let progress_info = progress_info.with_current(content.len() as u64);
    progress.finish(&progress_info);

    Ok(file_path)
}
/// Downloads all catalog files for a specific publisher.
///
/// This method downloads the catalog.attrs file first to determine what catalog parts
/// are available, then downloads each part and loads them into the catalog manager.
/// It uses the existing `CatalogAttrs` structure from catalog.rs to parse the
/// downloaded catalog files.
///
/// Overall progress counts `catalog.attrs` plus each part as one unit; the
/// per-file `progress` reporter is additionally forwarded to each
/// individual download.
///
/// # Arguments
///
/// * `publisher` - The name of the publisher to download catalog files for
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<()>` - Ok if all catalog files were downloaded successfully, Err otherwise
///
/// # Errors
///
/// Returns an error if:
/// - No local cache path has been set
/// - Failed to download the catalog.attrs file
/// - Failed to parse the catalog.attrs file
/// - Failed to download any catalog part
/// - Failed to load any catalog part into the catalog manager
pub fn download_catalog(
    &mut self,
    publisher: &str,
    progress: Option<&dyn ProgressReporter>,
) -> Result<()> {
    // Use a no-op reporter if none was provided
    let progress_reporter = progress.unwrap_or(&NoopProgressReporter);

    // Create progress info for the overall operation
    let mut overall_progress = ProgressInfo::new(format!("Downloading catalog for {}", publisher));

    // Notify that we're starting the download
    progress_reporter.start(&overall_progress);

    // First download catalog.attrs to get the list of available parts
    let attrs_path = self.download_and_store_catalog_file(publisher, "catalog.attrs", progress)?;

    // Parse the catalog.attrs file to get the list of parts
    let attrs_content = fs::read_to_string(&attrs_path)
        .map_err(|e| {
            progress_reporter.finish(&overall_progress);
            RepositoryError::FileReadError(format!("Failed to read catalog.attrs: {}", e))
        })?;

    let attrs: Value = serde_json::from_str(&attrs_content)
        .map_err(|e| {
            progress_reporter.finish(&overall_progress);
            RepositoryError::JsonParseError(format!("Failed to parse catalog.attrs: {}", e))
        })?;

    // Get the list of parts (keys of the "parts" JSON object)
    let parts = attrs["parts"].as_object().ok_or_else(|| {
        progress_reporter.finish(&overall_progress);
        RepositoryError::JsonParseError("Missing 'parts' field in catalog.attrs".to_string())
    })?;

    // Update progress with total number of parts
    let total_parts = parts.len() as u64 + 1; // +1 for catalog.attrs
    overall_progress = overall_progress.with_total(total_parts).with_current(1);
    progress_reporter.update(&overall_progress);

    // Download each part
    for (i, part_name) in parts.keys().enumerate() {
        debug!("Downloading catalog part: {}", part_name);

        // Update progress with current part
        overall_progress = overall_progress.with_current(i as u64 + 2) // +2 because we already downloaded catalog.attrs
            .with_context(format!("Downloading part: {}", part_name));
        progress_reporter.update(&overall_progress);

        self.download_and_store_catalog_file(publisher, part_name, progress)?;
    }

    // Get the catalog manager for this publisher
    let catalog_manager = self.get_catalog_manager(publisher)?;

    // Update progress for loading parts
    overall_progress = overall_progress.with_context("Loading catalog parts".to_string());
    progress_reporter.update(&overall_progress);

    // Load the catalog parts into the manager
    for part_name in parts.keys() {
        catalog_manager.load_part(part_name)?;
    }

    // Report completion
    overall_progress = overall_progress.with_current(total_parts);
    progress_reporter.finish(&overall_progress);

    info!("Downloaded catalog for publisher: {}", publisher);

    Ok(())
}
/// Download catalogs for every configured publisher.
///
/// Iterates over the publishers in the repository configuration and calls
/// [`download_catalog`] for each one, reporting per-publisher progress.
///
/// # Arguments
///
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<()>` - Ok if all catalogs were downloaded successfully, Err otherwise
pub fn download_all_catalogs(&mut self, progress: Option<&dyn ProgressReporter>) -> Result<()> {
    // Without a caller-supplied reporter, fall back to the no-op one.
    let reporter = progress.unwrap_or(&NoopProgressReporter);

    // Snapshot the publisher list first: downloading borrows `self` mutably.
    let publisher_names = self.config.publishers.clone();
    let publisher_count = publisher_names.len() as u64;

    // One ProgressInfo covers the whole multi-publisher operation.
    let mut info = ProgressInfo::new("Downloading all catalogs")
        .with_total(publisher_count)
        .with_current(0);
    reporter.start(&info);

    for (index, name) in publisher_names.iter().enumerate() {
        let completed = index as u64;

        // Announce which publisher is being processed.
        info = info
            .with_current(completed)
            .with_context(format!("Publisher: {}", name));
        reporter.update(&info);

        // The per-publisher download reports its own fine-grained progress.
        self.download_catalog(name, progress)?;

        // Mark this publisher as done.
        info = info.with_current(completed + 1);
        reporter.update(&info);
    }

    reporter.finish(&info);
    Ok(())
}
/// Refresh the catalog for a publisher
///
/// Currently equivalent to [`download_catalog`]: the publisher's catalog
/// files are fully re-downloaded and reloaded (no incremental refresh).
///
/// # Arguments
///
/// * `publisher` - The name of the publisher to refresh
/// * `progress` - Optional progress reporter for tracking download progress
///
/// # Returns
///
/// * `Result<()>` - Ok if the catalog was refreshed successfully, Err otherwise
pub fn refresh_catalog(&mut self, publisher: &str, progress: Option<&dyn ProgressReporter>) -> Result<()> {
    // A refresh is presently a full re-download; delegate directly.
    self.download_catalog(publisher, progress)
}
}

View file

@ -8,12 +8,14 @@ mod tests {
use crate::actions::Manifest;
use crate::fmri::Fmri;
use crate::repository::{
CatalogManager, FileBackend, ReadableRepository, RepositoryError, RepositoryVersion,
Result, WritableRepository, REPOSITORY_CONFIG_FILENAME,
CatalogManager, FileBackend, ProgressInfo, ProgressReporter,
ReadableRepository, RepositoryError, RepositoryVersion, RestBackend, Result, WritableRepository,
REPOSITORY_CONFIG_FILENAME,
};
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use std::sync::{Arc, Mutex};
// The base directory for all test repositories
const TEST_REPO_BASE_DIR: &str = "/tmp/libips_repo_test";
@ -533,4 +535,229 @@ mod tests {
// Clean up
cleanup_test_dir(&test_dir);
}
#[test]
fn test_rest_repository_local_functionality() {
    use crate::repository::RestBackend;

    // Exercises only the local-filesystem side of RestBackend: opening a
    // backend, setting a cache path, and adding a publisher. No network
    // request is made by these calls in this test's assertions.
    let test_dir = create_test_dir("rest_repository");
    let cache_path = test_dir.join("cache");
    println!("Test directory: {}", test_dir.display());
    println!("Cache path: {}", cache_path.display());

    // Open a REST repository handle for a (not contacted) origin URI.
    let uri = "http://pkg.opensolaris.org/release";
    let mut repo = RestBackend::open(uri).unwrap();

    // Point the backend at the local on-disk cache.
    repo.set_local_cache_path(&cache_path).unwrap();
    println!("Local cache path set to: {:?}", repo.local_cache_path);

    // Adding a publisher should both update the config and create the
    // publisher/catalog directory skeleton under the cache path.
    let publisher = "openindiana.org";
    repo.add_publisher(publisher).unwrap();
    println!("Publisher added: {}", publisher);
    println!("Publishers in config: {:?}", repo.config.publishers);

    // Verify the expected directory layout: <cache>/publisher/<name>/catalog
    let publisher_dir = cache_path.join("publisher").join(publisher);
    println!("Publisher directory: {}", publisher_dir.display());
    println!("Publisher directory exists: {}", publisher_dir.exists());
    assert!(publisher_dir.exists(), "Publisher directory should be created");
    let catalog_dir = publisher_dir.join("catalog");
    println!("Catalog directory: {}", catalog_dir.display());
    println!("Catalog directory exists: {}", catalog_dir.exists());
    assert!(catalog_dir.exists(), "Catalog directory should be created");

    // Clean up
    cleanup_test_dir(&test_dir);
}
/// A test progress reporter that records all progress events
///
/// Each event category is stored in its own `Arc<Mutex<Vec<_>>>` so the
/// reporter is cheap to `Clone` (clones share the same logs) and safe to
/// use from the `&self` methods of the `ProgressReporter` trait.
#[derive(Debug, Clone)]
struct TestProgressReporter {
    /// Records of all start events
    start_events: Arc<Mutex<Vec<ProgressInfo>>>,
    /// Records of all update events
    update_events: Arc<Mutex<Vec<ProgressInfo>>>,
    /// Records of all finish events
    finish_events: Arc<Mutex<Vec<ProgressInfo>>>,
}
impl TestProgressReporter {
    /// Construct a reporter with three empty event logs.
    fn new() -> Self {
        TestProgressReporter {
            start_events: Arc::new(Mutex::new(Vec::new())),
            update_events: Arc::new(Mutex::new(Vec::new())),
            finish_events: Arc::new(Mutex::new(Vec::new())),
        }
    }

    /// Take a point-in-time copy of one event log.
    fn snapshot(log: &Mutex<Vec<ProgressInfo>>) -> Vec<ProgressInfo> {
        log.lock().unwrap().clone()
    }

    /// How many `start` events have been recorded.
    fn start_count(&self) -> usize {
        self.start_events.lock().unwrap().len()
    }

    /// How many `update` events have been recorded.
    fn update_count(&self) -> usize {
        self.update_events.lock().unwrap().len()
    }

    /// How many `finish` events have been recorded.
    fn finish_count(&self) -> usize {
        self.finish_events.lock().unwrap().len()
    }

    /// All recorded `start` events, in order.
    fn get_start_events(&self) -> Vec<ProgressInfo> {
        Self::snapshot(&self.start_events)
    }

    /// All recorded `update` events, in order.
    fn get_update_events(&self) -> Vec<ProgressInfo> {
        Self::snapshot(&self.update_events)
    }

    /// All recorded `finish` events, in order.
    fn get_finish_events(&self) -> Vec<ProgressInfo> {
        Self::snapshot(&self.finish_events)
    }
}
/// Record every event by appending a clone of the `ProgressInfo` to the
/// matching log; nothing is printed.
impl ProgressReporter for TestProgressReporter {
    fn start(&self, info: &ProgressInfo) {
        self.start_events.lock().unwrap().push(info.clone());
    }

    fn update(&self, info: &ProgressInfo) {
        self.update_events.lock().unwrap().push(info.clone());
    }

    fn finish(&self, info: &ProgressInfo) {
        self.finish_events.lock().unwrap().push(info.clone());
    }
}
#[test]
fn test_progress_reporter() {
    // A recorder that captures every event it is handed.
    let recorder = TestProgressReporter::new();

    // One bare ProgressInfo and one carrying current/total counters.
    let simple = ProgressInfo::new("Test operation 1");
    let detailed = ProgressInfo::new("Test operation 2")
        .with_current(50)
        .with_total(100);

    // Drive one event of each kind through the reporter.
    recorder.start(&simple);
    recorder.update(&detailed);
    recorder.finish(&simple);

    // Exactly one event per category should have been captured.
    assert_eq!(recorder.start_count(), 1);
    assert_eq!(recorder.update_count(), 1);
    assert_eq!(recorder.finish_count(), 1);

    // The captured events must preserve the original payloads.
    let starts = recorder.get_start_events();
    let updates = recorder.get_update_events();
    let finishes = recorder.get_finish_events();
    assert_eq!(starts[0].operation, "Test operation 1");
    assert_eq!(updates[0].operation, "Test operation 2");
    assert_eq!(updates[0].current, Some(50));
    assert_eq!(updates[0].total, Some(100));
    assert_eq!(finishes[0].operation, "Test operation 1");
}
#[test]
fn test_rest_backend_with_progress() {
    // This test is a mock test that doesn't actually connect to a remote server.
    // It just verifies that the progress reporting mechanism works correctly:
    // download_catalog is expected to fail on the network, but it must still
    // emit start/update/finish events to the supplied reporter.
    let test_dir = create_test_dir("rest_progress");
    let cache_path = test_dir.join("cache");

    // Create a REST repository (the URI is never actually contacted
    // successfully in this test).
    let uri = "http://pkg.opensolaris.org/release";
    let mut repo = RestBackend::create(uri, RepositoryVersion::V4).unwrap();

    // Set the local cache path
    repo.set_local_cache_path(&cache_path).unwrap();

    // Create a test progress reporter that records every event.
    let reporter = TestProgressReporter::new();

    // Add a publisher
    let publisher = "test";
    repo.add_publisher(publisher).unwrap();

    // Pre-create a mock catalog.attrs in the cache so download_catalog has
    // something to parse even though the HTTP fetch cannot succeed.
    let publisher_dir = cache_path.join("publisher").join(publisher);
    let catalog_dir = publisher_dir.join("catalog");
    fs::create_dir_all(&catalog_dir).unwrap();
    let attrs_content = r#"{
    "created": "20250803T124900Z",
    "last-modified": "20250803T124900Z",
    "package-count": 100,
    "package-version-count": 200,
    "parts": {
        "catalog.base.C": {
            "last-modified": "20250803T124900Z"
        },
        "catalog.dependency.C": {
            "last-modified": "20250803T124900Z"
        },
        "catalog.summary.C": {
            "last-modified": "20250803T124900Z"
        }
    },
    "version": 1
}"#;
    let attrs_path = catalog_dir.join("catalog.attrs");
    fs::write(&attrs_path, attrs_content).unwrap();

    // Create mock catalog part files matching the parts listed above.
    for part_name in ["catalog.base.C", "catalog.dependency.C", "catalog.summary.C"] {
        let part_path = catalog_dir.join(part_name);
        fs::write(&part_path, "{}").unwrap();
    }

    // Mock the download_catalog_file method to avoid actual HTTP requests.
    // This is done by creating the files before calling download_catalog.

    // Seed one update event directly so the update assertion below holds
    // even if download_catalog bails out before its first update.
    let progress_info = ProgressInfo::new("Test update")
        .with_current(1)
        .with_total(2);
    reporter.update(&progress_info);

    // Call download_catalog with the progress reporter.
    // This will fail because we're not actually connecting to a server,
    // but we can still verify that the progress reporter was called;
    // the Result is intentionally discarded.
    let _ = repo.download_catalog(publisher, Some(&reporter));

    // Check that the progress reporter was called at least once per phase.
    assert!(reporter.start_count() > 0, "No start events recorded");
    assert!(reporter.update_count() > 0, "No update events recorded");
    assert!(reporter.finish_count() > 0, "No finish events recorded");

    // Clean up
    cleanup_test_dir(&test_dir);
}
}

View file

@ -10,5 +10,10 @@ keywords.workspace = true
[dependencies]
libips = { version = "*", path = "../libips" }
diff-struct = "0.5.3"
clap = { version = "4", features = ["derive"] }
miette = { version = "7", features = ["fancy"] }
thiserror = "2"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
serde = { version = "1.0.207", features = ["derive"] }
serde_json = "1"

71
pkg6/src/error.rs Normal file
View file

@ -0,0 +1,71 @@
use libips::fmri::FmriError;
use libips::image::ImageError;
use miette::Diagnostic;
use thiserror::Error;
/// Result type for pkg6 operations
pub type Result<T> = std::result::Result<T, Pkg6Error>;
/// Errors that can occur in pkg6 operations
///
/// Each variant carries a `miette` diagnostic code and help text so errors
/// render with actionable guidance on the CLI.
#[derive(Debug, Error, Diagnostic)]
pub enum Pkg6Error {
    /// Operating-system level I/O failure, converted from `std::io::Error`.
    #[error("I/O error: {0}")]
    #[diagnostic(
        code(pkg6::io_error),
        help("Check system resources and permissions")
    )]
    IoError(#[from] std::io::Error),

    /// JSON (de)serialization failure, converted from `serde_json::Error`.
    #[error("JSON error: {0}")]
    #[diagnostic(
        code(pkg6::json_error),
        help("Check the JSON format and try again")
    )]
    JsonError(#[from] serde_json::Error),

    /// Invalid or unparsable package FMRI, converted from `libips::fmri::FmriError`.
    #[error("FMRI error: {0}")]
    #[diagnostic(
        code(pkg6::fmri_error),
        help("Check the package FMRI format and try again")
    )]
    FmriError(#[from] FmriError),

    /// Image-level failure, converted from `libips::image::ImageError`.
    #[error("Image error: {0}")]
    #[diagnostic(
        code(pkg6::image_error),
        help("Check the image configuration and try again")
    )]
    ImageError(#[from] ImageError),

    /// Failure while configuring the tracing/logging environment filter.
    #[error("logging environment setup error: {0}")]
    #[diagnostic(
        code(pkg6::logging_env_error),
        help("Check your logging environment configuration and try again")
    )]
    LoggingEnvError(String),

    /// A user-requested output format that pkg6 does not implement.
    #[error("unsupported output format: {0}")]
    #[diagnostic(
        code(pkg6::unsupported_output_format),
        help("Supported output formats: table, json, tsv")
    )]
    UnsupportedOutputFormat(String),

    /// Catch-all for errors that do not fit any other variant; see the
    /// `From<String>` / `From<&str>` impls below.
    #[error("other error: {0}")]
    #[diagnostic(code(pkg6::other_error), help("See error message for details"))]
    Other(String),
}
/// Convert a string to a Pkg6Error::Other
impl From<String> for Pkg6Error {
fn from(s: String) -> Self {
Pkg6Error::Other(s)
}
}
/// Promote a string slice into the catch-all [`Pkg6Error::Other`] variant.
impl From<&str> for Pkg6Error {
    fn from(s: &str) -> Self {
        Pkg6Error::Other(s.to_owned())
    }
}

View file

@ -1,112 +1,816 @@
use diff::Diff;
use libips::actions::File;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
mod error;
use error::{Pkg6Error, Result};
#[derive(Serialize, Deserialize, Debug, Clone, Diff)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]
struct Manifest {
#[serde(skip_serializing_if = "HashMap::is_empty")]
files: HashMap<String, File>,
use clap::{Parser, Subcommand};
use libips::fmri::Fmri;
use libips::image::Publisher;
use serde::Serialize;
use std::path::PathBuf;
use std::io::Write;
use tracing::{debug, error, info};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::{EnvFilter, fmt};
/// Wrapper struct for publisher output in JSON format
///
/// Serializes as `{"publishers": [...]}` so the JSON output has a stable
/// top-level key rather than a bare array.
#[derive(Serialize)]
struct PublishersOutput {
    /// All publishers selected for display.
    publishers: Vec<PublisherOutput>,
}
fn main() {
let base = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"12ds3".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
]),
/// Serializable struct for publisher information
#[derive(Serialize)]
struct PublisherOutput {
    /// Publisher name as configured in the image.
    name: String,
    /// Primary origin URL for this publisher.
    origin: String,
    /// Mirror URLs; may be empty.
    mirrors: Vec<String>,
    /// Whether this publisher is the image's default.
    is_default: bool,
    /// Catalog directory path; only populated in verbose output, and
    /// omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    catalog_dir: Option<String>,
}
/// pkg6 - Image Packaging System client
///
/// The pkg command is used to manage the software installed on an image.
/// An image can be a boot environment, a zone, or a non-global zone.
///
/// The pkg command manages the retrieval, installation, update, and removal
/// of software packages for the OpenIndiana operating system.
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None)]
#[clap(propagate_version = true)]
struct App {
    /// The subcommand to run (refresh, install, publisher, ...).
    #[clap(subcommand)]
    command: Commands,
}
#[derive(Subcommand, Debug)]
enum Commands {
/// Update the list of available packages and patches
///
/// The refresh command updates the local package catalog, retrieving
/// the latest list of available packages from the configured publishers.
Refresh {
/// Perform a full refresh, retrieving all package metadata
#[clap(long)]
full: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Publishers to refresh (default: all)
publishers: Vec<String>,
},
/// Install or update packages
///
/// The install command installs or updates packages from the configured
/// publishers. If a package is already installed, it will be updated to
/// the newest version available.
Install {
/// Dry run, don't make actual changes
#[clap(short)]
dry_run: bool,
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Number of concurrent operations
#[clap(short = 'C')]
concurrency: Option<usize>,
/// Additional package repository to use
#[clap(short = 'g')]
repo: Vec<String>,
/// Accept all licenses
#[clap(long)]
accept: bool,
/// Show all licenses
#[clap(long)]
licenses: bool,
/// Don't update the search index
#[clap(long)]
no_index: bool,
/// Don't refresh the catalog
#[clap(long)]
no_refresh: bool,
/// Packages to install
pkg_fmri_patterns: Vec<String>,
},
/// Install packages while removing all other packages
///
/// The exact-install command installs the specified packages and removes
/// all other packages. This is useful for creating a clean installation
/// with only the specified packages.
ExactInstall {
/// Dry run, don't make actual changes
#[clap(short)]
dry_run: bool,
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Number of concurrent operations
#[clap(short = 'C')]
concurrency: Option<usize>,
/// Additional package repository to use
#[clap(short = 'g')]
repo: Vec<String>,
/// Accept all licenses
#[clap(long)]
accept: bool,
/// Show all licenses
#[clap(long)]
licenses: bool,
/// Don't update the search index
#[clap(long)]
no_index: bool,
/// Don't refresh the catalog
#[clap(long)]
no_refresh: bool,
/// Packages to install
pkg_fmri_patterns: Vec<String>,
},
/// Remove packages
///
/// The uninstall command removes installed packages from the system.
Uninstall {
/// Dry run, don't make actual changes
#[clap(short)]
dry_run: bool,
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Packages to remove
pkg_fmri_patterns: Vec<String>,
},
/// Update packages to newer versions
///
/// The update command updates installed packages to the newest versions
/// available from the configured publishers.
Update {
/// Dry run, don't make actual changes
#[clap(short)]
dry_run: bool,
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Number of concurrent operations
#[clap(short = 'C')]
concurrency: Option<usize>,
/// Additional package repository to use
#[clap(short = 'g')]
repo: Vec<String>,
/// Accept all licenses
#[clap(long)]
accept: bool,
/// Show all licenses
#[clap(long)]
licenses: bool,
/// Don't update the search index
#[clap(long)]
no_index: bool,
/// Don't refresh the catalog
#[clap(long)]
no_refresh: bool,
/// Packages to update (default: all)
pkg_fmri_patterns: Vec<String>,
},
/// List installed packages
///
/// The list command displays information about installed packages.
List {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
/// Packages to list (default: all)
pkg_fmri_patterns: Vec<String>,
},
/// Display information about packages
///
/// The info command displays detailed information about packages.
Info {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
/// Packages to show information about
pkg_fmri_patterns: Vec<String>,
},
/// Search for packages
///
/// The search command searches for packages matching the specified query.
Search {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
/// Search query
query: String,
},
/// Verify installation of packages
///
/// The verify command verifies that installed packages match their
/// manifest and that all files are present and have the correct
/// permissions and checksums.
Verify {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Packages to verify (default: all)
pkg_fmri_patterns: Vec<String>,
},
/// Fix package installation problems
///
/// The fix command repairs packages with missing or corrupt files.
Fix {
/// Dry run, don't make actual changes
#[clap(short)]
dry_run: bool,
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Packages to fix (default: all)
pkg_fmri_patterns: Vec<String>,
},
/// Show history of package operations
///
/// The history command displays the history of package operations.
History {
/// Number of entries to show
#[clap(short = 'n')]
count: Option<usize>,
/// Show full details
#[clap(short)]
full: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
},
/// List contents of packages
///
/// The contents command lists the contents of packages.
Contents {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Quiet mode, show less output
#[clap(short)]
quiet: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
/// Packages to list contents of
pkg_fmri_patterns: Vec<String>,
},
/// Set publisher properties
///
/// The set-publisher command sets properties for publishers.
SetPublisher {
/// Publisher name
#[clap(short = 'p')]
publisher: String,
/// Publisher origin URL
#[clap(short = 'O')]
origin: Option<String>,
/// Publisher mirror URL
#[clap(short = 'M')]
mirror: Option<Vec<String>>,
},
/// Remove a publisher
///
/// The unset-publisher command removes a publisher.
UnsetPublisher {
/// Publisher name
publisher: String,
},
/// Display publisher information
///
/// The publisher command displays information about publishers.
Publisher {
/// Verbose output
#[clap(short)]
verbose: bool,
/// Output format (default: table)
#[clap(short = 'o')]
output_format: Option<String>,
/// Publishers to show information about (default: all)
publishers: Vec<String>,
},
/// Create an image
///
/// The image-create command creates a new image.
ImageCreate {
/// Full path to the image to create
#[clap(short = 'F')]
full_path: PathBuf,
/// Publisher to use
#[clap(short = 'p')]
publisher: String,
/// Publisher origin URL
#[clap(short = 'g')]
origin: String,
},
}
fn main() -> Result<()> {
// Add debug statement at the very beginning
eprintln!("MAIN: Starting pkg6 command");
// Initialize the tracing subscriber with the default log level as debug and no decorations
// Parse the environment filter first, handling any errors with our custom error type
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::WARN.into())
.from_env()
.map_err(|e| {
Pkg6Error::LoggingEnvError(format!("Failed to parse environment filter: {}", e))
})?;
fmt::Subscriber::builder()
.with_max_level(tracing::Level::DEBUG)
.with_env_filter(env_filter)
.without_time()
.with_target(false)
.with_ansi(false)
.with_writer(std::io::stderr)
.init();
eprintln!("MAIN: Parsing command line arguments");
let cli = App::parse();
// Print the command that was parsed
match &cli.command {
Commands::Publisher { .. } => eprintln!("MAIN: Publisher command detected"),
_ => eprintln!("MAIN: Other command detected: {:?}", cli.command),
};
let new_set = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
match &cli.command {
Commands::Refresh { full, quiet, publishers } => {
info!("Refreshing package catalog");
debug!("Full refresh: {}", full);
debug!("Quiet mode: {}", quiet);
debug!("Publishers: {:?}", publishers);
// Stub implementation
info!("Refresh completed successfully");
Ok(())
},
Commands::Install { dry_run, verbose, quiet, concurrency, repo, accept, licenses, no_index, no_refresh, pkg_fmri_patterns } => {
info!("Installing packages: {:?}", pkg_fmri_patterns);
debug!("Dry run: {}", dry_run);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Concurrency: {:?}", concurrency);
debug!("Additional repos: {:?}", repo);
debug!("Accept licenses: {}", accept);
debug!("Show licenses: {}", licenses);
debug!("No index update: {}", no_index);
debug!("No refresh: {}", no_refresh);
// Stub implementation
info!("Installation completed successfully");
Ok(())
},
Commands::ExactInstall { dry_run, verbose, quiet, concurrency, repo, accept, licenses, no_index, no_refresh, pkg_fmri_patterns } => {
info!("Exact-installing packages: {:?}", pkg_fmri_patterns);
debug!("Dry run: {}", dry_run);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Concurrency: {:?}", concurrency);
debug!("Additional repos: {:?}", repo);
debug!("Accept licenses: {}", accept);
debug!("Show licenses: {}", licenses);
debug!("No index update: {}", no_index);
debug!("No refresh: {}", no_refresh);
// Stub implementation
info!("Exact-installation completed successfully");
Ok(())
},
Commands::Uninstall { dry_run, verbose, quiet, pkg_fmri_patterns } => {
info!("Uninstalling packages: {:?}", pkg_fmri_patterns);
debug!("Dry run: {}", dry_run);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
// Stub implementation
info!("Uninstallation completed successfully");
Ok(())
},
Commands::Update { dry_run, verbose, quiet, concurrency, repo, accept, licenses, no_index, no_refresh, pkg_fmri_patterns } => {
info!("Updating packages: {:?}", pkg_fmri_patterns);
debug!("Dry run: {}", dry_run);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Concurrency: {:?}", concurrency);
debug!("Additional repos: {:?}", repo);
debug!("Accept licenses: {}", accept);
debug!("Show licenses: {}", licenses);
debug!("No index update: {}", no_index);
debug!("No refresh: {}", no_refresh);
// Stub implementation
info!("Update completed successfully");
Ok(())
},
Commands::List { verbose, quiet, output_format, pkg_fmri_patterns } => {
info!("Listing packages: {:?}", pkg_fmri_patterns);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Output format: {:?}", output_format);
// Stub implementation
info!("List completed successfully");
Ok(())
},
Commands::Info { verbose, quiet, output_format, pkg_fmri_patterns } => {
info!("Showing info for packages: {:?}", pkg_fmri_patterns);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Output format: {:?}", output_format);
// Stub implementation
info!("Info completed successfully");
Ok(())
},
Commands::Search { verbose, quiet, output_format, query } => {
info!("Searching for packages matching: {}", query);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Output format: {:?}", output_format);
// Stub implementation
info!("Search completed successfully");
Ok(())
},
Commands::Verify { verbose, quiet, pkg_fmri_patterns } => {
info!("Verifying packages: {:?}", pkg_fmri_patterns);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
// Stub implementation
info!("Verification completed successfully");
Ok(())
},
Commands::Fix { dry_run, verbose, quiet, pkg_fmri_patterns } => {
info!("Fixing packages: {:?}", pkg_fmri_patterns);
debug!("Dry run: {}", dry_run);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
// Stub implementation
info!("Fix completed successfully");
Ok(())
},
Commands::History { count, full, output_format } => {
info!("Showing history");
debug!("Count: {:?}", count);
debug!("Full: {}", full);
debug!("Output format: {:?}", output_format);
// Stub implementation
info!("History completed successfully");
Ok(())
},
Commands::Contents { verbose, quiet, output_format, pkg_fmri_patterns } => {
info!("Showing contents for packages: {:?}", pkg_fmri_patterns);
debug!("Verbose: {}", verbose);
debug!("Quiet: {}", quiet);
debug!("Output format: {:?}", output_format);
// Stub implementation
info!("Contents completed successfully");
Ok(())
},
Commands::SetPublisher { publisher, origin, mirror } => {
info!("Setting publisher: {}", publisher);
debug!("Origin: {:?}", origin);
debug!("Mirror: {:?}", mirror);
// Get the current working directory as the default image path
let current_dir = std::env::current_dir()?;
// Try to load the image from the current directory
let mut image = match libips::image::Image::load(&current_dir) {
Ok(img) => img,
Err(e) => {
error!("Failed to load image from current directory: {}", e);
error!("Make sure you are in an image directory or use pkg6 image-create first");
return Err(e.into());
}
};
// Convert mirror to Vec<String> if provided
let mirrors = match mirror {
Some(m) => m.clone(),
None => vec![],
};
// If origin is provided, update the publisher
if let Some(origin_url) = origin {
// Add or update the publisher
image.add_publisher(&publisher, &origin_url, mirrors, true)?;
info!("Publisher {} configured with origin: {}", publisher, origin_url);
// Download the catalog
image.download_publisher_catalog(&publisher)?;
info!("Catalog downloaded from publisher: {}", publisher);
} else {
// If no origin is provided, just set the publisher as default if it exists
let pub_result = image.get_publisher(&publisher);
if let Ok(pub_info) = pub_result {
// Store the necessary information
let origin = pub_info.origin.clone();
let mirrors = pub_info.mirrors.clone();
// Add the publisher again with is_default=true to make it the default
image.add_publisher(&publisher, &origin, mirrors, true)?;
info!("Publisher {} set as default", publisher);
} else {
error!("Publisher {} not found and no origin provided", publisher);
return Err(libips::image::ImageError::PublisherNotFound(publisher.clone()).into());
}
}
info!("Set-publisher completed successfully");
Ok(())
},
Commands::UnsetPublisher { publisher } => {
info!("Unsetting publisher: {}", publisher);
// Get the current working directory as the default image path
let current_dir = std::env::current_dir()?;
// Try to load the image from the current directory
let mut image = match libips::image::Image::load(&current_dir) {
Ok(img) => img,
Err(e) => {
error!("Failed to load image from current directory: {}", e);
error!("Make sure you are in an image directory or use pkg6 image-create first");
return Err(e.into());
}
};
// Remove the publisher
image.remove_publisher(&publisher)?;
info!("Publisher {} removed successfully", publisher);
info!("Unset-publisher completed successfully");
Ok(())
},
Commands::Publisher { verbose, output_format, publishers } => {
info!("Showing publisher information");
// Get the current working directory as the default image path
let current_dir = std::env::current_dir()?;
// Determine the path to the image configuration file
let image_json_path = match libips::image::ImageType::Full {
libips::image::ImageType::Full => current_dir.join("var/pkg/pkg6.image.json"),
libips::image::ImageType::Partial => current_dir.join(".pkg/pkg6.image.json"),
};
// Check if the image configuration file exists
if !image_json_path.exists() {
error!("Image configuration file not found at {}", image_json_path.display());
error!("Make sure you are in an image directory or use pkg6 image-create first");
return Err(Pkg6Error::from(format!("Image configuration file not found at {}", image_json_path.display())));
}
// Read the image configuration file
let image_json = std::fs::read_to_string(&image_json_path)?;
// Parse the image configuration file
let image: libips::image::Image = serde_json::from_str(&image_json)?;
// Get all publishers
let all_publishers = image.publishers();
// Filter publishers if specified
let filtered_publishers: Vec<_> = if publishers.is_empty() {
all_publishers.to_vec()
} else {
all_publishers
.iter()
.filter(|p| publishers.contains(&p.name))
.cloned()
.collect()
};
// Handle case where no publishers are found
if filtered_publishers.is_empty() {
if publishers.is_empty() {
println!("No publishers configured");
} else {
println!("No matching publishers found");
}
return Ok(());
}
// Determine the output format, defaulting to "table" if not specified
let output_format_str = output_format.as_deref().unwrap_or("table");
// Create a vector of PublisherOutput structs for serialization and display
let publisher_outputs: Vec<PublisherOutput> = filtered_publishers
.iter()
.map(|p| {
let catalog_dir = if *verbose {
let dir = match image.image_type() {
libips::image::ImageType::Full => current_dir.join("var/pkg/catalog"),
libips::image::ImageType::Partial => current_dir.join(".pkg/catalog"),
};
Some(dir.join(&p.name).display().to_string())
} else {
None
};
PublisherOutput {
name: p.name.clone(),
origin: p.origin.clone(),
mirrors: p.mirrors.clone(),
is_default: p.is_default,
catalog_dir,
}
})
.collect();
// Display publisher information based on the output format
match output_format_str {
"table" => {
// Display in table format (human-readable)
// This is the default format and displays the information in a user-friendly way
for publisher in &publisher_outputs {
println!("Publisher: {}", publisher.name);
println!(" Origin: {}", publisher.origin);
if !publisher.mirrors.is_empty() {
println!(" Mirrors:");
for mirror in &publisher.mirrors {
println!(" {}", mirror);
}
}
println!(" Default: {}", if publisher.is_default { "Yes" } else { "No" });
if let Some(catalog_dir) = &publisher.catalog_dir {
println!(" Catalog directory: {}", catalog_dir);
}
println!();
// Explicitly flush stdout after each publisher to ensure output is displayed
let _ = std::io::stdout().flush();
}
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
"json" => {
// Display in JSON format
// This format is useful for programmatic access to the publisher information
let output = PublishersOutput {
publishers: publisher_outputs,
};
let json = serde_json::to_string_pretty(&output)
.unwrap_or_else(|e| format!("{{\"error\": \"{}\"}}", e));
println!("{}", json);
let _ = std::io::stdout().flush();
},
),
]),
};
let d = base.diff(&new_set);
println!("{:#?}", d);
"tsv" => {
// Display in TSV format (tab-separated values)
// This format is useful for importing into spreadsheets or other data processing tools
// Print header
println!("NAME\tORIGIN\tMIRRORS\tDEFAULT\tCATALOG_DIR");
// Print each publisher
for publisher in &publisher_outputs {
let mirrors = publisher.mirrors.join(",");
let default = if publisher.is_default { "Yes" } else { "No" };
let catalog_dir = publisher.catalog_dir.as_deref().unwrap_or("");
println!("{}\t{}\t{}\t{}\t{}",
publisher.name,
publisher.origin,
mirrors,
default,
catalog_dir
);
let _ = std::io::stdout().flush();
}
},
_ => {
// Unsupported format
return Err(Pkg6Error::UnsupportedOutputFormat(output_format_str.to_string()));
}
}
info!("Publisher completed successfully");
Ok(())
},
Commands::ImageCreate { full_path, publisher, origin } => {
info!("Creating image at: {}", full_path.display());
debug!("Publisher: {}", publisher);
debug!("Origin: {}", origin);
// Create the image
let image = libips::image::Image::create_image(&full_path, &publisher, &origin)?;
info!("Image created successfully at: {}", full_path.display());
info!("Publisher {} configured with origin: {}", publisher, origin);
info!("Catalog downloaded from publisher: {}", publisher);
Ok(())
},
}
}