2024-08-14 20:02:29 +02:00
|
|
|
mod properties;
|
2025-08-02 22:12:37 +02:00
|
|
|
#[cfg(test)]
|
|
|
|
|
mod tests;
|
2024-08-14 20:02:29 +02:00
|
|
|
|
2025-07-26 15:33:39 +02:00
|
|
|
use miette::Diagnostic;
|
2025-07-27 15:22:49 +02:00
|
|
|
use properties::*;
|
2025-12-22 20:10:17 +01:00
|
|
|
use redb::{Database, ReadableDatabase, ReadableTable};
|
2025-07-26 12:54:01 +02:00
|
|
|
use serde::{Deserialize, Serialize};
|
2024-08-14 20:02:29 +02:00
|
|
|
use std::collections::HashMap;
|
2025-08-02 22:12:37 +02:00
|
|
|
use std::fs::{self, File};
|
2024-08-14 20:02:29 +02:00
|
|
|
use std::path::{Path, PathBuf};
|
|
|
|
|
use thiserror::Error;
|
2025-08-03 14:28:36 +02:00
|
|
|
|
2025-12-22 20:10:17 +01:00
|
|
|
use crate::repository::{FileBackend, ReadableRepository, RepositoryError, RestBackend};
|
2025-08-04 22:01:38 +02:00
|
|
|
|
|
|
|
|
// Export the catalog module
|
|
|
|
|
pub mod catalog;
|
2025-12-22 20:10:17 +01:00
|
|
|
use catalog::{INCORPORATE_TABLE, ImageCatalog, PackageInfo};
|
2025-08-04 22:01:38 +02:00
|
|
|
|
|
|
|
|
// Export the installed packages module
|
|
|
|
|
pub mod installed;
|
2025-08-19 11:06:48 +02:00
|
|
|
// Export the action plan module
|
|
|
|
|
pub mod action_plan;
|
2025-08-04 22:01:38 +02:00
|
|
|
use installed::{InstalledPackageInfo, InstalledPackages};
|
|
|
|
|
|
|
|
|
|
// Include tests
|
|
|
|
|
#[cfg(test)]
|
|
|
|
|
mod installed_tests;
|
2024-08-14 20:02:29 +02:00
|
|
|
|
2025-07-26 15:33:39 +02:00
|
|
|
/// Error type for all image-level operations: filesystem access, metadata
/// JSON (de)serialization, publisher configuration, repository access, and
/// the redb-backed catalog/installed databases.
#[derive(Debug, Error, Diagnostic)]
pub enum ImageError {
    /// Underlying filesystem/I/O failure.
    #[error("I/O error: {0}")]
    #[diagnostic(
        code(ips::image_error::io),
        help("Check system resources and permissions")
    )]
    IO(#[from] std::io::Error),

    /// Failure while (de)serializing image metadata as JSON.
    #[error("JSON error: {0}")]
    #[diagnostic(
        code(ips::image_error::json),
        help("Check the JSON format and try again")
    )]
    Json(#[from] serde_json::Error),

    /// The supplied image path is not usable.
    #[error("Invalid image path: {0}")]
    #[diagnostic(
        code(ips::image_error::invalid_path),
        help("Provide a valid path for the image")
    )]
    InvalidPath(String),

    /// Error bubbled up from a repository backend (file or REST).
    #[error("Repository error: {0}")]
    #[diagnostic(
        code(ips::image_error::repository),
        help("Check the repository configuration and try again")
    )]
    Repository(#[from] RepositoryError),

    /// Error from one of the redb databases. Carries only a message: the
    /// original error is stringified at each call site.
    #[error("Database error: {0}")]
    #[diagnostic(
        code(ips::image_error::database),
        help("Check the database configuration and try again")
    )]
    Database(String),

    /// No publisher with the given name is configured in this image.
    #[error("Publisher not found: {0}")]
    #[diagnostic(
        code(ips::image_error::publisher_not_found),
        help("Check the publisher name and try again")
    )]
    PublisherNotFound(String),

    /// The operation requires at least one configured publisher.
    #[error("No publishers configured")]
    #[diagnostic(
        code(ips::image_error::no_publishers),
        help("Configure at least one publisher before performing this operation")
    )]
    NoPublishers,
}
|
|
|
|
|
|
|
|
|
|
/// Convenience alias: image operations fail with [`ImageError`].
pub type Result<T> = std::result::Result<T, ImageError>;
|
|
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Type of image, either Full (base path of "/") or Partial (attached to a full image).
///
/// The variant determines where image metadata is stored (see `metadata_dir`):
/// `var/pkg` under the image root for a full image, `.pkg` for a partial one.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub enum ImageType {
    /// Full image with base path of "/"
    Full,
    /// Partial image attached to a full image
    Partial,
}
|
|
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Represents a publisher configuration in an image.
///
/// Publishers are stored in the image metadata and persisted via `save()`.
/// At most one publisher is expected to have `is_default` set (enforced by
/// `add_publisher`/`remove_publisher`).
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Publisher {
    /// Publisher name
    pub name: String,
    /// Publisher origin URL (an http(s) URL or a `file://` path)
    pub origin: String,
    /// Publisher mirror URLs
    pub mirrors: Vec<String>,
    /// Whether this is the default publisher
    pub is_default: bool,
}
|
|
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Represents an IPS image, which can be either a Full image or a Partial image
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Image {
    /// Path to the image (the image root directory)
    path: PathBuf,
    /// Type of image (Full or Partial)
    image_type: ImageType,
    /// Image properties
    props: Vec<ImageProperty>,
    /// Image version (constructors set this to 5; presumably the on-disk
    /// metadata format version — TODO confirm)
    version: i32,
    /// Variants
    variants: HashMap<String, String>,
    /// Mediators
    mediators: HashMap<String, String>,
    /// Publishers configured for this image
    publishers: Vec<Publisher>,
}
|
|
|
|
|
|
|
|
|
|
impl Image {
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Creates a new Full image at the specified path
|
|
|
|
|
pub fn new_full<P: Into<PathBuf>>(path: P) -> Image {
|
2025-07-26 12:54:01 +02:00
|
|
|
Image {
|
2024-08-14 20:02:29 +02:00
|
|
|
path: path.into(),
|
2025-08-02 22:12:37 +02:00
|
|
|
image_type: ImageType::Full,
|
2024-08-14 20:02:29 +02:00
|
|
|
version: 5,
|
|
|
|
|
variants: HashMap::new(),
|
|
|
|
|
mediators: HashMap::new(),
|
|
|
|
|
props: vec![],
|
2025-08-03 14:28:36 +02:00
|
|
|
publishers: vec![],
|
2024-08-14 20:02:29 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Creates a new Partial image at the specified path
|
|
|
|
|
pub fn new_partial<P: Into<PathBuf>>(path: P) -> Image {
|
|
|
|
|
Image {
|
|
|
|
|
path: path.into(),
|
|
|
|
|
image_type: ImageType::Partial,
|
|
|
|
|
version: 5,
|
|
|
|
|
variants: HashMap::new(),
|
|
|
|
|
mediators: HashMap::new(),
|
|
|
|
|
props: vec![],
|
2025-08-03 14:28:36 +02:00
|
|
|
publishers: vec![],
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Add a publisher to the image
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn add_publisher(
|
|
|
|
|
&mut self,
|
|
|
|
|
name: &str,
|
|
|
|
|
origin: &str,
|
|
|
|
|
mirrors: Vec<String>,
|
|
|
|
|
is_default: bool,
|
|
|
|
|
) -> Result<()> {
|
2025-08-03 14:28:36 +02:00
|
|
|
// Check if publisher already exists
|
|
|
|
|
if self.publishers.iter().any(|p| p.name == name) {
|
|
|
|
|
// Update existing publisher
|
|
|
|
|
for publisher in &mut self.publishers {
|
|
|
|
|
if publisher.name == name {
|
|
|
|
|
publisher.origin = origin.to_string();
|
|
|
|
|
publisher.mirrors = mirrors;
|
|
|
|
|
publisher.is_default = is_default;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// If this publisher is now the default, make sure no other publisher is default
|
|
|
|
|
if is_default {
|
|
|
|
|
for other_publisher in &mut self.publishers {
|
|
|
|
|
if other_publisher.name != name {
|
|
|
|
|
other_publisher.is_default = false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
// Add new publisher
|
|
|
|
|
let publisher = Publisher {
|
|
|
|
|
name: name.to_string(),
|
|
|
|
|
origin: origin.to_string(),
|
|
|
|
|
mirrors,
|
|
|
|
|
is_default,
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// If this publisher is the default, make sure no other publisher is default
|
|
|
|
|
if is_default {
|
|
|
|
|
for publisher in &mut self.publishers {
|
|
|
|
|
publisher.is_default = false;
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
self.publishers.push(publisher);
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Save the image to persist the changes
|
|
|
|
|
self.save()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Remove a publisher from the image
|
|
|
|
|
pub fn remove_publisher(&mut self, name: &str) -> Result<()> {
|
|
|
|
|
let initial_len = self.publishers.len();
|
|
|
|
|
self.publishers.retain(|p| p.name != name);
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
if self.publishers.len() == initial_len {
|
|
|
|
|
return Err(ImageError::PublisherNotFound(name.to_string()));
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// If we removed the default publisher, set the first remaining publisher as default
|
|
|
|
|
if self.publishers.iter().all(|p| !p.is_default) && !self.publishers.is_empty() {
|
|
|
|
|
self.publishers[0].is_default = true;
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Save the image to persist the changes
|
|
|
|
|
self.save()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Get the default publisher
|
|
|
|
|
pub fn default_publisher(&self) -> Result<&Publisher> {
|
|
|
|
|
// Find the default publisher
|
|
|
|
|
for publisher in &self.publishers {
|
|
|
|
|
if publisher.is_default {
|
|
|
|
|
return Ok(publisher);
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// If no publisher is marked as default, return the first one
|
|
|
|
|
if !self.publishers.is_empty() {
|
|
|
|
|
return Ok(&self.publishers[0]);
|
2025-08-02 22:12:37 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Err(ImageError::NoPublishers)
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Get a publisher by name
|
|
|
|
|
pub fn get_publisher(&self, name: &str) -> Result<&Publisher> {
|
|
|
|
|
for publisher in &self.publishers {
|
|
|
|
|
if publisher.name == name {
|
|
|
|
|
return Ok(publisher);
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Err(ImageError::PublisherNotFound(name.to_string()))
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Returns all configured publishers as a slice, in configuration order.
pub fn publishers(&self) -> &[Publisher] {
    &self.publishers
}
|
2024-08-14 20:02:29 +02:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Returns the root path of the image.
pub fn path(&self) -> &Path {
    &self.path
}
|
|
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
/// Returns the type of the image (Full or Partial).
pub fn image_type(&self) -> &ImageType {
    &self.image_type
}
|
|
|
|
|
|
|
|
|
|
/// Returns the path to the metadata directory for this image
|
|
|
|
|
pub fn metadata_dir(&self) -> PathBuf {
|
|
|
|
|
match self.image_type {
|
|
|
|
|
ImageType::Full => self.path.join("var/pkg"),
|
|
|
|
|
ImageType::Partial => self.path.join(".pkg"),
|
2024-08-14 20:02:29 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-08-02 22:12:37 +02:00
|
|
|
|
|
|
|
|
/// Returns the path to the image JSON file
|
|
|
|
|
pub fn image_json_path(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("pkg6.image.json")
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Returns the path to the installed packages database
|
|
|
|
|
pub fn installed_db_path(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("installed.redb")
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Returns the path to the manifest directory
|
|
|
|
|
pub fn manifest_dir(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("manifests")
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Returns the path to the catalog directory
|
|
|
|
|
pub fn catalog_dir(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("catalog")
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Returns the path to the catalog database
|
|
|
|
|
pub fn catalog_db_path(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("catalog.redb")
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 22:43:50 +02:00
|
|
|
/// Returns the path to the obsoleted packages database (separate DB)
|
|
|
|
|
pub fn obsoleted_db_path(&self) -> PathBuf {
|
|
|
|
|
self.metadata_dir().join("obsoleted.redb")
|
|
|
|
|
}
|
2025-08-02 22:12:37 +02:00
|
|
|
|
|
|
|
|
/// Creates the metadata directory if it doesn't exist
|
|
|
|
|
pub fn create_metadata_dir(&self) -> Result<()> {
|
|
|
|
|
let metadata_dir = self.metadata_dir();
|
|
|
|
|
fs::create_dir_all(&metadata_dir).map_err(|e| {
|
|
|
|
|
ImageError::IO(std::io::Error::new(
|
|
|
|
|
std::io::ErrorKind::Other,
|
2025-12-22 20:10:17 +01:00
|
|
|
format!(
|
|
|
|
|
"Failed to create metadata directory at {:?}: {}",
|
|
|
|
|
metadata_dir, e
|
|
|
|
|
),
|
2025-08-02 22:12:37 +02:00
|
|
|
))
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Creates the manifest directory if it doesn't exist
|
|
|
|
|
pub fn create_manifest_dir(&self) -> Result<()> {
|
|
|
|
|
let manifest_dir = self.manifest_dir();
|
|
|
|
|
fs::create_dir_all(&manifest_dir).map_err(|e| {
|
|
|
|
|
ImageError::IO(std::io::Error::new(
|
|
|
|
|
std::io::ErrorKind::Other,
|
2025-12-22 20:10:17 +01:00
|
|
|
format!(
|
|
|
|
|
"Failed to create manifest directory at {:?}: {}",
|
|
|
|
|
manifest_dir, e
|
|
|
|
|
),
|
2025-08-03 14:28:36 +02:00
|
|
|
))
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Creates the catalog directory if it doesn't exist
|
|
|
|
|
pub fn create_catalog_dir(&self) -> Result<()> {
|
|
|
|
|
let catalog_dir = self.catalog_dir();
|
|
|
|
|
fs::create_dir_all(&catalog_dir).map_err(|e| {
|
|
|
|
|
ImageError::IO(std::io::Error::new(
|
|
|
|
|
std::io::ErrorKind::Other,
|
2025-12-22 20:10:17 +01:00
|
|
|
format!(
|
|
|
|
|
"Failed to create catalog directory at {:?}: {}",
|
|
|
|
|
catalog_dir, e
|
|
|
|
|
),
|
2025-08-03 14:28:36 +02:00
|
|
|
))
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Initialize the installed packages database
|
|
|
|
|
pub fn init_installed_db(&self) -> Result<()> {
|
|
|
|
|
let db_path = self.installed_db_path();
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Create the installed packages database
|
|
|
|
|
let installed = InstalledPackages::new(&db_path);
|
|
|
|
|
installed.init_db().map_err(|e| {
|
2025-12-22 20:10:17 +01:00
|
|
|
ImageError::Database(format!(
|
|
|
|
|
"Failed to initialize installed packages database: {}",
|
|
|
|
|
e
|
|
|
|
|
))
|
2025-08-04 22:01:38 +02:00
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Add a package to the installed packages database
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn install_package(
|
|
|
|
|
&self,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
manifest: &crate::actions::Manifest,
|
|
|
|
|
) -> Result<()> {
|
2025-08-19 14:30:55 +02:00
|
|
|
// Precheck incorporation dependencies: fail if any stem already has a lock
|
|
|
|
|
for d in &manifest.dependencies {
|
|
|
|
|
if d.dependency_type == "incorporate" {
|
|
|
|
|
if let Some(df) = &d.fmri {
|
|
|
|
|
let stem = df.stem();
|
|
|
|
|
if let Some(_) = self.get_incorporated_release(stem)? {
|
|
|
|
|
return Err(ImageError::Database(format!(
|
2025-12-22 20:10:17 +01:00
|
|
|
"Incorporation lock already exists for stem {}",
|
|
|
|
|
stem
|
2025-08-19 14:30:55 +02:00
|
|
|
)));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add to installed database
|
2025-08-04 22:01:38 +02:00
|
|
|
let installed = InstalledPackages::new(self.installed_db_path());
|
|
|
|
|
installed.add_package(fmri, manifest).map_err(|e| {
|
2025-12-22 20:10:17 +01:00
|
|
|
ImageError::Database(format!(
|
|
|
|
|
"Failed to add package to installed database: {}",
|
|
|
|
|
e
|
|
|
|
|
))
|
2025-08-19 14:30:55 +02:00
|
|
|
})?;
|
|
|
|
|
|
|
|
|
|
// Write incorporation locks for any incorporate dependencies
|
|
|
|
|
for d in &manifest.dependencies {
|
|
|
|
|
if d.dependency_type == "incorporate" {
|
|
|
|
|
if let Some(df) = &d.fmri {
|
|
|
|
|
let stem = df.stem();
|
|
|
|
|
let ver = df.version();
|
|
|
|
|
if !ver.is_empty() {
|
|
|
|
|
// Store the full version string (release[,branch][-build][:timestamp])
|
|
|
|
|
// Ignore errors here? Better to propagate to ensure consistency
|
|
|
|
|
self.add_incorporation_lock(stem, &ver)?;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Ok(())
|
2025-08-04 22:01:38 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Remove a package from the installed packages database
|
|
|
|
|
pub fn uninstall_package(&self, fmri: &crate::fmri::Fmri) -> Result<()> {
|
|
|
|
|
let installed = InstalledPackages::new(self.installed_db_path());
|
|
|
|
|
installed.remove_package(fmri).map_err(|e| {
|
2025-12-22 20:10:17 +01:00
|
|
|
ImageError::Database(format!(
|
|
|
|
|
"Failed to remove package from installed database: {}",
|
|
|
|
|
e
|
|
|
|
|
))
|
2025-08-04 22:01:38 +02:00
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Query the installed packages database for packages matching a pattern
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn query_installed_packages(
|
|
|
|
|
&self,
|
|
|
|
|
pattern: Option<&str>,
|
|
|
|
|
) -> Result<Vec<InstalledPackageInfo>> {
|
2025-08-04 22:01:38 +02:00
|
|
|
let installed = InstalledPackages::new(self.installed_db_path());
|
2025-12-22 20:10:17 +01:00
|
|
|
installed
|
|
|
|
|
.query_packages(pattern)
|
|
|
|
|
.map_err(|e| ImageError::Database(format!("Failed to query installed packages: {}", e)))
|
2025-08-04 22:01:38 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Get a manifest from the installed packages database
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn get_manifest_from_installed(
|
|
|
|
|
&self,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
) -> Result<Option<crate::actions::Manifest>> {
|
2025-08-04 22:01:38 +02:00
|
|
|
let installed = InstalledPackages::new(self.installed_db_path());
|
|
|
|
|
installed.get_manifest(fmri).map_err(|e| {
|
2025-12-22 20:10:17 +01:00
|
|
|
ImageError::Database(format!(
|
|
|
|
|
"Failed to get manifest from installed database: {}",
|
|
|
|
|
e
|
|
|
|
|
))
|
2025-08-04 22:01:38 +02:00
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Check if a package is installed
|
|
|
|
|
pub fn is_package_installed(&self, fmri: &crate::fmri::Fmri) -> Result<bool> {
|
|
|
|
|
let installed = InstalledPackages::new(self.installed_db_path());
|
|
|
|
|
installed.is_installed(fmri).map_err(|e| {
|
|
|
|
|
ImageError::Database(format!("Failed to check if package is installed: {}", e))
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 11:06:48 +02:00
|
|
|
/// Save a manifest into the metadata manifests directory for this image.
///
/// The original, unprocessed manifest text is downloaded from the repository
/// and stored under a flattened path:
/// `manifests/<publisher>/<encoded_stem>@<encoded_version>.p5m`
/// A missing publisher falls back to the image default publisher, then
/// "unknown". Note that the `_manifest` argument is intentionally unused:
/// the raw text is always re-fetched from the publisher origin.
///
/// # Errors
/// Publisher lookup, repository, and filesystem errors are propagated. In
/// particular, if the fallback name "unknown" is used and no publisher with
/// that name exists, `get_publisher` below returns `PublisherNotFound`.
pub fn save_manifest(
    &self,
    fmri: &crate::fmri::Fmri,
    _manifest: &crate::actions::Manifest,
) -> Result<std::path::PathBuf> {
    // Determine publisher name: FMRI publisher -> default publisher -> "unknown".
    let pub_name = if let Some(p) = &fmri.publisher {
        p.clone()
    } else if let Ok(def) = self.default_publisher() {
        def.name.clone()
    } else {
        "unknown".to_string()
    };

    // Build directory path manifests/<publisher> (flattened, no stem subfolders)
    let dir_path = self.manifest_dir().join(&pub_name);
    std::fs::create_dir_all(&dir_path)?;

    // Percent-encode filename parts so stems/versions with '/' or other
    // special characters cannot escape the directory. Unreserved characters
    // pass through; space becomes '+'; everything else becomes %XX.
    fn url_encode(s: &str) -> String {
        let mut out = String::new();
        for b in s.bytes() {
            match b {
                b'-' | b'_' | b'.' | b'~' | b'0'..=b'9' | b'a'..=b'z' | b'A'..=b'Z' => {
                    out.push(b as char)
                }
                b' ' => out.push('+'),
                _ => {
                    out.push('%');
                    out.push_str(&format!("{:02X}", b));
                }
            }
        }
        out
    }

    let version = fmri.version();
    let encoded_stem = url_encode(fmri.stem());
    let encoded_version = url_encode(&version);
    let file_path = dir_path.join(format!("{}@{}.p5m", encoded_stem, encoded_version));

    // Fetch raw manifest text from repository
    let publisher_name = pub_name.clone();
    let raw_text = {
        // Look up publisher configuration
        let publisher = self.get_publisher(&publisher_name)?;
        let origin = &publisher.origin;
        if origin.starts_with("file://") {
            // Local file-backed repository.
            let path_str = origin.trim_start_matches("file://");
            let path = std::path::PathBuf::from(path_str);
            let repo = crate::repository::FileBackend::open(&path)?;
            repo.fetch_manifest_text(&publisher_name, fmri)?
        } else {
            // Remote repository over REST.
            let mut repo = crate::repository::RestBackend::open(origin)?;
            // Set cache path for completeness
            let publisher_catalog_dir = self.catalog_dir().join(&publisher.name);
            repo.set_local_cache_path(&publisher_catalog_dir)?;
            repo.fetch_manifest_text(&publisher_name, fmri)?
        }
    };

    // Write atomically: write to a .p5m.tmp sibling, then rename into place.
    // NOTE(review): rename is only atomic when tmp and target share a
    // filesystem — they do here, since both live in `dir_path`.
    let tmp_path = file_path.with_extension("p5m.tmp");
    std::fs::write(&tmp_path, raw_text.as_bytes())?;
    std::fs::rename(&tmp_path, &file_path)?;

    Ok(file_path)
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Initialize the catalog database
|
|
|
|
|
pub fn init_catalog_db(&self) -> Result<()> {
|
2025-12-22 20:10:17 +01:00
|
|
|
let catalog = ImageCatalog::new(
|
|
|
|
|
self.catalog_dir(),
|
|
|
|
|
self.catalog_db_path(),
|
|
|
|
|
self.obsoleted_db_path(),
|
|
|
|
|
);
|
2025-08-04 22:01:38 +02:00
|
|
|
catalog.init_db().map_err(|e| {
|
|
|
|
|
ImageError::Database(format!("Failed to initialize catalog database: {}", e))
|
|
|
|
|
})
|
2025-08-03 14:28:36 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Download catalogs from all configured publishers and build the merged catalog
|
2025-08-03 14:28:36 +02:00
|
|
|
pub fn download_catalogs(&self) -> Result<()> {
|
|
|
|
|
// Create catalog directory if it doesn't exist
|
|
|
|
|
self.create_catalog_dir()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Download catalogs for each publisher
|
|
|
|
|
for publisher in &self.publishers {
|
|
|
|
|
self.download_publisher_catalog(&publisher.name)?;
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Build the merged catalog
|
|
|
|
|
self.build_catalog()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 23:45:41 +02:00
|
|
|
/// Refresh catalogs for specified publishers or all publishers if none specified
///
/// # Arguments
///
/// * `publishers` - Optional list of publishers to refresh. If empty, all publishers are refreshed.
///   A name that matches no configured publisher is silently ignored unless
///   the filtered set ends up empty (then `NoPublishers` is returned).
/// * `full` - If true, perform a full refresh by clearing existing catalog data before downloading.
///
/// # Returns
///
/// * `Result<()>` - Ok if all catalogs were refreshed successfully, Err otherwise
pub fn refresh_catalogs(&self, publishers: &[String], full: bool) -> Result<()> {
    // Create catalog directory if it doesn't exist
    self.create_catalog_dir()?;

    // Determine which publishers to refresh
    let publishers_to_refresh: Vec<&Publisher> = if publishers.is_empty() {
        // If no publishers specified, refresh all
        self.publishers.iter().collect()
    } else {
        // Otherwise, filter publishers by name
        self.publishers
            .iter()
            .filter(|p| publishers.contains(&p.name))
            .collect()
    };

    // Check if we have any publishers to refresh
    if publishers_to_refresh.is_empty() {
        return Err(ImageError::NoPublishers);
    }

    // If full refresh is requested, clear the catalog directory for each
    // publisher. This is destructive: the per-publisher cache is deleted and
    // recreated empty before re-downloading.
    if full {
        for publisher in &publishers_to_refresh {
            let publisher_catalog_dir = self.catalog_dir().join(&publisher.name);
            if publisher_catalog_dir.exists() {
                fs::remove_dir_all(&publisher_catalog_dir).map_err(|e| {
                    ImageError::IO(std::io::Error::new(
                        std::io::ErrorKind::Other,
                        format!(
                            "Failed to remove catalog directory for publisher {}: {}",
                            publisher.name, e
                        ),
                    ))
                })?;
            }
            fs::create_dir_all(&publisher_catalog_dir).map_err(|e| {
                ImageError::IO(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    format!(
                        "Failed to create catalog directory for publisher {}: {}",
                        publisher.name, e
                    ),
                ))
            })?;
        }
    }

    // Download catalogs for each publisher
    for publisher in publishers_to_refresh {
        self.download_publisher_catalog(&publisher.name)?;
    }

    // Build the merged catalog
    self.build_catalog()?;

    Ok(())
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Build the merged catalog from downloaded catalogs
|
|
|
|
|
pub fn build_catalog(&self) -> Result<()> {
|
|
|
|
|
// Initialize the catalog database if it doesn't exist
|
|
|
|
|
self.init_catalog_db()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Get publisher names
|
2025-12-22 20:10:17 +01:00
|
|
|
let publisher_names: Vec<String> = self.publishers.iter().map(|p| p.name.clone()).collect();
|
|
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Create the catalog and build it
|
2025-12-22 20:10:17 +01:00
|
|
|
let catalog = ImageCatalog::new(
|
|
|
|
|
self.catalog_dir(),
|
|
|
|
|
self.catalog_db_path(),
|
|
|
|
|
self.obsoleted_db_path(),
|
|
|
|
|
);
|
|
|
|
|
catalog
|
|
|
|
|
.build_catalog(&publisher_names)
|
|
|
|
|
.map_err(|e| ImageError::Database(format!("Failed to build catalog: {}", e)))
|
2025-08-04 22:01:38 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Query the catalog for packages matching a pattern
|
|
|
|
|
pub fn query_catalog(&self, pattern: Option<&str>) -> Result<Vec<PackageInfo>> {
|
2025-12-22 20:10:17 +01:00
|
|
|
let catalog = ImageCatalog::new(
|
|
|
|
|
self.catalog_dir(),
|
|
|
|
|
self.catalog_db_path(),
|
|
|
|
|
self.obsoleted_db_path(),
|
|
|
|
|
);
|
|
|
|
|
catalog
|
|
|
|
|
.query_packages(pattern)
|
|
|
|
|
.map_err(|e| ImageError::Database(format!("Failed to query catalog: {}", e)))
|
2025-08-04 22:01:38 +02:00
|
|
|
}
|
2025-08-19 14:30:55 +02:00
|
|
|
|
|
|
|
|
/// Look up an incorporation lock for a given stem.
|
|
|
|
|
/// Returns Some(release) if a lock exists, otherwise None.
|
|
|
|
|
pub fn get_incorporated_release(&self, stem: &str) -> Result<Option<String>> {
|
|
|
|
|
let db = Database::open(self.catalog_db_path())
|
|
|
|
|
.map_err(|e| ImageError::Database(format!("Failed to open catalog database: {}", e)))?;
|
2025-12-22 20:10:17 +01:00
|
|
|
let tx = db.begin_read().map_err(|e| {
|
|
|
|
|
ImageError::Database(format!("Failed to begin read transaction: {}", e))
|
|
|
|
|
})?;
|
2025-08-19 14:30:55 +02:00
|
|
|
match tx.open_table(INCORPORATE_TABLE) {
|
2025-12-22 20:10:17 +01:00
|
|
|
Ok(table) => match table.get(stem) {
|
|
|
|
|
Ok(Some(val)) => Ok(Some(String::from_utf8_lossy(val.value()).to_string())),
|
|
|
|
|
Ok(None) => Ok(None),
|
|
|
|
|
Err(e) => Err(ImageError::Database(format!(
|
|
|
|
|
"Failed to read incorporate lock: {}",
|
|
|
|
|
e
|
|
|
|
|
))),
|
|
|
|
|
},
|
2025-08-19 14:30:55 +02:00
|
|
|
Err(_) => Ok(None),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Add an incorporation lock for a stem to a specific release.
/// Fails if a lock already exists for the stem.
///
/// The release string is stored as raw bytes in the incorporate table of the
/// catalog database.
///
/// # Errors
/// [`ImageError::Database`] when the database cannot be opened, a lock
/// already exists, or any transaction step fails.
pub fn add_incorporation_lock(&self, stem: &str, release: &str) -> Result<()> {
    let db = Database::open(self.catalog_db_path())
        .map_err(|e| ImageError::Database(format!("Failed to open catalog database: {}", e)))?;
    let tx = db.begin_write().map_err(|e| {
        ImageError::Database(format!("Failed to begin write transaction: {}", e))
    })?;
    {
        // Inner scope: the table borrows `tx`, so it must be dropped before
        // `tx.commit()` below.
        let mut table = tx.open_table(INCORPORATE_TABLE).map_err(|e| {
            ImageError::Database(format!("Failed to open incorporate table: {}", e))
        })?;
        // Enforce at-most-one lock per stem. An early return here drops the
        // uncommitted transaction, leaving the database unchanged.
        if let Ok(Some(_)) = table.get(stem) {
            return Err(ImageError::Database(format!(
                "Incorporation lock already exists for stem {}",
                stem
            )));
        }
        table.insert(stem, release.as_bytes()).map_err(|e| {
            ImageError::Database(format!("Failed to insert incorporate lock: {}", e))
        })?;
    }
    // Commit makes the lock durable; nothing is visible to readers before this.
    tx.commit().map_err(|e| {
        ImageError::Database(format!("Failed to commit incorporate lock: {}", e))
    })?;
    Ok(())
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Get a manifest from the catalog
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn get_manifest_from_catalog(
|
|
|
|
|
&self,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
) -> Result<Option<crate::actions::Manifest>> {
|
|
|
|
|
let catalog = ImageCatalog::new(
|
|
|
|
|
self.catalog_dir(),
|
|
|
|
|
self.catalog_db_path(),
|
|
|
|
|
self.obsoleted_db_path(),
|
|
|
|
|
);
|
2025-08-04 22:01:38 +02:00
|
|
|
catalog.get_manifest(fmri).map_err(|e| {
|
|
|
|
|
ImageError::Database(format!("Failed to get manifest from catalog: {}", e))
|
|
|
|
|
})
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 11:06:48 +02:00
|
|
|
/// Fetch a full manifest for the given FMRI directly from its repository origin.
|
|
|
|
|
///
|
|
|
|
|
/// This bypasses the local catalog database and retrieves the full manifest from
|
|
|
|
|
/// the configured publisher origin (REST for http/https origins; File backend for
|
|
|
|
|
/// file:// origins). A versioned FMRI is required.
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn get_manifest_from_repository(
|
|
|
|
|
&self,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
) -> Result<crate::actions::Manifest> {
|
2025-08-19 11:06:48 +02:00
|
|
|
// Determine publisher: use FMRI's publisher if present, otherwise default publisher
|
|
|
|
|
let publisher_name = if let Some(p) = &fmri.publisher {
|
|
|
|
|
p.clone()
|
|
|
|
|
} else {
|
|
|
|
|
self.default_publisher()?.name.clone()
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 11:06:48 +02:00
|
|
|
// Look up publisher configuration
|
|
|
|
|
let publisher = self.get_publisher(&publisher_name)?;
|
|
|
|
|
let origin = &publisher.origin;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 11:06:48 +02:00
|
|
|
// Require a concrete version in the FMRI
|
|
|
|
|
if fmri.version().is_empty() {
|
|
|
|
|
return Err(ImageError::Repository(RepositoryError::Other(
|
|
|
|
|
"FMRI must include a version to fetch manifest".to_string(),
|
|
|
|
|
)));
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-19 11:06:48 +02:00
|
|
|
// Choose backend based on origin scheme
|
|
|
|
|
if origin.starts_with("file://") {
|
|
|
|
|
let path_str = origin.trim_start_matches("file://");
|
|
|
|
|
let path = PathBuf::from(path_str);
|
|
|
|
|
let mut repo = FileBackend::open(&path)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
repo.fetch_manifest(&publisher_name, fmri)
|
|
|
|
|
.map_err(Into::into)
|
2025-08-19 11:06:48 +02:00
|
|
|
} else {
|
|
|
|
|
let mut repo = RestBackend::open(origin)?;
|
|
|
|
|
// Optionally set a per-publisher cache directory (used by other REST ops)
|
|
|
|
|
let publisher_catalog_dir = self.catalog_dir().join(&publisher.name);
|
|
|
|
|
repo.set_local_cache_path(&publisher_catalog_dir)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
repo.fetch_manifest(&publisher_name, fmri)
|
|
|
|
|
.map_err(Into::into)
|
2025-08-19 11:06:48 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
/// Download catalog for a specific publisher
|
|
|
|
|
pub fn download_publisher_catalog(&self, publisher_name: &str) -> Result<()> {
|
|
|
|
|
// Get the publisher
|
|
|
|
|
let publisher = self.get_publisher(publisher_name)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Create a REST backend for the publisher
|
|
|
|
|
let mut repo = RestBackend::open(&publisher.origin)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Set local cache path to the catalog directory for this publisher
|
|
|
|
|
let publisher_catalog_dir = self.catalog_dir().join(&publisher.name);
|
|
|
|
|
fs::create_dir_all(&publisher_catalog_dir)?;
|
|
|
|
|
repo.set_local_cache_path(&publisher_catalog_dir)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Download the catalog
|
|
|
|
|
repo.download_catalog(&publisher.name, None)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
/// Create a new image with the basic directory structure
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-08-04 22:01:38 +02:00
|
|
|
/// This method only creates the image structure without adding publishers or downloading catalogs.
|
|
|
|
|
/// Publisher addition and catalog downloading should be handled separately.
|
2025-08-04 23:01:04 +02:00
|
|
|
///
|
|
|
|
|
/// # Arguments
|
|
|
|
|
///
|
|
|
|
|
/// * `path` - The path where the image will be created
|
|
|
|
|
/// * `image_type` - The type of image to create (Full or Partial)
|
|
|
|
|
pub fn create_image<P: AsRef<Path>>(path: P, image_type: ImageType) -> Result<Self> {
|
|
|
|
|
// Create a new image based on the specified type
|
|
|
|
|
let image = match image_type {
|
|
|
|
|
ImageType::Full => Image::new_full(path.as_ref().to_path_buf()),
|
|
|
|
|
ImageType::Partial => Image::new_partial(path.as_ref().to_path_buf()),
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Create the directory structure
|
|
|
|
|
image.create_metadata_dir()?;
|
|
|
|
|
image.create_manifest_dir()?;
|
|
|
|
|
image.create_catalog_dir()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
// Initialize the installed packages database
|
|
|
|
|
image.init_installed_db()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Initialize the catalog database
|
|
|
|
|
image.init_catalog_db()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-04 22:01:38 +02:00
|
|
|
// Save the image
|
|
|
|
|
image.save()?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-03 14:28:36 +02:00
|
|
|
Ok(image)
|
|
|
|
|
}
|
2025-08-02 22:12:37 +02:00
|
|
|
|
|
|
|
|
/// Saves the image data to the metadata directory
|
|
|
|
|
pub fn save(&self) -> Result<()> {
|
|
|
|
|
self.create_metadata_dir()?;
|
|
|
|
|
let json_path = self.image_json_path();
|
|
|
|
|
let file = File::create(&json_path).map_err(|e| {
|
|
|
|
|
ImageError::IO(std::io::Error::new(
|
|
|
|
|
std::io::ErrorKind::Other,
|
|
|
|
|
format!("Failed to create image JSON file at {:?}: {}", json_path, e),
|
|
|
|
|
))
|
|
|
|
|
})?;
|
|
|
|
|
serde_json::to_writer_pretty(file, self).map_err(ImageError::Json)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Loads an image from the specified path
|
|
|
|
|
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
|
|
|
|
|
let path = path.as_ref();
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
// Check for both full and partial image JSON files
|
|
|
|
|
let full_image = Image::new_full(path);
|
|
|
|
|
let partial_image = Image::new_partial(path);
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
let full_json_path = full_image.image_json_path();
|
|
|
|
|
let partial_json_path = partial_image.image_json_path();
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
// Determine which JSON file exists
|
|
|
|
|
let json_path = if full_json_path.exists() {
|
|
|
|
|
full_json_path
|
|
|
|
|
} else if partial_json_path.exists() {
|
|
|
|
|
partial_json_path
|
|
|
|
|
} else {
|
|
|
|
|
return Err(ImageError::InvalidPath(format!(
|
2025-12-22 20:10:17 +01:00
|
|
|
"Image JSON file not found at either {:?} or {:?}",
|
2025-08-02 22:12:37 +02:00
|
|
|
full_json_path, partial_json_path
|
|
|
|
|
)));
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
let file = File::open(&json_path).map_err(|e| {
|
|
|
|
|
ImageError::IO(std::io::Error::new(
|
|
|
|
|
std::io::ErrorKind::Other,
|
|
|
|
|
format!("Failed to open image JSON file at {:?}: {}", json_path, e),
|
|
|
|
|
))
|
|
|
|
|
})?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 22:12:37 +02:00
|
|
|
serde_json::from_reader(file).map_err(ImageError::Json)
|
|
|
|
|
}
|
2025-07-26 12:54:01 +02:00
|
|
|
}
|