2025-07-21 22:02:05 +02:00
|
|
|
// This Source Code Form is subject to the terms of
|
|
|
|
|
// the Mozilla Public License, v. 2.0. If a copy of the
|
|
|
|
|
// MPL was not distributed with this file, You can
|
|
|
|
|
// obtain one at https://mozilla.org/MPL/2.0/.
|
|
|
|
|
|
2025-07-27 15:22:49 +02:00
|
|
|
use super::{RepositoryError, Result};
|
2025-07-21 22:37:08 +02:00
|
|
|
use flate2::Compression as GzipCompression;
|
2025-12-22 20:10:17 +01:00
|
|
|
use flate2::write::GzEncoder;
|
2025-07-21 22:37:08 +02:00
|
|
|
use lz4::EncoderBuilder;
|
2025-07-22 11:57:24 +02:00
|
|
|
use regex::Regex;
|
2025-07-26 10:34:45 +02:00
|
|
|
use serde::{Deserialize, Serialize};
|
|
|
|
|
use sha2::{Digest as Sha2Digest, Sha256};
|
2026-01-18 12:51:55 +01:00
|
|
|
use std::collections::{BTreeMap, HashMap, HashSet};
|
2025-07-26 10:34:45 +02:00
|
|
|
use std::fs;
|
|
|
|
|
use std::fs::File;
|
|
|
|
|
use std::io::{Read, Write};
|
|
|
|
|
use std::path::{Path, PathBuf};
|
|
|
|
|
use std::str::FromStr;
|
2026-02-05 15:57:56 +01:00
|
|
|
use std::sync::Mutex;
|
2025-07-26 10:34:45 +02:00
|
|
|
use std::time::{SystemTime, UNIX_EPOCH};
|
2025-07-26 23:41:09 +02:00
|
|
|
use tracing::{debug, error, info};
|
2025-07-26 10:34:45 +02:00
|
|
|
use crate::actions::{File as FileAction, Manifest};
|
2026-03-15 19:32:24 +01:00
|
|
|
use crate::digest::{Digest, DigestAlgorithm, DigestSource};
|
2025-07-22 14:10:37 +02:00
|
|
|
use crate::fmri::Fmri;
|
2026-03-14 22:01:48 +01:00
|
|
|
use crate::payload::{Payload, PayloadCompressionAlgorithm};
|
2025-07-21 22:02:05 +02:00
|
|
|
|
2025-12-22 20:10:17 +01:00
|
|
|
use super::catalog_writer;
|
2025-07-26 10:34:45 +02:00
|
|
|
use super::{
|
2025-12-22 20:10:17 +01:00
|
|
|
PackageContents, PackageInfo, PublisherInfo, REPOSITORY_CONFIG_FILENAME, ReadableRepository,
|
|
|
|
|
RepositoryConfig, RepositoryInfo, RepositoryVersion, WritableRepository,
|
2025-07-26 10:34:45 +02:00
|
|
|
};
|
2025-08-02 13:17:49 +02:00
|
|
|
use ini::Ini;
|
2025-07-21 22:02:05 +02:00
|
|
|
|
2025-07-27 16:12:59 +02:00
|
|
|
// Holds the per-package content vectors (files, directories, links,
// dependencies, licenses) accumulated while assembling package contents.
// Deriving `Default` replaces the previous hand-rolled constructor that
// just built empty vectors field by field.
#[derive(Default)]
struct PackageContentVectors {
    files: Vec<String>,
    directories: Vec<String>,
    links: Vec<String>,
    dependencies: Vec<String>,
    licenses: Vec<String>,
}

impl PackageContentVectors {
    /// Create an empty set of content vectors.
    ///
    /// Kept for call-site compatibility; equivalent to `Self::default()`.
    fn new() -> Self {
        Self::default()
    }
}
|
|
|
|
|
|
2026-01-18 12:29:44 +01:00
|
|
|
/// Entry in the search index
///
/// One record in the serialized search index, associating a matched token
/// with the package and action it was extracted from.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct IndexEntry {
    /// FMRI of the package this entry belongs to, rendered as a string.
    pub fmri: String,
    /// Kind of action the entry was extracted from — presumably one of the
    /// manifest action names (file, dir, …); confirm against the index builder.
    pub action_type: String,
    /// Category of index this entry lives in — TODO confirm the value set
    /// against the code that writes the index.
    pub index_type: String,
    /// Indexed value associated with the token.
    pub value: String,
    /// The term that matched (original case).
    pub token: String,
    /// Optional extra key/value attributes; omitted from serialization
    /// when empty and defaulted to empty on deserialization.
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub attributes: BTreeMap<String, String>,
}
|
|
|
|
|
|
2026-02-06 00:04:23 +01:00
|
|
|
/// Convert a shell-style glob pattern into an anchored regular-expression string.
///
/// `*` maps to `.*`, `?` maps to `.`, regex metacharacters are escaped with a
/// backslash, and every other character is copied through unchanged. The result
/// is wrapped in `^…$` so the whole input must match.
pub fn glob_to_regex(pattern: &str) -> String {
    let body: String = pattern
        .chars()
        .map(|ch| match ch {
            '*' => String::from(".*"),
            '?' => String::from("."),
            // Characters with special meaning in regex syntax get escaped.
            '.' | '+' | '(' | ')' | '[' | ']' | '{' | '}' | '^' | '$' | '|' | '\\' => {
                format!("\\{}", ch)
            }
            other => other.to_string(),
        })
        .collect();
    format!("^{}$", body)
}
|
|
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Repository implementation that uses the local filesystem
///
/// All repository state (configuration, publisher trees, payload files,
/// manifests) lives under `path`.
pub struct FileBackend {
    /// Root directory of the repository on disk.
    pub path: PathBuf,
    /// Parsed repository configuration (publishers, default publisher, …).
    pub config: RepositoryConfig,
    /// Catalog manager for handling catalog operations
    /// Uses Mutex for interior mutability to allow mutation through immutable references (thread-safe)
    catalog_manager: Option<Mutex<crate::repository::catalog::CatalogManager>>,
    /// Manager for obsoleted packages
    /// Same Mutex-based interior-mutability pattern as `catalog_manager`.
    obsoleted_manager:
        Option<Mutex<crate::repository::obsoleted::ObsoletedPackageManager>>,
}
|
|
|
|
|
|
|
|
|
|
/// Transaction for publishing packages
///
/// Created via `Transaction::new`, which allocates a working directory under
/// `<repo>/trans/<id>`. Payload files are staged there by `add_file` and moved
/// into their final repository location by `commit`.
pub struct Transaction {
    /// Unique ID for the transaction (also used as the directory name)
    #[allow(dead_code)]
    id: String,
    /// Path to the transaction directory
    path: PathBuf,
    /// Manifest being updated
    manifest: Manifest,
    /// Files to be published: (source_path, compressed_hash, primary_hash)
    files: Vec<(PathBuf, String, String)>,
    /// Repository reference (root path of the repository)
    repo: PathBuf,
    /// Publisher name; if unset, commit falls back to the repository default
    publisher: Option<String>,
    /// Legacy manifest content (optional; JSON is written as a fallback)
    legacy_manifest_content: Option<String>,
}
|
|
|
|
|
|
|
|
|
|
impl Transaction {
|
|
|
|
|
/// Create a new transaction
|
|
|
|
|
pub fn new(repo_path: PathBuf) -> Result<Self> {
|
|
|
|
|
// Generate a unique ID based on timestamp
|
|
|
|
|
let timestamp = SystemTime::now()
|
|
|
|
|
.duration_since(UNIX_EPOCH)
|
|
|
|
|
.unwrap()
|
|
|
|
|
.as_secs();
|
|
|
|
|
let id = format!("trans_{}", timestamp);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-26 23:41:09 +02:00
|
|
|
// Create a transaction directory
|
2025-07-21 22:02:05 +02:00
|
|
|
let trans_path = repo_path.join("trans").join(&id);
|
|
|
|
|
fs::create_dir_all(&trans_path)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(Transaction {
|
|
|
|
|
id,
|
|
|
|
|
path: trans_path,
|
|
|
|
|
manifest: Manifest::new(),
|
|
|
|
|
files: Vec::new(),
|
|
|
|
|
repo: repo_path,
|
2025-07-26 11:52:42 +02:00
|
|
|
publisher: None,
|
2025-12-23 14:09:14 +01:00
|
|
|
legacy_manifest_content: None,
|
2025-07-21 22:02:05 +02:00
|
|
|
})
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-26 11:52:42 +02:00
|
|
|
/// Set the publisher for this transaction
|
|
|
|
|
pub fn set_publisher(&mut self, publisher: &str) {
|
|
|
|
|
self.publisher = Some(publisher.to_string());
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-23 14:09:14 +01:00
|
|
|
/// Set the legacy manifest content for this transaction
|
|
|
|
|
pub fn set_legacy_manifest(&mut self, content: String) {
|
|
|
|
|
self.legacy_manifest_content = Some(content);
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-21 23:20:19 +02:00
|
|
|
/// Update the manifest in the transaction
|
|
|
|
|
///
|
|
|
|
|
/// This intelligently merges the provided manifest with the existing one,
|
|
|
|
|
/// preserving file actions that have already been added to the transaction.
|
2025-07-26 10:34:45 +02:00
|
|
|
///
|
2025-07-21 23:20:19 +02:00
|
|
|
/// The merge strategy:
|
|
|
|
|
/// - Keeps all file actions from the transaction's manifest (these have been processed with checksums, etc.)
|
|
|
|
|
/// - Adds any file actions from the provided manifest that don't exist in the transaction's manifest
|
|
|
|
|
/// - Merges other types of actions (attributes, directories, dependencies, licenses, links) from both manifests
|
|
|
|
|
pub fn update_manifest(&mut self, manifest: Manifest) {
|
|
|
|
|
// Keep track of file paths that are already in the transaction's manifest
|
2025-07-26 23:41:09 +02:00
|
|
|
let existing_file_paths: HashSet<String> =
|
2025-07-26 10:34:45 +02:00
|
|
|
self.manifest.files.iter().map(|f| f.path.clone()).collect();
|
|
|
|
|
|
2025-07-21 23:20:19 +02:00
|
|
|
// Add file actions from the provided manifest that don't exist in the transaction's manifest
|
|
|
|
|
for file in manifest.files {
|
|
|
|
|
if !existing_file_paths.contains(&file.path) {
|
|
|
|
|
self.manifest.add_file(file);
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 23:20:19 +02:00
|
|
|
// Merge other types of actions
|
|
|
|
|
self.manifest.attributes.extend(manifest.attributes);
|
|
|
|
|
self.manifest.directories.extend(manifest.directories);
|
|
|
|
|
self.manifest.dependencies.extend(manifest.dependencies);
|
|
|
|
|
self.manifest.licenses.extend(manifest.licenses);
|
|
|
|
|
self.manifest.links.extend(manifest.links);
|
2026-02-06 00:40:59 +01:00
|
|
|
self.manifest.signatures.extend(manifest.signatures);
|
|
|
|
|
self.manifest.users.extend(manifest.users);
|
|
|
|
|
self.manifest.groups.extend(manifest.groups);
|
|
|
|
|
self.manifest.drivers.extend(manifest.drivers);
|
|
|
|
|
self.manifest.legacies.extend(manifest.legacies);
|
|
|
|
|
self.manifest.transforms.extend(manifest.transforms);
|
2025-07-21 23:20:19 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Process a file for the transaction
|
|
|
|
|
///
|
|
|
|
|
/// Takes a FileAction and a path to a file in a prototype directory.
|
2025-07-21 22:37:08 +02:00
|
|
|
/// Calculates the file's checksum, compresses the content using the specified algorithm (Gzip or LZ4),
|
|
|
|
|
/// stores the compressed content in a temp file in the transactions directory,
|
|
|
|
|
/// and updates the FileAction with the hash information for both uncompressed and compressed versions.
|
2025-07-21 22:02:05 +02:00
|
|
|
pub fn add_file(&mut self, file_action: FileAction, file_path: &Path) -> Result<()> {
|
|
|
|
|
// Calculate SHA256 hash of the file (uncompressed)
|
|
|
|
|
let hash = Self::calculate_file_hash(file_path)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Create a temp file path in the transactions directory
|
|
|
|
|
let temp_file_name = format!("temp_{}", hash);
|
|
|
|
|
let temp_file_path = self.path.join(temp_file_name);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Check if the temp file already exists
|
|
|
|
|
if temp_file_path.exists() {
|
|
|
|
|
// If it exists, remove it to avoid any issues with existing content
|
2025-12-22 20:10:17 +01:00
|
|
|
fs::remove_file(&temp_file_path).map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: temp_file_path.clone(),
|
|
|
|
|
source: e,
|
2025-07-27 15:22:49 +02:00
|
|
|
})?;
|
2025-07-21 22:37:08 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Read the file content
|
2025-12-22 20:10:17 +01:00
|
|
|
let file_content = fs::read(file_path).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: file_path.to_path_buf(),
|
|
|
|
|
source: e,
|
2025-07-27 15:22:49 +02:00
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Create a payload with the hash information if it doesn't exist
|
2025-07-21 22:37:08 +02:00
|
|
|
let mut updated_file_action = file_action;
|
2025-07-21 22:02:05 +02:00
|
|
|
let mut payload = updated_file_action.payload.unwrap_or_else(Payload::default);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Set the compression algorithm (use the one from payload or default to Gzip)
|
|
|
|
|
let compression_algorithm = payload.compression_algorithm;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Compress the file based on the selected algorithm
|
|
|
|
|
let compressed_hash = match compression_algorithm {
|
|
|
|
|
PayloadCompressionAlgorithm::Gzip => {
|
2025-07-26 23:41:09 +02:00
|
|
|
// Create a Gzip encoder with the default compression level
|
2025-07-21 22:37:08 +02:00
|
|
|
let mut encoder = GzEncoder::new(Vec::new(), GzipCompression::default());
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Write the file content to the encoder
|
2025-07-27 15:22:49 +02:00
|
|
|
encoder.write_all(&file_content).map_err(|e| {
|
|
|
|
|
RepositoryError::Other(format!("Failed to write data to Gzip encoder: {}", e))
|
|
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Finish the compression and get the compressed data
|
2025-07-27 15:22:49 +02:00
|
|
|
let compressed_data = encoder.finish().map_err(|e| {
|
|
|
|
|
RepositoryError::Other(format!("Failed to finish Gzip compression: {}", e))
|
|
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Write the compressed data to the temp file
|
2025-07-27 15:22:49 +02:00
|
|
|
fs::write(&temp_file_path, &compressed_data).map_err(|e| {
|
2025-12-08 23:13:27 +01:00
|
|
|
RepositoryError::FileWriteError {
|
|
|
|
|
path: temp_file_path.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Calculate hash of the compressed data
|
|
|
|
|
let mut hasher = Sha256::new();
|
|
|
|
|
hasher.update(&compressed_data);
|
|
|
|
|
format!("{:x}", hasher.finalize())
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|
2025-07-21 22:37:08 +02:00
|
|
|
PayloadCompressionAlgorithm::LZ4 => {
|
2025-07-26 23:41:09 +02:00
|
|
|
// Create an LZ4 encoder with the default compression level
|
2025-07-27 15:22:49 +02:00
|
|
|
let mut encoder = EncoderBuilder::new().build(Vec::new()).map_err(|e| {
|
|
|
|
|
RepositoryError::Other(format!("Failed to create LZ4 encoder: {}", e))
|
|
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Write the file content to the encoder
|
2025-07-27 15:22:49 +02:00
|
|
|
encoder.write_all(&file_content).map_err(|e| {
|
|
|
|
|
RepositoryError::Other(format!("Failed to write data to LZ4 encoder: {}", e))
|
|
|
|
|
})?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Finish the compression and get the compressed data
|
|
|
|
|
let (compressed_data, _) = encoder.finish();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Write the compressed data to the temp file
|
2025-07-26 10:34:45 +02:00
|
|
|
fs::write(&temp_file_path, &compressed_data).map_err(|e| {
|
2025-12-08 23:13:27 +01:00
|
|
|
RepositoryError::FileWriteError {
|
|
|
|
|
path: temp_file_path.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
})?;
|
|
|
|
|
|
2025-07-21 22:37:08 +02:00
|
|
|
// Calculate hash of the compressed data
|
|
|
|
|
let mut hasher = Sha256::new();
|
|
|
|
|
hasher.update(&compressed_data);
|
|
|
|
|
format!("{:x}", hasher.finalize())
|
|
|
|
|
}
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-26 23:41:09 +02:00
|
|
|
// Add a file to the list for later processing during commit
|
2026-03-15 20:44:43 +01:00
|
|
|
// Track both compressed hash (storage key) and primary hash (for compatibility lookups)
|
2025-07-26 10:34:45 +02:00
|
|
|
self.files
|
2026-03-15 20:44:43 +01:00
|
|
|
.push((temp_file_path.clone(), compressed_hash.clone(), hash.clone()));
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2026-03-15 19:32:24 +01:00
|
|
|
// Set the primary identifier (uncompressed SHA256 hash)
|
|
|
|
|
payload.primary_identifier = Digest {
|
|
|
|
|
hash: hash.clone(),
|
|
|
|
|
algorithm: DigestAlgorithm::SHA256,
|
|
|
|
|
source: DigestSource::UncompressedFile,
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Set the compression algorithm
|
2025-07-21 22:37:08 +02:00
|
|
|
payload.compression_algorithm = compression_algorithm;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Add the compressed hash as an additional identifier
|
2026-03-15 19:32:24 +01:00
|
|
|
let compressed_source = match payload.compression_algorithm {
|
|
|
|
|
PayloadCompressionAlgorithm::Gzip => DigestSource::GzipCompressed,
|
|
|
|
|
PayloadCompressionAlgorithm::LZ4 => DigestSource::GzipCompressed, // LZ4 shares file storage pattern
|
|
|
|
|
};
|
|
|
|
|
payload.additional_identifiers.push(Digest {
|
|
|
|
|
hash: compressed_hash.clone(),
|
|
|
|
|
algorithm: DigestAlgorithm::SHA256,
|
|
|
|
|
source: compressed_source,
|
|
|
|
|
});
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Update the FileAction with the payload
|
|
|
|
|
updated_file_action.payload = Some(payload);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Add the FileAction to the manifest
|
|
|
|
|
self.manifest.add_file(updated_file_action);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
    /// Commit the transaction
    ///
    /// Finalizes the transaction by:
    /// 1. Writing the JSON manifest (and the legacy manifest, or the JSON text
    ///    as a fallback) into the transaction directory.
    /// 2. Resolving the publisher (explicitly set one, else the repository's
    ///    configured default).
    /// 3. Extracting the package stem/version from the manifest's `pkg.fmri`
    ///    attribute (stem stays "unknown" if none parses).
    /// 4. Moving staged payload files into the publisher's file store, keyed
    ///    by primary hash, with a best-effort hardlink under the compressed hash.
    /// 5. Moving both manifests into the package directory.
    /// 6. Creating `pub.p5i` for the publisher if it does not exist yet.
    /// 7. Removing the transaction directory.
    ///
    /// Consumes the transaction. On error the transaction directory is left in
    /// place — no rollback is attempted.
    ///
    /// # Errors
    /// Fails if any filesystem operation or config read/parse fails, or if no
    /// publisher can be resolved.
    pub fn commit(self) -> Result<()> {
        // Save the JSON manifest to the transaction directory
        let manifest_json_path = self.path.join("manifest.json");
        let manifest_json = serde_json::to_string_pretty(&self.manifest)?;
        fs::write(&manifest_json_path, &manifest_json).map_err(|e| {
            RepositoryError::FileWriteError {
                path: manifest_json_path.clone(),
                source: e,
            }
        })?;

        // Save the legacy manifest to the transaction directory
        let manifest_legacy_path = self.path.join("manifest");
        if let Some(content) = &self.legacy_manifest_content {
            fs::write(&manifest_legacy_path, content).map_err(|e| {
                RepositoryError::FileWriteError {
                    path: manifest_legacy_path.clone(),
                    source: e,
                }
            })?;
        } else {
            // Fallback: write JSON as legacy content if none provided (status quo)
            fs::write(&manifest_legacy_path, &manifest_json).map_err(|e| {
                RepositoryError::FileWriteError {
                    path: manifest_legacy_path.clone(),
                    source: e,
                }
            })?;
        }

        // Determine the publisher to use
        let publisher = match &self.publisher {
            Some(pub_name) => {
                debug!("Using specified publisher: {}", pub_name);
                pub_name.clone()
            }
            None => {
                debug!("No publisher specified, trying to use default publisher");
                // If no publisher is specified, use the default publisher from the repository config
                let config_path = self.repo.join(REPOSITORY_CONFIG_FILENAME);
                if config_path.exists() {
                    let config_content = fs::read_to_string(&config_path)?;
                    let config: RepositoryConfig = serde_json::from_str(&config_content)?;
                    match config.default_publisher {
                        Some(default_pub) => {
                            debug!("Using default publisher: {}", default_pub);
                            default_pub
                        }
                        None => {
                            debug!("No default publisher set in repository");
                            return Err(RepositoryError::Other(
                                "No publisher specified and no default publisher set in repository"
                                    .to_string(),
                            ));
                        }
                    }
                } else {
                    debug!("Repository configuration not found");
                    return Err(RepositoryError::Other(
                        "No publisher specified and repository configuration not found".to_string(),
                    ));
                }
            }
        };

        // Extract package information from manifest
        let mut package_stem = String::from("unknown");
        let mut package_version = String::from("");
        for attr in &self.manifest.attributes {
            if attr.key == "pkg.fmri" && !attr.values.is_empty() {
                if let Ok(fmri) = Fmri::parse(&attr.values[0]) {
                    package_stem = fmri.stem().to_string();
                    package_version = fmri.version();
                    debug!("Extracted package stem from FMRI: {}", package_stem);
                    debug!("Extracted package version from FMRI: {}", package_version);
                    break;
                }
            }
        }

        // Move files to their final location (atomic rename, same filesystem).
        // Store under the primary (uncompressed) hash — this is the hash that clients
        // use to request files via /file/0/<hash>. Also store under compressed hash
        // for internal lookups.
        for (source_path, compressed_hash, primary_hash) in self.files {
            // Primary storage path: use the primary (uncompressed) hash as the key,
            // matching IPS protocol where clients look up files by manifest hash
            let primary_path =
                FileBackend::construct_file_path_with_publisher(&self.repo, &publisher, &primary_hash);

            if let Some(parent) = primary_path.parent() {
                fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
                    path: parent.to_path_buf(),
                    source: e,
                })?;
            }

            // If the payload already exists (content-addressed dedup), keep it.
            if !primary_path.exists() {
                fs::rename(&source_path, &primary_path).map_err(|e| RepositoryError::FileRenameError {
                    from: source_path.clone(),
                    to: primary_path.clone(),
                    source: e,
                })?;
            }

            // Also create a hardlink under the compressed hash for internal lookups
            if primary_hash != compressed_hash {
                let compressed_path =
                    FileBackend::construct_file_path_with_publisher(&self.repo, &publisher, &compressed_hash);
                if !compressed_path.exists() {
                    if let Some(parent) = compressed_path.parent() {
                        fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
                            path: parent.to_path_buf(),
                            source: e,
                        })?;
                    }
                    if let Err(e) = fs::hard_link(&primary_path, &compressed_path) {
                        debug!("Failed to create hardlink for compressed hash: {}", e);
                        // Not fatal — compressed hash lookup is optional
                    }
                }
            }
        }

        // Create the package directory if it doesn't exist
        let pkg_dir = FileBackend::construct_package_dir(&self.repo, &publisher, &package_stem);
        debug!("Package directory: {}", pkg_dir.display());
        if !pkg_dir.exists() {
            debug!("Creating package directory");
            fs::create_dir_all(&pkg_dir).map_err(|e| RepositoryError::DirectoryCreateError {
                path: pkg_dir.clone(),
                source: e,
            })?;
        }

        // Construct the manifest path using the helper method
        let pkg_manifest_path = if package_version.is_empty() {
            // If no version was provided, store as a default manifest file
            FileBackend::construct_package_dir(&self.repo, &publisher, &package_stem)
                .join("manifest")
        } else {
            FileBackend::construct_manifest_path(
                &self.repo,
                &publisher,
                &package_stem,
                &package_version,
            )
        };
        debug!("Manifest path: {}", pkg_manifest_path.display());

        // Create parent directories if they don't exist
        if let Some(parent) = pkg_manifest_path.parent() {
            debug!("Creating parent directories: {}", parent.display());
            fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
                path: parent.to_path_buf(),
                source: e,
            })?;
        }

        // Move manifests to pkg directory (atomic rename, same filesystem)
        // 1. JSON manifest
        // NOTE(review): building the `.json` sibling path via Display is lossy
        // for non-UTF-8 paths — confirm repository paths are always UTF-8.
        let pkg_manifest_json_path = PathBuf::from(format!("{}.json", pkg_manifest_path.display()));
        debug!(
            "Moving JSON manifest from {} to {}",
            manifest_json_path.display(),
            pkg_manifest_json_path.display()
        );
        fs::rename(&manifest_json_path, &pkg_manifest_json_path).map_err(|e| {
            RepositoryError::FileRenameError {
                from: manifest_json_path,
                to: pkg_manifest_json_path,
                source: e,
            }
        })?;

        // 2. Legacy manifest
        debug!(
            "Moving legacy manifest from {} to {}",
            manifest_legacy_path.display(),
            pkg_manifest_path.display()
        );
        fs::rename(&manifest_legacy_path, &pkg_manifest_path).map_err(|e| {
            RepositoryError::FileRenameError {
                from: manifest_legacy_path,
                to: pkg_manifest_path,
                source: e,
            }
        })?;

        // Check if we need to create a pub.p5i file for the publisher
        // (the config is re-read here; it may also have been read above
        // during publisher resolution).
        let config_path = self.repo.join(REPOSITORY_CONFIG_FILENAME);
        if config_path.exists() {
            let config_content = fs::read_to_string(&config_path)?;
            let config: RepositoryConfig = serde_json::from_str(&config_content)?;

            // Check if this publisher was just added in this transaction
            let publisher_dir = self.repo.join("publisher").join(&publisher);
            let pub_p5i_path = publisher_dir.join("pub.p5i");

            if !pub_p5i_path.exists() {
                debug!("Creating pub.p5i file for publisher: {}", publisher);

                // Create the pub.p5i file via a temporary FileBackend handle.
                let repo = FileBackend {
                    path: self.repo.clone(),
                    config,
                    catalog_manager: None,
                    obsoleted_manager: None,
                };

                repo.create_pub_p5i_file(&publisher)?;
            }
        }

        // Clean up the transaction directory
        fs::remove_dir_all(&self.path).map_err(|e| RepositoryError::DirectoryRemoveError {
            path: self.path.clone(),
            source: e,
        })?;

        Ok(())
    }
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Calculate SHA256 hash of a file
|
|
|
|
|
fn calculate_file_hash(file_path: &Path) -> Result<String> {
|
|
|
|
|
// Open the file
|
|
|
|
|
let mut file = File::open(file_path)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Create a SHA256 hasher
|
|
|
|
|
let mut hasher = Sha256::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Read the file in chunks and update the hasher
|
|
|
|
|
let mut buffer = [0; 1024];
|
|
|
|
|
loop {
|
|
|
|
|
let bytes_read = file.read(&mut buffer)?;
|
|
|
|
|
if bytes_read == 0 {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
hasher.update(&buffer[..bytes_read]);
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Get the hash result
|
|
|
|
|
let hash = hasher.finalize();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Convert to hex string
|
|
|
|
|
let hash_str = format!("{:x}", hash);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(hash_str)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-26 10:34:45 +02:00
|
|
|
impl ReadableRepository for FileBackend {
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Open an existing repository
|
|
|
|
|
fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
|
|
|
|
|
let path = path.as_ref();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Check if the repository directory exists
|
|
|
|
|
if !path.exists() {
|
2025-07-26 15:33:39 +02:00
|
|
|
return Err(RepositoryError::NotFound(path.display().to_string()));
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Load the repository configuration
|
2025-12-09 12:12:57 +01:00
|
|
|
// Prefer pkg6.repository (JSON). If absent, try legacy pkg5.repository (INI)
|
|
|
|
|
let config6_path = path.join(REPOSITORY_CONFIG_FILENAME);
|
|
|
|
|
let config5_path = path.join("pkg5.repository");
|
|
|
|
|
|
|
|
|
|
let config: RepositoryConfig = if config6_path.exists() {
|
2025-12-22 20:10:17 +01:00
|
|
|
let config_data = fs::read_to_string(&config6_path).map_err(|e| {
|
|
|
|
|
RepositoryError::ConfigReadError(format!("{}: {}", config6_path.display(), e))
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
serde_json::from_str(&config_data)?
|
|
|
|
|
} else if config5_path.exists() {
|
|
|
|
|
// Minimal mapping for legacy INI: take publishers only from INI; do not scan disk.
|
2025-12-22 20:10:17 +01:00
|
|
|
let ini = Ini::load_from_file(&config5_path).map_err(|e| {
|
|
|
|
|
RepositoryError::ConfigReadError(format!("{}: {}", config5_path.display(), e))
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
|
|
|
|
|
// Default repository version for legacy format is v4
|
|
|
|
|
let mut cfg = RepositoryConfig::default();
|
|
|
|
|
|
|
|
|
|
// Try to read default publisher from [publisher] section (key: prefix)
|
|
|
|
|
if let Some(section) = ini.section(Some("publisher")) {
|
|
|
|
|
if let Some(prefix) = section.get("prefix") {
|
|
|
|
|
cfg.default_publisher = Some(prefix.to_string());
|
|
|
|
|
cfg.publishers.push(prefix.to_string());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If INI enumerates publishers in an optional [publishers] section as comma-separated list
|
|
|
|
|
if let Some(section) = ini.section(Some("publishers")) {
|
|
|
|
|
if let Some(list) = section.get("list") {
|
|
|
|
|
// replace list strictly by INI contents per requirements
|
|
|
|
|
cfg.publishers.clear();
|
|
|
|
|
for p in list.split(',') {
|
|
|
|
|
let name = p.trim();
|
|
|
|
|
if !name.is_empty() {
|
|
|
|
|
cfg.publishers.push(name.to_string());
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
cfg
|
|
|
|
|
} else {
|
|
|
|
|
return Err(RepositoryError::ConfigReadError(format!(
|
|
|
|
|
"No repository config found: expected {} or {}",
|
|
|
|
|
config6_path.display(),
|
|
|
|
|
config5_path.display()
|
|
|
|
|
)));
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(FileBackend {
|
|
|
|
|
path: path.to_path_buf(),
|
|
|
|
|
config,
|
2025-07-24 00:28:33 +02:00
|
|
|
catalog_manager: None,
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
obsoleted_manager: None,
|
2025-07-21 22:02:05 +02:00
|
|
|
})
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Get repository information
|
2025-07-22 10:21:16 +02:00
|
|
|
fn get_info(&self) -> Result<RepositoryInfo> {
|
|
|
|
|
let mut publishers = Vec::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
for publisher_name in &self.config.publishers {
|
2025-07-31 00:18:21 +02:00
|
|
|
// Count packages by scanning the publisher's package directory
|
|
|
|
|
let publisher_pkg_dir = Self::construct_package_dir(&self.path, publisher_name, "");
|
2025-07-22 10:21:16 +02:00
|
|
|
let mut package_count = 0;
|
|
|
|
|
let mut latest_timestamp = SystemTime::UNIX_EPOCH;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Check if the publisher directory exists
|
|
|
|
|
if publisher_pkg_dir.exists() {
|
|
|
|
|
// Walk through the directory and count package manifests
|
|
|
|
|
if let Ok(entries) = fs::read_dir(&publisher_pkg_dir) {
|
|
|
|
|
for entry in entries.flatten() {
|
|
|
|
|
let path = entry.path();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Skip directories, only count files (package manifests)
|
|
|
|
|
if path.is_file() {
|
|
|
|
|
package_count += 1;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Update the latest timestamp if this file is newer
|
|
|
|
|
if let Ok(metadata) = fs::metadata(&path) {
|
|
|
|
|
if let Ok(modified) = metadata.modified() {
|
|
|
|
|
if modified > latest_timestamp {
|
|
|
|
|
latest_timestamp = modified;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Status is always "online" for file-based repositories
|
2025-07-21 22:02:05 +02:00
|
|
|
let status = "online".to_string();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Format the timestamp in ISO 8601 format
|
|
|
|
|
let updated = if latest_timestamp == SystemTime::UNIX_EPOCH {
|
2025-07-26 23:41:09 +02:00
|
|
|
// If no files were found, use the current time
|
2025-07-22 10:21:16 +02:00
|
|
|
let now = SystemTime::now();
|
2025-12-22 22:42:56 +01:00
|
|
|
crate::repository::catalog::format_iso8601_basic(&now)
|
2025-07-22 10:21:16 +02:00
|
|
|
} else {
|
2025-12-22 22:42:56 +01:00
|
|
|
crate::repository::catalog::format_iso8601_basic(&latest_timestamp)
|
2025-07-22 10:21:16 +02:00
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Create a PublisherInfo struct and add it to the list
|
|
|
|
|
publishers.push(PublisherInfo {
|
|
|
|
|
name: publisher_name.clone(),
|
|
|
|
|
package_count,
|
|
|
|
|
status,
|
|
|
|
|
updated,
|
|
|
|
|
});
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 10:21:16 +02:00
|
|
|
// Create and return a RepositoryInfo struct
|
2026-01-20 17:44:36 +01:00
|
|
|
Ok(RepositoryInfo {
|
|
|
|
|
publishers,
|
|
|
|
|
default_publisher: self.config.default_publisher.clone(),
|
|
|
|
|
})
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// List packages in the repository
|
2025-07-26 10:34:45 +02:00
|
|
|
fn list_packages(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: Option<&str>,
|
|
|
|
|
pattern: Option<&str>,
|
|
|
|
|
) -> Result<Vec<PackageInfo>> {
|
2025-07-21 22:02:05 +02:00
|
|
|
let mut packages = Vec::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Filter publishers if specified
|
|
|
|
|
let publishers = if let Some(pub_name) = publisher {
|
|
|
|
|
if !self.config.publishers.contains(&pub_name.to_string()) {
|
2025-07-26 15:33:39 +02:00
|
|
|
return Err(RepositoryError::PublisherNotFound(pub_name.to_string()));
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
|
|
|
|
vec![pub_name.to_string()]
|
|
|
|
|
} else {
|
|
|
|
|
self.config.publishers.clone()
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// For each publisher, list packages
|
|
|
|
|
for pub_name in publishers {
|
2025-07-22 11:57:24 +02:00
|
|
|
// Get the publisher's package directory
|
2025-07-31 00:18:21 +02:00
|
|
|
let publisher_pkg_dir = Self::construct_package_dir(&self.path, &pub_name, "");
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 11:57:24 +02:00
|
|
|
// Check if the publisher directory exists
|
|
|
|
|
if publisher_pkg_dir.exists() {
|
|
|
|
|
// Verify that the publisher is in the config
|
|
|
|
|
if !self.config.publishers.contains(&pub_name) {
|
2025-07-27 15:22:49 +02:00
|
|
|
return Err(RepositoryError::Other(format!(
|
|
|
|
|
"Publisher directory exists but is not in the repository configuration: {}",
|
|
|
|
|
pub_name
|
|
|
|
|
)));
|
2025-07-22 11:57:24 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Recursively walk through the directory and collect package manifests
|
2025-07-27 15:22:49 +02:00
|
|
|
self.find_manifests_recursive(
|
|
|
|
|
&publisher_pkg_dir,
|
|
|
|
|
&pub_name,
|
2026-02-06 00:04:23 +01:00
|
|
|
pattern.map(glob_to_regex).as_deref(),
|
2025-07-27 15:22:49 +02:00
|
|
|
&mut packages,
|
|
|
|
|
)?;
|
2025-07-22 11:57:24 +02:00
|
|
|
}
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(packages)
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Show the contents of packages
|
2025-07-26 10:34:45 +02:00
|
|
|
fn show_contents(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: Option<&str>,
|
|
|
|
|
pattern: Option<&str>,
|
|
|
|
|
action_types: Option<&[String]>,
|
|
|
|
|
) -> Result<Vec<PackageContents>> {
|
2025-12-22 20:10:17 +01:00
|
|
|
debug!(
|
|
|
|
|
"show_contents called with publisher: {:?}, pattern: {:?}",
|
|
|
|
|
publisher, pattern
|
|
|
|
|
);
|
2025-07-23 22:39:49 +02:00
|
|
|
// Use a HashMap to store package information
|
2025-07-26 23:41:09 +02:00
|
|
|
let mut packages = HashMap::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Filter publishers if specified
|
|
|
|
|
let publishers = if let Some(pub_name) = publisher {
|
|
|
|
|
if !self.config.publishers.contains(&pub_name.to_string()) {
|
2025-07-26 15:33:39 +02:00
|
|
|
return Err(RepositoryError::PublisherNotFound(pub_name.to_string()));
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
vec![pub_name.to_string()]
|
|
|
|
|
} else {
|
|
|
|
|
self.config.publishers.clone()
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// For each publisher, process packages
|
|
|
|
|
for pub_name in publishers {
|
|
|
|
|
// Get the publisher's package directory
|
2025-07-31 00:18:21 +02:00
|
|
|
let publisher_pkg_dir = Self::construct_package_dir(&self.path, &pub_name, "");
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Check if the publisher directory exists
|
|
|
|
|
if publisher_pkg_dir.exists() {
|
|
|
|
|
// Walk through the directory and collect package manifests
|
|
|
|
|
if let Ok(entries) = fs::read_dir(&publisher_pkg_dir) {
|
|
|
|
|
for entry in entries.flatten() {
|
|
|
|
|
let path = entry.path();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-27 16:12:59 +02:00
|
|
|
if path.is_dir() {
|
|
|
|
|
// Recursively search subdirectories
|
|
|
|
|
if let Ok(subentries) = fs::read_dir(&path) {
|
|
|
|
|
for subentry in subentries.flatten() {
|
|
|
|
|
let subpath = subentry.path();
|
|
|
|
|
if subpath.is_file() {
|
|
|
|
|
// Try to read the first few bytes of the file to check if it's a manifest file
|
|
|
|
|
let mut file = match fs::File::open(&subpath) {
|
|
|
|
|
Ok(file) => file,
|
|
|
|
|
Err(err) => {
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error opening file {}: {}",
|
|
|
|
|
subpath.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let mut buffer = [0; 1024];
|
|
|
|
|
let bytes_read = match file.read(&mut buffer) {
|
|
|
|
|
Ok(bytes) => bytes,
|
|
|
|
|
Err(err) => {
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error reading file {}: {}",
|
|
|
|
|
subpath.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Check if the file starts with a valid manifest marker
|
|
|
|
|
if bytes_read == 0
|
2025-12-22 20:10:17 +01:00
|
|
|
|| (buffer[0] != b'{'
|
|
|
|
|
&& buffer[0] != b'<'
|
|
|
|
|
&& buffer[0] != b's')
|
2025-07-27 16:12:59 +02:00
|
|
|
{
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Parse the manifest file to get package information
|
|
|
|
|
match Manifest::parse_file(&subpath) {
|
|
|
|
|
Ok(manifest) => {
|
|
|
|
|
// Look for the pkg.fmri attribute to identify the package
|
|
|
|
|
let mut pkg_id = String::new();
|
|
|
|
|
|
|
|
|
|
for attr in &manifest.attributes {
|
2025-12-22 20:10:17 +01:00
|
|
|
if attr.key == "pkg.fmri"
|
|
|
|
|
&& !attr.values.is_empty()
|
|
|
|
|
{
|
2025-07-27 16:12:59 +02:00
|
|
|
let fmri = &attr.values[0];
|
|
|
|
|
|
|
|
|
|
// Parse the FMRI using our Fmri type
|
|
|
|
|
match Fmri::parse(fmri) {
|
|
|
|
|
Ok(parsed_fmri) => {
|
|
|
|
|
// Filter by pattern if specified
|
|
|
|
|
if let Some(pat) = pattern {
|
|
|
|
|
// Try to compile the pattern as a regex
|
|
|
|
|
match Regex::new(pat) {
|
|
|
|
|
Ok(regex) => {
|
|
|
|
|
// Use regex matching
|
2025-12-22 20:10:17 +01:00
|
|
|
if !regex.is_match(
|
|
|
|
|
parsed_fmri.stem(),
|
|
|
|
|
) {
|
2025-07-27 16:12:59 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but fall back to the simple string contains
|
2025-12-22 20:10:17 +01:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error compiling regex pattern '{}': {}",
|
|
|
|
|
pat, err
|
|
|
|
|
);
|
|
|
|
|
if !parsed_fmri
|
|
|
|
|
.stem()
|
|
|
|
|
.contains(pat)
|
|
|
|
|
{
|
2025-07-27 16:12:59 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Format the package identifier using the FMRI
|
|
|
|
|
let version = parsed_fmri.version();
|
|
|
|
|
pkg_id = if !version.is_empty() {
|
|
|
|
|
format!(
|
|
|
|
|
"{}@{}",
|
|
|
|
|
parsed_fmri.stem(),
|
|
|
|
|
version
|
|
|
|
|
)
|
|
|
|
|
} else {
|
|
|
|
|
parsed_fmri.stem().to_string()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error parsing FMRI '{}': {}",
|
|
|
|
|
fmri, err
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Skip if we couldn't determine the package ID
|
|
|
|
|
if pkg_id.is_empty() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get or create the content vectors for this package
|
|
|
|
|
let content_vectors = packages
|
|
|
|
|
.entry(pkg_id.clone())
|
|
|
|
|
.or_insert_with(PackageContentVectors::new);
|
|
|
|
|
|
|
|
|
|
// Process file actions
|
|
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"file".to_string())
|
|
|
|
|
{
|
|
|
|
|
for file in &manifest.files {
|
2025-12-22 20:10:17 +01:00
|
|
|
content_vectors
|
|
|
|
|
.files
|
|
|
|
|
.push(file.path.clone());
|
2025-07-27 16:12:59 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Process directory actions
|
|
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"dir".to_string())
|
|
|
|
|
{
|
|
|
|
|
for dir in &manifest.directories {
|
2025-12-22 20:10:17 +01:00
|
|
|
content_vectors
|
|
|
|
|
.directories
|
|
|
|
|
.push(dir.path.clone());
|
2025-07-27 16:12:59 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Process link actions
|
|
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"link".to_string())
|
|
|
|
|
{
|
|
|
|
|
for link in &manifest.links {
|
2025-12-22 20:10:17 +01:00
|
|
|
content_vectors
|
|
|
|
|
.links
|
|
|
|
|
.push(link.path.clone());
|
2025-07-27 16:12:59 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Process dependency actions
|
|
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"depend".to_string())
|
|
|
|
|
{
|
|
|
|
|
for depend in &manifest.dependencies {
|
|
|
|
|
if let Some(fmri) = &depend.fmri {
|
2025-12-22 20:10:17 +01:00
|
|
|
content_vectors
|
|
|
|
|
.dependencies
|
|
|
|
|
.push(fmri.to_string());
|
2025-07-27 16:12:59 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Process license actions
|
|
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"license".to_string())
|
|
|
|
|
{
|
|
|
|
|
for license in &manifest.licenses {
|
2025-12-22 20:10:17 +01:00
|
|
|
if let Some(path_prop) =
|
|
|
|
|
license.properties.get("path")
|
|
|
|
|
{
|
|
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(path_prop.value.clone());
|
|
|
|
|
} else if let Some(license_prop) =
|
|
|
|
|
license.properties.get("license")
|
|
|
|
|
{
|
|
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(license_prop.value.clone());
|
2025-07-27 16:12:59 +02:00
|
|
|
} else {
|
2025-12-22 20:10:17 +01:00
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(license.payload.clone());
|
2025-07-27 16:12:59 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing other files
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error parsing manifest file {}: {}",
|
|
|
|
|
subpath.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else if path.is_file() {
|
|
|
|
|
// Try to read the first few bytes of the file to check if it's a manifest file
|
|
|
|
|
let mut file = match fs::File::open(&path) {
|
|
|
|
|
Ok(file) => file,
|
|
|
|
|
Err(err) => {
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error opening file {}: {}",
|
|
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let mut buffer = [0; 1024];
|
|
|
|
|
let bytes_read = match file.read(&mut buffer) {
|
|
|
|
|
Ok(bytes) => bytes,
|
|
|
|
|
Err(err) => {
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error reading file {}: {}",
|
|
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Check if the file starts with a valid manifest marker
|
|
|
|
|
// For example, if it's a JSON file, it should start with '{'
|
|
|
|
|
if bytes_read == 0
|
|
|
|
|
|| (buffer[0] != b'{' && buffer[0] != b'<' && buffer[0] != b's')
|
|
|
|
|
{
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-22 19:39:46 +02:00
|
|
|
// Parse the manifest file to get package information
|
|
|
|
|
match Manifest::parse_file(&path) {
|
|
|
|
|
Ok(manifest) => {
|
|
|
|
|
// Look for the pkg.fmri attribute to identify the package
|
|
|
|
|
let mut pkg_id = String::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
for attr in &manifest.attributes {
|
|
|
|
|
if attr.key == "pkg.fmri" && !attr.values.is_empty() {
|
|
|
|
|
let fmri = &attr.values[0];
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Parse the FMRI using our Fmri type
|
|
|
|
|
match Fmri::parse(fmri) {
|
|
|
|
|
Ok(parsed_fmri) => {
|
|
|
|
|
// Filter by pattern if specified
|
|
|
|
|
if let Some(pat) = pattern {
|
|
|
|
|
// Try to compile the pattern as a regex
|
|
|
|
|
match Regex::new(pat) {
|
|
|
|
|
Ok(regex) => {
|
|
|
|
|
// Use regex matching
|
2025-07-26 10:34:45 +02:00
|
|
|
if !regex
|
|
|
|
|
.is_match(parsed_fmri.stem())
|
|
|
|
|
{
|
2025-07-22 19:39:46 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|
2025-07-22 19:39:46 +02:00
|
|
|
Err(err) => {
|
2025-07-26 23:41:09 +02:00
|
|
|
// Log the error but fall back to the simple string contains
|
2025-12-22 20:10:17 +01:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error compiling regex pattern '{}': {}",
|
|
|
|
|
pat, err
|
|
|
|
|
);
|
2025-07-26 10:34:45 +02:00
|
|
|
if !parsed_fmri.stem().contains(pat)
|
|
|
|
|
{
|
2025-07-22 19:39:46 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Format the package identifier using the FMRI
|
2025-07-24 00:28:33 +02:00
|
|
|
let version = parsed_fmri.version();
|
|
|
|
|
pkg_id = if !version.is_empty() {
|
2025-07-26 10:34:45 +02:00
|
|
|
format!(
|
|
|
|
|
"{}@{}",
|
|
|
|
|
parsed_fmri.stem(),
|
|
|
|
|
version
|
|
|
|
|
)
|
2025-07-22 19:39:46 +02:00
|
|
|
} else {
|
2025-07-24 00:28:33 +02:00
|
|
|
parsed_fmri.stem().to_string()
|
2025-07-22 19:39:46 +02:00
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
break;
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|
2025-07-22 19:39:46 +02:00
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing
|
2025-07-26 23:41:09 +02:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error parsing FMRI '{}': {}",
|
2025-07-26 10:34:45 +02:00
|
|
|
fmri, err
|
|
|
|
|
);
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Skip if we couldn't determine the package ID
|
|
|
|
|
if pkg_id.is_empty() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
// Get or create the content vectors for this package
|
|
|
|
|
let content_vectors = packages
|
|
|
|
|
.entry(pkg_id.clone())
|
|
|
|
|
.or_insert_with(PackageContentVectors::new);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Process file actions
|
2025-07-26 10:34:45 +02:00
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"file".to_string())
|
|
|
|
|
{
|
2025-07-23 22:39:49 +02:00
|
|
|
for file in &manifest.files {
|
|
|
|
|
content_vectors.files.push(file.path.clone());
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Process directory actions
|
2025-07-26 10:34:45 +02:00
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"dir".to_string())
|
|
|
|
|
{
|
2025-07-23 22:39:49 +02:00
|
|
|
for dir in &manifest.directories {
|
|
|
|
|
content_vectors.directories.push(dir.path.clone());
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Process link actions
|
2025-07-26 10:34:45 +02:00
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"link".to_string())
|
|
|
|
|
{
|
2025-07-23 22:39:49 +02:00
|
|
|
for link in &manifest.links {
|
|
|
|
|
content_vectors.links.push(link.path.clone());
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Process dependency actions
|
2025-07-26 10:34:45 +02:00
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"depend".to_string())
|
|
|
|
|
{
|
2025-07-23 22:39:49 +02:00
|
|
|
for depend in &manifest.dependencies {
|
|
|
|
|
if let Some(fmri) = &depend.fmri {
|
|
|
|
|
content_vectors.dependencies.push(fmri.to_string());
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-22 19:39:46 +02:00
|
|
|
// Process license actions
|
2025-07-26 10:34:45 +02:00
|
|
|
if action_types.is_none()
|
|
|
|
|
|| action_types
|
|
|
|
|
.as_ref()
|
|
|
|
|
.unwrap()
|
|
|
|
|
.contains(&"license".to_string())
|
|
|
|
|
{
|
2025-07-23 22:39:49 +02:00
|
|
|
for license in &manifest.licenses {
|
2025-07-26 10:34:45 +02:00
|
|
|
if let Some(path_prop) = license.properties.get("path")
|
|
|
|
|
{
|
|
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(path_prop.value.clone());
|
|
|
|
|
} else if let Some(license_prop) =
|
|
|
|
|
license.properties.get("license")
|
|
|
|
|
{
|
|
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(license_prop.value.clone());
|
2025-07-23 22:39:49 +02:00
|
|
|
} else {
|
2025-07-26 10:34:45 +02:00
|
|
|
content_vectors
|
|
|
|
|
.licenses
|
|
|
|
|
.push(license.payload.clone());
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|
2025-07-22 19:39:46 +02:00
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing other files
|
2025-07-26 23:41:09 +02:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::show_contents: Error parsing manifest file {}: {}",
|
2025-07-26 10:34:45 +02:00
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
2025-07-22 19:39:46 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
// Convert the HashMap to a Vec<PackageContents>
|
|
|
|
|
let package_contents = packages
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|(package_id, content_vectors)| {
|
|
|
|
|
// Only include non-empty vectors
|
|
|
|
|
let files = if content_vectors.files.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(content_vectors.files)
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
let directories = if content_vectors.directories.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(content_vectors.directories)
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
let links = if content_vectors.links.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(content_vectors.links)
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
let dependencies = if content_vectors.dependencies.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(content_vectors.dependencies)
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
let licenses = if content_vectors.licenses.is_empty() {
|
|
|
|
|
None
|
|
|
|
|
} else {
|
|
|
|
|
Some(content_vectors.licenses)
|
|
|
|
|
};
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
PackageContents {
|
|
|
|
|
package_id,
|
|
|
|
|
files,
|
|
|
|
|
directories,
|
|
|
|
|
links,
|
|
|
|
|
dependencies,
|
|
|
|
|
licenses,
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
.collect();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-23 22:39:49 +02:00
|
|
|
Ok(package_contents)
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2026-02-05 15:57:56 +01:00
|
|
|
fn fetch_payload(&self, publisher: &str, digest: &str, dest: &Path) -> Result<()> {
|
2025-08-13 23:23:45 +02:00
|
|
|
// Parse digest; supports both raw hash and source:algorithm:hash
|
|
|
|
|
let parsed = match Digest::from_str(digest) {
|
|
|
|
|
Ok(d) => d,
|
|
|
|
|
Err(e) => return Err(RepositoryError::DigestError(e.to_string())),
|
|
|
|
|
};
|
|
|
|
|
let hash = parsed.hash.clone();
|
|
|
|
|
let algo = parsed.algorithm.clone();
|
|
|
|
|
|
|
|
|
|
if hash.is_empty() {
|
|
|
|
|
return Err(RepositoryError::Other("Empty digest provided".to_string()));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Prepare candidate paths (prefer publisher-specific, then global)
|
|
|
|
|
let cand_pub = Self::construct_file_path_with_publisher(&self.path, publisher, &hash);
|
|
|
|
|
let cand_global = Self::construct_file_path(&self.path, &hash);
|
|
|
|
|
|
|
|
|
|
let source_path = if cand_pub.exists() {
|
|
|
|
|
cand_pub
|
|
|
|
|
} else if cand_global.exists() {
|
|
|
|
|
cand_global
|
|
|
|
|
} else {
|
|
|
|
|
return Err(RepositoryError::NotFound(format!(
|
|
|
|
|
"payload {} not found in repository",
|
|
|
|
|
hash
|
|
|
|
|
)));
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Ensure destination directory exists
|
|
|
|
|
if let Some(parent) = dest.parent() {
|
2026-02-05 23:16:02 +01:00
|
|
|
fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
|
|
|
|
|
path: parent.to_path_buf(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-08-13 23:23:45 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If destination already exists and matches digest, do nothing
|
|
|
|
|
if dest.exists() {
|
2025-12-22 20:10:17 +01:00
|
|
|
let bytes = fs::read(dest).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: dest.to_path_buf(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
|
|
|
|
match crate::digest::Digest::from_bytes(
|
|
|
|
|
&bytes,
|
|
|
|
|
algo.clone(),
|
|
|
|
|
crate::digest::DigestSource::PrimaryPayloadHash,
|
|
|
|
|
) {
|
2025-08-13 23:23:45 +02:00
|
|
|
Ok(comp) if comp.hash == hash => return Ok(()),
|
|
|
|
|
_ => { /* fall through to overwrite */ }
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2026-03-15 20:58:44 +01:00
|
|
|
// Read source content (stored compressed — digest of compressed bytes may not
|
|
|
|
|
// match the primary/uncompressed hash, so we skip verification for local files)
|
2025-12-22 20:10:17 +01:00
|
|
|
let bytes = fs::read(&source_path).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: source_path.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-08-13 23:23:45 +02:00
|
|
|
|
|
|
|
|
// Write atomically
|
|
|
|
|
let tmp = dest.with_extension("tmp");
|
|
|
|
|
{
|
2026-02-05 23:16:02 +01:00
|
|
|
let mut f = File::create(&tmp).map_err(|e| RepositoryError::FileCreateError {
|
|
|
|
|
path: tmp.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
|
|
|
|
f.write_all(&bytes).map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: tmp.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-08-13 23:23:45 +02:00
|
|
|
}
|
2026-02-05 23:16:02 +01:00
|
|
|
fs::rename(&tmp, dest).map_err(|e| RepositoryError::FileRenameError {
|
|
|
|
|
from: tmp,
|
|
|
|
|
to: dest.to_path_buf(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-08-13 23:23:45 +02:00
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn fetch_manifest(
|
2026-02-05 15:57:56 +01:00
|
|
|
&self,
|
2025-08-13 23:23:45 +02:00
|
|
|
publisher: &str,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
) -> Result<crate::actions::Manifest> {
|
|
|
|
|
// Require a concrete version
|
|
|
|
|
let version = fmri.version();
|
|
|
|
|
if version.is_empty() {
|
2025-12-22 20:10:17 +01:00
|
|
|
return Err(RepositoryError::Other(
|
|
|
|
|
"FMRI must include a version to fetch manifest".into(),
|
|
|
|
|
));
|
2025-08-13 23:23:45 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Preferred path: publisher-scoped manifest path
|
|
|
|
|
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
|
|
|
|
|
if path.exists() {
|
|
|
|
|
return crate::actions::Manifest::parse_file(&path).map_err(RepositoryError::from);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Fallbacks: global pkg layout without publisher
|
|
|
|
|
let encoded_stem = Self::url_encode(fmri.stem());
|
|
|
|
|
let encoded_version = Self::url_encode(&version);
|
2025-12-22 20:10:17 +01:00
|
|
|
let alt1 = self
|
|
|
|
|
.path
|
|
|
|
|
.join("pkg")
|
|
|
|
|
.join(&encoded_stem)
|
|
|
|
|
.join(&encoded_version);
|
2025-08-13 23:23:45 +02:00
|
|
|
if alt1.exists() {
|
|
|
|
|
return crate::actions::Manifest::parse_file(&alt1).map_err(RepositoryError::from);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let alt2 = self
|
|
|
|
|
.path
|
|
|
|
|
.join("publisher")
|
|
|
|
|
.join(publisher)
|
|
|
|
|
.join("pkg")
|
|
|
|
|
.join(&encoded_stem)
|
|
|
|
|
.join(&encoded_version);
|
|
|
|
|
if alt2.exists() {
|
|
|
|
|
return crate::actions::Manifest::parse_file(&alt2).map_err(RepositoryError::from);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Err(RepositoryError::NotFound(format!(
|
|
|
|
|
"manifest for {} not found",
|
|
|
|
|
fmri
|
|
|
|
|
)))
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-05 15:57:56 +01:00
|
|
|
fn fetch_manifest_text(&self, publisher: &str, fmri: &Fmri) -> Result<String> {
|
2026-01-20 20:16:58 +01:00
|
|
|
// Require a concrete version
|
|
|
|
|
let version = fmri.version();
|
|
|
|
|
if version.is_empty() {
|
|
|
|
|
return Err(RepositoryError::Other(
|
|
|
|
|
"FMRI must include a version to fetch manifest".into(),
|
|
|
|
|
));
|
|
|
|
|
}
|
|
|
|
|
// Preferred path: publisher-scoped manifest path
|
|
|
|
|
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
|
|
|
|
|
if path.exists() {
|
|
|
|
|
return std::fs::read_to_string(&path)
|
2026-01-25 23:17:49 +01:00
|
|
|
.map_err(|e| RepositoryError::FileReadError { path, source: e });
|
2026-01-20 20:16:58 +01:00
|
|
|
}
|
|
|
|
|
// Fallbacks: global pkg layout without publisher
|
|
|
|
|
let encoded_stem = Self::url_encode(fmri.stem());
|
|
|
|
|
let encoded_version = Self::url_encode(&version);
|
|
|
|
|
let alt1 = self
|
|
|
|
|
.path
|
|
|
|
|
.join("pkg")
|
|
|
|
|
.join(&encoded_stem)
|
|
|
|
|
.join(&encoded_version);
|
|
|
|
|
if alt1.exists() {
|
|
|
|
|
return std::fs::read_to_string(&alt1).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: alt1,
|
|
|
|
|
source: e,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
let alt2 = self
|
|
|
|
|
.path
|
|
|
|
|
.join("publisher")
|
|
|
|
|
.join(publisher)
|
|
|
|
|
.join("pkg")
|
|
|
|
|
.join(&encoded_stem)
|
|
|
|
|
.join(&encoded_version);
|
|
|
|
|
if alt2.exists() {
|
|
|
|
|
return std::fs::read_to_string(&alt2).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: alt2,
|
|
|
|
|
source: e,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
Err(RepositoryError::NotFound(format!(
|
|
|
|
|
"manifest for {} not found",
|
|
|
|
|
fmri
|
|
|
|
|
)))
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-26 10:34:45 +02:00
|
|
|
/// Search for packages in the repository
|
|
|
|
|
fn search(
|
|
|
|
|
&self,
|
|
|
|
|
query: &str,
|
|
|
|
|
publisher: Option<&str>,
|
|
|
|
|
limit: Option<usize>,
|
|
|
|
|
) -> Result<Vec<PackageInfo>> {
|
2025-07-26 23:41:09 +02:00
|
|
|
debug!("Searching for packages with query: {}", query);
|
2025-07-26 15:33:39 +02:00
|
|
|
|
2026-03-14 22:01:48 +01:00
|
|
|
let entries = self.search_detailed(query, publisher, limit, false)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2026-03-14 22:01:48 +01:00
|
|
|
// Deduplicate by FMRI and convert to PackageInfo
|
|
|
|
|
let mut added_fmris = HashSet::new();
|
2025-07-26 10:34:45 +02:00
|
|
|
let mut results = Vec::new();
|
2026-03-14 22:01:48 +01:00
|
|
|
for entry in entries {
|
|
|
|
|
if added_fmris.contains(&entry.fmri) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
if let Ok(fmri) = Fmri::parse(&entry.fmri) {
|
|
|
|
|
results.push(PackageInfo { fmri });
|
|
|
|
|
added_fmris.insert(entry.fmri);
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(results)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl WritableRepository for FileBackend {
    /// Create a new repository at the specified path.
    ///
    /// Creates the directory tree, builds a default configuration carrying
    /// the requested on-disk `version`, persists it (JSON + legacy INI via
    /// `save_config`), and returns the initialized backend.
    fn create<P: AsRef<Path>>(path: P, version: RepositoryVersion) -> Result<Self> {
        let path = path.as_ref();

        // Create the repository directory if it doesn't exist
        fs::create_dir_all(path)?;

        // Create the repository configuration; every field other than the
        // version keeps its default value (no publishers, no properties).
        let config = RepositoryConfig {
            version,
            ..Default::default()
        };

        // Create the repository structure. The catalog and obsoleted
        // managers are initialized lazily elsewhere, so both start as None.
        let repo = FileBackend {
            path: path.to_path_buf(),
            config,
            catalog_manager: None,
            obsoleted_manager: None,
        };

        // Create the repository directories
        repo.create_directories()?;

        // Save the repository configuration
        repo.save_config()?;

        Ok(repo)
    }

    /// Save the repository configuration.
    ///
    /// Writes the modern JSON file and then the legacy `pkg5.repository`
    /// INI file so older tooling keeps working.
    fn save_config(&self) -> Result<()> {
        // Save the modern JSON format
        let config_path = self.path.join(REPOSITORY_CONFIG_FILENAME);
        let config_data = serde_json::to_string_pretty(&self.config)?;
        fs::write(config_path, config_data)?;

        // Save the legacy INI format for backward compatibility
        self.save_legacy_config()?;

        Ok(())
    }

    /// Add a publisher to the repository.
    ///
    /// No-op if the publisher is already registered. Otherwise creates the
    /// publisher's catalog and package directories, writes its `pub.p5i`,
    /// promotes it to default publisher when none is set, and persists the
    /// updated configuration.
    fn add_publisher(&mut self, publisher: &str) -> Result<()> {
        if !self.config.publishers.contains(&publisher.to_string()) {
            self.config.publishers.push(publisher.to_string());

            // Create publisher-specific directories (catalog and pkg).
            fs::create_dir_all(Self::construct_catalog_path(&self.path, publisher))?;
            fs::create_dir_all(Self::construct_package_dir(&self.path, publisher, ""))?;

            // Create the publisher directory if it doesn't exist
            let publisher_dir = self.path.join("publisher").join(publisher);
            fs::create_dir_all(&publisher_dir)?;

            // Create the pub.p5i file for backward compatibility
            self.create_pub_p5i_file(publisher)?;

            // Set as the default publisher if no default publisher is set
            if self.config.default_publisher.is_none() {
                self.config.default_publisher = Some(publisher.to_string());
            }

            // Save the updated configuration
            self.save_config()?;
        }

        Ok(())
    }

    /// Remove a publisher from the repository.
    ///
    /// With `dry_run` set, returns `Ok` without changing anything.
    /// Otherwise drops the publisher from the config, recursively removes
    /// its catalog and package directories, and persists the config.
    /// A publisher that is not registered is silently ignored.
    fn remove_publisher(&mut self, publisher: &str, dry_run: bool) -> Result<()> {
        if let Some(pos) = self.config.publishers.iter().position(|p| p == publisher) {
            if !dry_run {
                self.config.publishers.remove(pos);

                // Remove publisher-specific directories and their contents recursively
                let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
                let pkg_dir = Self::construct_package_dir(&self.path, publisher, "");

                // Remove the catalog directory if it exists
                if catalog_dir.exists() {
                    fs::remove_dir_all(&catalog_dir).map_err(|e| {
                        RepositoryError::Other(format!("Failed to remove catalog directory: {}", e))
                    })?;
                }

                // Remove the package directory if it exists
                if pkg_dir.exists() {
                    fs::remove_dir_all(&pkg_dir).map_err(|e| {
                        RepositoryError::Other(format!("Failed to remove package directory: {}", e))
                    })?;
                }

                // Save the updated configuration
                self.save_config()?;
            }
        }

        Ok(())
    }

    /// Set a repository property and persist the configuration.
    fn set_property(&mut self, property: &str, value: &str) -> Result<()> {
        self.config
            .properties
            .insert(property.to_string(), value.to_string());
        self.save_config()?;
        Ok(())
    }

    /// Set a publisher property.
    ///
    /// The value is stored in the shared properties map under the key
    /// `"publisher/property"`. Fails with `PublisherNotFound` if the
    /// publisher is not registered.
    fn set_publisher_property(
        &mut self,
        publisher: &str,
        property: &str,
        value: &str,
    ) -> Result<()> {
        // Check if the publisher exists
        if !self.config.publishers.contains(&publisher.to_string()) {
            return Err(RepositoryError::PublisherNotFound(publisher.to_string()));
        }

        // Create the property key in the format "publisher/property"
        let key = format!("{}/{}", publisher, property);

        // Set the property
        self.config.properties.insert(key, value.to_string());

        // Save the updated configuration
        self.save_config()?;

        Ok(())
    }

    /// Rebuild repository metadata.
    ///
    /// For the named publisher (or all publishers when `None`), rebuilds
    /// the catalog and the SQLite catalog shards unless `no_catalog` is
    /// set. NOTE(review): `no_index` is only logged here — no index
    /// rebuild step is visible in this method; confirm whether that is
    /// intentional.
    fn rebuild(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()> {
        debug!(
            "rebuild called with publisher: {:?}, no_catalog: {}, no_index: {}",
            publisher, no_catalog, no_index
        );

        // Filter publishers if specified; a named publisher must exist.
        let publishers = if let Some(pub_name) = publisher {
            if !self.config.publishers.contains(&pub_name.to_string()) {
                return Err(RepositoryError::PublisherNotFound(pub_name.to_string()));
            }
            debug!("rebuild: using specified publisher: {}", pub_name);
            vec![pub_name.to_string()]
        } else {
            debug!(
                "rebuild: using all publishers: {:?}",
                self.config.publishers
            );
            self.config.publishers.clone()
        };

        // For each publisher, rebuild metadata
        for pub_name in publishers {
            info!("Rebuilding metadata for publisher: {}", pub_name);

            if !no_catalog {
                info!("Rebuilding catalog...");
                self.rebuild_catalog(&pub_name, true)?;

                // Build SQLite catalog shards (active.db, obsolete.db, fts.db)
                info!("Building catalog shards...");
                let catalog_dir = Self::construct_catalog_path(&self.path, &pub_name);
                let shard_dir = self.shard_dir(&pub_name);
                crate::repository::sqlite_catalog::build_shards(
                    &catalog_dir,
                    &pub_name,
                    &shard_dir,
                )
                .map_err(|e| {
                    RepositoryError::Other(format!("Failed to build catalog shards: {}", e.message))
                })?;
            }
        }

        Ok(())
    }

    /// Refresh repository metadata.
    ///
    /// Currently performs the same catalog + shard rebuild as `rebuild`;
    /// `_no_index` is accepted for interface parity but unused.
    fn refresh(&self, publisher: Option<&str>, no_catalog: bool, _no_index: bool) -> Result<()> {
        // Filter publishers if specified; a named publisher must exist.
        let publishers = if let Some(pub_name) = publisher {
            if !self.config.publishers.contains(&pub_name.to_string()) {
                return Err(RepositoryError::PublisherNotFound(pub_name.to_string()));
            }
            vec![pub_name.to_string()]
        } else {
            self.config.publishers.clone()
        };

        // For each publisher, refresh metadata
        for pub_name in publishers {
            info!("Refreshing metadata for publisher: {}", pub_name);

            if !no_catalog {
                info!("Refreshing catalog...");
                self.rebuild_catalog(&pub_name, true)?;

                // Build SQLite catalog shards (active.db, obsolete.db, fts.db)
                info!("Building catalog shards...");
                let catalog_dir = Self::construct_catalog_path(&self.path, &pub_name);
                let shard_dir = self.shard_dir(&pub_name);
                crate::repository::sqlite_catalog::build_shards(
                    &catalog_dir,
                    &pub_name,
                    &shard_dir,
                )
                .map_err(|e| {
                    RepositoryError::Other(format!("Failed to build catalog shards: {}", e.message))
                })?;
            }
        }

        Ok(())
    }

    /// Set the default publisher for the repository.
    ///
    /// Fails with `PublisherNotFound` if the publisher is not registered.
    fn set_default_publisher(&mut self, publisher: &str) -> Result<()> {
        // Check if the publisher exists
        if !self.config.publishers.contains(&publisher.to_string()) {
            return Err(RepositoryError::PublisherNotFound(publisher.to_string()));
        }

        // Set the default publisher
        self.config.default_publisher = Some(publisher.to_string());

        // Save the updated configuration
        self.save_config()?;

        Ok(())
    }
}
|
|
|
|
|
|
|
|
|
|
impl FileBackend {
|
2025-12-09 12:49:25 +01:00
|
|
|
/// Save catalog.attrs for a publisher using atomic write and SHA-1 signature
|
|
|
|
|
pub fn save_catalog_attrs(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
attrs: &mut crate::repository::catalog::CatalogAttrs,
|
|
|
|
|
) -> Result<String> {
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
|
|
|
|
std::fs::create_dir_all(&catalog_dir)?;
|
|
|
|
|
let attrs_path = catalog_dir.join("catalog.attrs");
|
|
|
|
|
super::catalog_writer::write_catalog_attrs(&attrs_path, attrs)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Save a catalog part for a publisher using atomic write and SHA-1 signature
|
|
|
|
|
pub fn save_catalog_part(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
part_name: &str,
|
|
|
|
|
part: &mut crate::repository::catalog::CatalogPart,
|
|
|
|
|
) -> Result<String> {
|
|
|
|
|
if part_name.contains('/') || part_name.contains('\\') {
|
|
|
|
|
return Err(RepositoryError::PathPrefixError(part_name.to_string()));
|
|
|
|
|
}
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
|
|
|
|
std::fs::create_dir_all(&catalog_dir)?;
|
|
|
|
|
let part_path = catalog_dir.join(part_name);
|
|
|
|
|
super::catalog_writer::write_catalog_part(&part_path, part)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Append a single update entry to the current update log file for a publisher and locale.
|
|
|
|
|
/// If no current log exists, creates one using current timestamp.
|
|
|
|
|
pub fn append_update(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
locale: &str,
|
|
|
|
|
fmri: &crate::fmri::Fmri,
|
|
|
|
|
op_type: crate::repository::catalog::CatalogOperationType,
|
2025-12-22 22:42:56 +01:00
|
|
|
catalog_parts: std::collections::BTreeMap<
|
2025-12-22 20:10:17 +01:00
|
|
|
String,
|
2025-12-22 22:42:56 +01:00
|
|
|
std::collections::BTreeMap<String, Vec<String>>,
|
2025-12-22 20:10:17 +01:00
|
|
|
>,
|
2025-12-09 12:49:25 +01:00
|
|
|
signature_sha1: Option<String>,
|
|
|
|
|
) -> Result<()> {
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
|
|
|
|
std::fs::create_dir_all(&catalog_dir)?;
|
|
|
|
|
|
|
|
|
|
// Locate latest update file for locale
|
|
|
|
|
let mut latest: Option<PathBuf> = None;
|
|
|
|
|
if let Ok(read_dir) = std::fs::read_dir(&catalog_dir) {
|
|
|
|
|
for e in read_dir.flatten() {
|
|
|
|
|
let p = e.path();
|
|
|
|
|
if let Some(name) = p.file_name().and_then(|s| s.to_str()) {
|
|
|
|
|
if name.starts_with("update.") && name.ends_with(&format!(".{}", locale)) {
|
|
|
|
|
if latest.as_ref().map(|lp| p > *lp).unwrap_or(true) {
|
|
|
|
|
latest = Some(p);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If none, create a new filename using current timestamp in basic format
|
|
|
|
|
let update_path = match latest {
|
|
|
|
|
Some(p) => p,
|
|
|
|
|
None => {
|
|
|
|
|
let now = std::time::SystemTime::now();
|
2025-12-22 22:42:56 +01:00
|
|
|
let ts = crate::repository::catalog::format_iso8601_basic(&now); // e.g., 20090508T161025.686485Z
|
2025-12-09 12:49:25 +01:00
|
|
|
let stem = ts.split('.').next().unwrap_or(&ts); // take up to seconds
|
|
|
|
|
catalog_dir.join(format!("update.{}.{}", stem, locale))
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Load or create log
|
|
|
|
|
let mut log = if update_path.exists() {
|
|
|
|
|
crate::repository::catalog::UpdateLog::load(&update_path)?
|
|
|
|
|
} else {
|
|
|
|
|
crate::repository::catalog::UpdateLog::new()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Append entry
|
|
|
|
|
log.add_update(publisher, fmri, op_type, catalog_parts, signature_sha1);
|
|
|
|
|
let _ = super::catalog_writer::write_update_log(&update_path, &mut log)?;
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Rotate the update log file by creating a new empty file with the provided timestamp (basic format).
|
|
|
|
|
/// If `timestamp_basic` is None, the current time is used. Timestamp should match catalog v1 naming: YYYYMMDDThhmmssZ
|
|
|
|
|
pub fn rotate_update_file(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
locale: &str,
|
|
|
|
|
timestamp_basic: Option<String>,
|
|
|
|
|
) -> Result<PathBuf> {
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
|
|
|
|
std::fs::create_dir_all(&catalog_dir)?;
|
|
|
|
|
let ts_basic = match timestamp_basic {
|
|
|
|
|
Some(s) => s,
|
|
|
|
|
None => {
|
|
|
|
|
let now = std::time::SystemTime::now();
|
2025-12-22 22:42:56 +01:00
|
|
|
let ts = crate::repository::catalog::format_iso8601_basic(&now);
|
2025-12-09 12:49:25 +01:00
|
|
|
ts.split('.').next().unwrap_or(&ts).to_string()
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
let path = catalog_dir.join(format!("update.{}.{}", ts_basic, locale));
|
|
|
|
|
let mut log = crate::repository::catalog::UpdateLog::new();
|
|
|
|
|
let _ = super::catalog_writer::write_update_log(&path, &mut log)?;
|
|
|
|
|
Ok(path)
|
|
|
|
|
}
|
2026-01-20 20:16:58 +01:00
|
|
|
|
2025-12-08 22:45:39 +01:00
|
|
|
/// Fetch catalog file path
|
|
|
|
|
pub fn get_catalog_file_path(&self, publisher: &str, filename: &str) -> Result<PathBuf> {
|
|
|
|
|
if filename.contains('/') || filename.contains('\\') {
|
|
|
|
|
return Err(RepositoryError::PathPrefixError(filename.to_string()));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
|
|
|
|
let path = catalog_dir.join(filename);
|
|
|
|
|
|
|
|
|
|
if path.exists() {
|
|
|
|
|
Ok(path)
|
|
|
|
|
} else {
|
|
|
|
|
Err(RepositoryError::NotFound(format!(
|
|
|
|
|
"Catalog file {} for publisher {} not found",
|
|
|
|
|
filename, publisher
|
|
|
|
|
)))
|
|
|
|
|
}
|
2025-12-08 21:36:37 +01:00
|
|
|
}
|
|
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
/// Save the legacy pkg5.repository INI file for backward compatibility
|
|
|
|
|
pub fn save_legacy_config(&self) -> Result<()> {
|
|
|
|
|
let legacy_config_path = self.path.join("pkg5.repository");
|
|
|
|
|
let mut conf = Ini::new();
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
// Add publisher section with default publisher
|
|
|
|
|
if let Some(default_publisher) = &self.config.default_publisher {
|
|
|
|
|
conf.with_section(Some("publisher"))
|
|
|
|
|
.set("prefix", default_publisher);
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
// Add repository section with version and default values
|
|
|
|
|
conf.with_section(Some("repository"))
|
|
|
|
|
.set("version", "4")
|
|
|
|
|
.set("trust-anchor-directory", "/etc/certs/CA/")
|
|
|
|
|
.set("signature-required-names", "[]")
|
|
|
|
|
.set("check-certificate-revocation", "False");
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
// Add CONFIGURATION section with version
|
2025-12-22 20:10:17 +01:00
|
|
|
conf.with_section(Some("CONFIGURATION")).set("version", "4");
|
|
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
// Write the INI file
|
|
|
|
|
conf.write_to_file(legacy_config_path)?;
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-08-02 13:17:49 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Create a pub.p5i file for a publisher for backward compatibility
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-08-02 13:17:49 +02:00
|
|
|
/// Format: base_path/publisher/publisher_name/pub.p5i
|
|
|
|
|
fn create_pub_p5i_file(&self, publisher: &str) -> Result<()> {
|
|
|
|
|
// Define the structure for the pub.p5i file
|
|
|
|
|
#[derive(serde::Serialize)]
|
|
|
|
|
struct P5iPublisherInfo {
|
|
|
|
|
alias: Option<String>,
|
|
|
|
|
name: String,
|
|
|
|
|
packages: Vec<String>,
|
|
|
|
|
repositories: Vec<String>,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
#[derive(serde::Serialize)]
|
|
|
|
|
struct P5iFile {
|
|
|
|
|
packages: Vec<String>,
|
|
|
|
|
publishers: Vec<P5iPublisherInfo>,
|
|
|
|
|
version: u32,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Create the publisher info
|
|
|
|
|
let publisher_info = P5iPublisherInfo {
|
|
|
|
|
alias: None,
|
|
|
|
|
name: publisher.to_string(),
|
|
|
|
|
packages: Vec::new(),
|
|
|
|
|
repositories: Vec::new(),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Create the p5i file content
|
|
|
|
|
let p5i_content = P5iFile {
|
|
|
|
|
packages: Vec::new(),
|
|
|
|
|
publishers: vec![publisher_info],
|
|
|
|
|
version: 1,
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Serialize to JSON
|
|
|
|
|
let json_content = serde_json::to_string_pretty(&p5i_content)?;
|
|
|
|
|
|
|
|
|
|
// Create the path for the pub.p5i file
|
|
|
|
|
let pub_p5i_path = self.path.join("publisher").join(publisher).join("pub.p5i");
|
|
|
|
|
|
|
|
|
|
// Write the file
|
|
|
|
|
fs::write(pub_p5i_path, json_content)?;
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Helper method to construct a catalog path consistently
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Format: base_path/publisher/publisher_name/catalog
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn construct_catalog_path(base_path: &Path, publisher: &str) -> PathBuf {
|
2025-07-31 00:18:21 +02:00
|
|
|
base_path.join("publisher").join(publisher).join("catalog")
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-04 22:39:42 +01:00
|
|
|
/// Helper method to construct a shard directory path for catalog v2 shards
|
|
|
|
|
///
|
|
|
|
|
/// Format: base_path/publisher/publisher_name/catalog2
|
|
|
|
|
pub fn shard_dir(&self, publisher: &str) -> PathBuf {
|
|
|
|
|
self.path.join("publisher").join(publisher).join("catalog2")
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
/// Helper method to construct a manifest path consistently
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Format: base_path/publisher/publisher_name/pkg/stem/encoded_version
|
|
|
|
|
pub fn construct_manifest_path(
|
2025-07-27 15:22:49 +02:00
|
|
|
base_path: &Path,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
stem: &str,
|
|
|
|
|
version: &str,
|
|
|
|
|
) -> PathBuf {
|
2025-07-31 00:18:21 +02:00
|
|
|
let pkg_dir = Self::construct_package_dir(base_path, publisher, stem);
|
2025-07-27 15:20:45 +02:00
|
|
|
let encoded_version = Self::url_encode(version);
|
|
|
|
|
pkg_dir.join(encoded_version)
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Helper method to construct a package directory path consistently
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Format: base_path/publisher/publisher_name/pkg/url_encoded_stem
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn construct_package_dir(base_path: &Path, publisher: &str, stem: &str) -> PathBuf {
|
2025-07-31 00:18:21 +02:00
|
|
|
let encoded_stem = Self::url_encode(stem);
|
2025-12-22 20:10:17 +01:00
|
|
|
base_path
|
|
|
|
|
.join("publisher")
|
|
|
|
|
.join(publisher)
|
|
|
|
|
.join("pkg")
|
|
|
|
|
.join(encoded_stem)
|
2025-07-31 00:18:21 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Helper method to construct a file path consistently
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Format: base_path/file/XX/hash
|
|
|
|
|
/// Where XX is the first two characters of the hash
|
2025-12-22 20:10:17 +01:00
|
|
|
pub fn construct_file_path(base_path: &Path, hash: &str) -> PathBuf {
|
2025-07-31 00:18:21 +02:00
|
|
|
if hash.len() < 2 {
|
|
|
|
|
// Fallback for very short hashes (shouldn't happen with SHA256)
|
|
|
|
|
base_path.join("file").join(hash)
|
|
|
|
|
} else {
|
|
|
|
|
// Extract the first two characters from the hash
|
|
|
|
|
let first_two = &hash[0..2];
|
|
|
|
|
|
|
|
|
|
// Create the path: $REPO/file/XX/XXYY...
|
2025-12-22 20:10:17 +01:00
|
|
|
base_path.join("file").join(first_two).join(hash)
|
2025-07-31 00:18:21 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Helper method to construct a file path consistently with publisher
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Format: base_path/publisher/publisher_name/file/XX/hash
|
|
|
|
|
/// Where XX is the first two characters of the hash
|
|
|
|
|
pub fn construct_file_path_with_publisher(
|
|
|
|
|
base_path: &Path,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
hash: &str,
|
|
|
|
|
) -> PathBuf {
|
|
|
|
|
if hash.len() < 2 {
|
|
|
|
|
// Fallback for very short hashes (shouldn't happen with SHA256)
|
2025-12-22 20:10:17 +01:00
|
|
|
base_path
|
|
|
|
|
.join("publisher")
|
|
|
|
|
.join(publisher)
|
|
|
|
|
.join("file")
|
|
|
|
|
.join(hash)
|
2025-07-31 00:18:21 +02:00
|
|
|
} else {
|
|
|
|
|
// Extract the first two characters from the hash
|
|
|
|
|
let first_two = &hash[0..2];
|
|
|
|
|
|
|
|
|
|
// Create the path: $REPO/publisher/publisher_name/file/XX/XXYY...
|
|
|
|
|
base_path
|
|
|
|
|
.join("publisher")
|
|
|
|
|
.join(publisher)
|
|
|
|
|
.join("file")
|
|
|
|
|
.join(first_two)
|
|
|
|
|
.join(hash)
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-27 15:20:45 +02:00
|
|
|
|
|
|
|
|
/// Recursively find manifest files in a directory and its subdirectories
|
|
|
|
|
fn find_manifests_recursive(
|
|
|
|
|
&self,
|
|
|
|
|
dir: &Path,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
pattern: Option<&str>,
|
|
|
|
|
packages: &mut Vec<PackageInfo>,
|
|
|
|
|
) -> Result<()> {
|
|
|
|
|
if let Ok(entries) = fs::read_dir(dir) {
|
|
|
|
|
for entry in entries.flatten() {
|
|
|
|
|
let path = entry.path();
|
|
|
|
|
|
|
|
|
|
if path.is_dir() {
|
|
|
|
|
// Recursively search subdirectories
|
|
|
|
|
self.find_manifests_recursive(&path, publisher, pattern, packages)?;
|
|
|
|
|
} else if path.is_file() {
|
2025-12-23 14:09:14 +01:00
|
|
|
// Check if this is a .json file and if a corresponding file without .json exists
|
|
|
|
|
if let Some(extension) = path.extension() {
|
|
|
|
|
if extension == "json" {
|
|
|
|
|
let path_without_ext = path.with_extension("");
|
|
|
|
|
if path_without_ext.exists() {
|
|
|
|
|
// Skip this .json file as we'll process the other one
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Try to read the first few bytes of the file to check if it's a manifest file
|
|
|
|
|
let mut file = match fs::File::open(&path) {
|
|
|
|
|
Ok(file) => file,
|
|
|
|
|
Err(err) => {
|
2025-07-27 15:22:49 +02:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::find_manifests_recursive: Error opening file {}: {}",
|
|
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
2025-07-27 15:20:45 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
let mut buffer = [0; 1024];
|
|
|
|
|
let bytes_read = match file.read(&mut buffer) {
|
|
|
|
|
Ok(bytes) => bytes,
|
|
|
|
|
Err(err) => {
|
2025-07-27 15:22:49 +02:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::find_manifests_recursive: Error reading file {}: {}",
|
|
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
2025-07-27 15:20:45 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
};
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Check if the file starts with a valid manifest marker
|
|
|
|
|
// For example, if it's a JSON file, it should start with '{'
|
2025-07-27 15:22:49 +02:00
|
|
|
if bytes_read == 0
|
|
|
|
|
|| (buffer[0] != b'{' && buffer[0] != b'<' && buffer[0] != b's')
|
|
|
|
|
{
|
2025-07-27 15:20:45 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Process manifest files
|
|
|
|
|
match Manifest::parse_file(&path) {
|
|
|
|
|
Ok(manifest) => {
|
|
|
|
|
// Look for the pkg.fmri attribute
|
|
|
|
|
for attr in &manifest.attributes {
|
|
|
|
|
if attr.key == "pkg.fmri" && !attr.values.is_empty() {
|
|
|
|
|
let fmri = &attr.values[0];
|
|
|
|
|
|
|
|
|
|
// Parse the FMRI using our Fmri type
|
|
|
|
|
match Fmri::parse(fmri) {
|
|
|
|
|
Ok(parsed_fmri) => {
|
|
|
|
|
// Filter by pattern if specified
|
|
|
|
|
if let Some(pat) = pattern {
|
|
|
|
|
// Try to compile the pattern as a regex
|
|
|
|
|
match Regex::new(pat) {
|
|
|
|
|
Ok(regex) => {
|
|
|
|
|
// Use regex matching
|
|
|
|
|
if !regex.is_match(parsed_fmri.stem()) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but fall back to the simple string contains
|
2025-12-22 20:10:17 +01:00
|
|
|
error!(
|
|
|
|
|
"FileBackend::find_manifests_recursive: Error compiling regex pattern '{}': {}",
|
|
|
|
|
pat, err
|
|
|
|
|
);
|
2025-07-27 15:20:45 +02:00
|
|
|
if !parsed_fmri.stem().contains(pat) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If the publisher is not set in the FMRI, use the current publisher
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
let final_fmri = if parsed_fmri.publisher.is_none() {
|
2025-07-27 15:20:45 +02:00
|
|
|
let mut fmri_with_publisher = parsed_fmri.clone();
|
2025-07-27 15:22:49 +02:00
|
|
|
fmri_with_publisher.publisher =
|
|
|
|
|
Some(publisher.to_string());
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
fmri_with_publisher
|
|
|
|
|
} else {
|
|
|
|
|
parsed_fmri.clone()
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
// Check if the package is obsoleted
|
2025-12-22 20:10:17 +01:00
|
|
|
let is_obsoleted = if let Some(obsoleted_manager) =
|
|
|
|
|
&self.obsoleted_manager
|
|
|
|
|
{
|
|
|
|
|
obsoleted_manager
|
2026-02-05 15:57:56 +01:00
|
|
|
.lock()
|
|
|
|
|
.map(|mgr| mgr.is_obsoleted(publisher, &final_fmri))
|
|
|
|
|
.unwrap_or(false)
|
2025-07-27 15:20:45 +02:00
|
|
|
} else {
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
false
|
|
|
|
|
};
|
2025-12-22 20:10:17 +01:00
|
|
|
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
// Only add the package if it's not obsoleted
|
|
|
|
|
if !is_obsoleted {
|
2025-07-27 15:20:45 +02:00
|
|
|
// Create a PackageInfo struct and add it to the list
|
2025-12-22 20:10:17 +01:00
|
|
|
packages.push(PackageInfo { fmri: final_fmri });
|
2025-07-27 15:20:45 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Found the package info, no need to check other attributes
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::find_manifests_recursive: Error parsing FMRI '{}': {}",
|
|
|
|
|
fmri, err
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
Err(err) => {
|
|
|
|
|
// Log the error but continue processing other files
|
|
|
|
|
error!(
|
|
|
|
|
"FileBackend::find_manifests_recursive: Error parsing manifest file {}: {}",
|
|
|
|
|
path.display(),
|
|
|
|
|
err
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Create the repository directories
|
|
|
|
|
fn create_directories(&self) -> Result<()> {
|
|
|
|
|
// Create the main repository directories
|
2025-07-31 00:18:21 +02:00
|
|
|
fs::create_dir_all(self.path.join("publisher"))?;
|
2025-07-21 22:02:05 +02:00
|
|
|
fs::create_dir_all(self.path.join("index"))?;
|
|
|
|
|
fs::create_dir_all(self.path.join("trans"))?;
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
fs::create_dir_all(self.path.join("obsoleted"))?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-12-09 14:23:55 +01:00
|
|
|
/// Rebuild catalog for a publisher (delegates to the batched implementation with defaults)
|
2025-07-27 13:43:56 +02:00
|
|
|
pub fn rebuild_catalog(&self, publisher: &str, create_update_log: bool) -> Result<()> {
|
2025-12-09 14:23:55 +01:00
|
|
|
let opts = crate::repository::BatchOptions::default();
|
|
|
|
|
self.rebuild_catalog_batched(publisher, create_update_log, opts)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Rebuild catalog for a publisher using a batched algorithm.
|
|
|
|
|
///
|
|
|
|
|
/// Batching currently streams package processing while accumulating in-memory
|
|
|
|
|
/// structures for the three catalog parts and optional update log, and emits
|
|
|
|
|
/// progress spans per batch. Future work may flush partial structures to
|
|
|
|
|
/// disk, but the public API is stable.
|
|
|
|
|
pub fn rebuild_catalog_batched(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
create_update_log: bool,
|
|
|
|
|
opts: crate::repository::BatchOptions,
|
|
|
|
|
) -> Result<()> {
|
|
|
|
|
info!("Rebuilding catalog (batched) for publisher: {}", publisher);
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-12-23 14:09:14 +01:00
|
|
|
let quote_action_value = |s: &str| -> String {
|
2026-01-18 12:51:55 +01:00
|
|
|
if s.is_empty()
|
|
|
|
|
|| s.contains(char::is_whitespace)
|
|
|
|
|
|| s.contains('"')
|
|
|
|
|
|| s.contains('\'')
|
|
|
|
|
{
|
2025-12-23 14:09:14 +01:00
|
|
|
format!("\"{}\"", s.replace("\"", "\\\""))
|
|
|
|
|
} else {
|
|
|
|
|
s.to_string()
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Create the catalog directory for the publisher if it doesn't exist
|
2025-07-31 00:18:21 +02:00
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Publisher catalog directory: {}", catalog_dir.display());
|
|
|
|
|
fs::create_dir_all(&catalog_dir)?;
|
|
|
|
|
debug!("Created publisher catalog directory");
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Collect package data
|
2025-07-24 00:28:33 +02:00
|
|
|
let packages = self.list_packages(Some(publisher), None)?;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Prepare data structures for catalog parts
|
|
|
|
|
let mut base_entries = Vec::new();
|
|
|
|
|
let mut dependency_entries = Vec::new();
|
|
|
|
|
let mut summary_entries = Vec::new();
|
|
|
|
|
let mut update_entries = Vec::new();
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Track package counts
|
|
|
|
|
let mut package_count = 0;
|
|
|
|
|
let mut package_version_count = 0;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-12-09 14:23:55 +01:00
|
|
|
// Process each package in deterministic order (by FMRI string)
|
|
|
|
|
let mut packages = packages;
|
|
|
|
|
packages.sort_by(|a, b| a.fmri.to_string().cmp(&b.fmri.to_string()));
|
|
|
|
|
|
|
|
|
|
let mut processed_in_batch = 0usize;
|
|
|
|
|
let mut batch_no = 0usize;
|
|
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
for package in packages {
|
|
|
|
|
let fmri = &package.fmri;
|
|
|
|
|
let stem = fmri.stem();
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Skip if no version
|
|
|
|
|
if fmri.version().is_empty() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Get the package version
|
|
|
|
|
let version = fmri.version();
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Construct the manifest path using the helper method
|
2025-07-27 15:22:49 +02:00
|
|
|
let manifest_path =
|
|
|
|
|
Self::construct_manifest_path(&self.path, publisher, stem, &version);
|
|
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Check if the package directory exists
|
|
|
|
|
if let Some(pkg_dir) = manifest_path.parent() {
|
|
|
|
|
if !pkg_dir.exists() {
|
2025-07-27 15:22:49 +02:00
|
|
|
error!(
|
|
|
|
|
"Package directory {} does not exist skipping",
|
|
|
|
|
pkg_dir.display()
|
|
|
|
|
);
|
2025-07-27 15:20:45 +02:00
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
if !manifest_path.exists() {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Read the manifest content for hash calculation
|
2025-12-22 20:10:17 +01:00
|
|
|
let manifest_content =
|
|
|
|
|
fs::read_to_string(&manifest_path).map_err(|e| RepositoryError::FileReadError {
|
|
|
|
|
path: manifest_path.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 15:20:45 +02:00
|
|
|
// Parse the manifest using parse_file which handles JSON correctly
|
|
|
|
|
let manifest = Manifest::parse_file(&manifest_path)?;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-12-09 12:12:57 +01:00
|
|
|
// Calculate SHA-1 hash of the manifest for legacy catalog signature compatibility
|
|
|
|
|
let mut hasher = sha1::Sha1::new();
|
2025-07-24 00:28:33 +02:00
|
|
|
hasher.update(manifest_content.as_bytes());
|
2025-12-09 12:12:57 +01:00
|
|
|
let signature = hasher.finalize();
|
|
|
|
|
let signature = format!("{:x}", signature);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add to base entries
|
|
|
|
|
base_entries.push((fmri.clone(), None, signature.clone()));
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Extract dependency actions
|
|
|
|
|
let mut dependency_actions = Vec::new();
|
|
|
|
|
for dep in &manifest.dependencies {
|
|
|
|
|
if let Some(dep_fmri) = &dep.fmri {
|
2026-02-05 21:56:53 +01:00
|
|
|
let mut action = format!(
|
2025-07-26 10:34:45 +02:00
|
|
|
"depend fmri={} type={}",
|
|
|
|
|
dep_fmri, dep.dependency_type
|
2026-02-05 21:56:53 +01:00
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Add predicate for conditional dependencies
|
|
|
|
|
if let Some(predicate_fmri) = &dep.predicate {
|
|
|
|
|
action.push_str(&format!(" predicate={}", predicate_fmri));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add root-image if present
|
|
|
|
|
if !dep.root_image.is_empty() {
|
|
|
|
|
action.push_str(&format!(" root-image={}", dep.root_image));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add any optional properties
|
|
|
|
|
for prop in &dep.optional {
|
|
|
|
|
action.push_str(&format!(" {}={}", prop.key, quote_action_value(&prop.value)));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add facets
|
|
|
|
|
for (facet_name, facet) in &dep.facets {
|
|
|
|
|
action.push_str(&format!(" facet.{}={}", facet_name, quote_action_value(&facet.value)));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add any optional properties
|
|
|
|
|
for prop in &dep.optional {
|
|
|
|
|
action.push_str(&format!(" {}={}", prop.key, quote_action_value(&prop.value)));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Add facets
|
|
|
|
|
for (facet_name, facet) in &dep.facets {
|
|
|
|
|
action.push_str(&format!(" facet.{}={}", facet_name, quote_action_value(&facet.value)));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
dependency_actions.push(action);
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Extract variant and facet actions
|
|
|
|
|
for attr in &manifest.attributes {
|
|
|
|
|
if attr.key.starts_with("variant.") || attr.key.starts_with("facet.") {
|
2026-01-18 12:51:55 +01:00
|
|
|
let values_str = attr
|
|
|
|
|
.values
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|s| quote_action_value(s))
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
.join(" value=");
|
2025-07-24 00:28:33 +02:00
|
|
|
dependency_actions.push(format!("set name={} value={}", attr.key, values_str));
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add to dependency entries if there are dependency actions
|
|
|
|
|
if !dependency_actions.is_empty() {
|
2025-07-26 10:34:45 +02:00
|
|
|
dependency_entries.push((
|
|
|
|
|
fmri.clone(),
|
|
|
|
|
Some(dependency_actions.clone()),
|
|
|
|
|
signature.clone(),
|
|
|
|
|
));
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Extract summary actions (set actions excluding variants and facets)
|
|
|
|
|
let mut summary_actions = Vec::new();
|
|
|
|
|
for attr in &manifest.attributes {
|
2025-12-23 13:19:51 +01:00
|
|
|
if !attr.key.starts_with("variant.")
|
|
|
|
|
&& !attr.key.starts_with("facet.")
|
|
|
|
|
&& attr.key != "pkg.fmri"
|
|
|
|
|
{
|
2026-01-18 12:51:55 +01:00
|
|
|
let values_str = attr
|
|
|
|
|
.values
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|s| quote_action_value(s))
|
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
|
.join(" value=");
|
2025-07-24 00:28:33 +02:00
|
|
|
summary_actions.push(format!("set name={} value={}", attr.key, values_str));
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add to summary entries if there are summary actions
|
|
|
|
|
if !summary_actions.is_empty() {
|
2025-07-26 10:34:45 +02:00
|
|
|
summary_entries.push((
|
|
|
|
|
fmri.clone(),
|
|
|
|
|
Some(summary_actions.clone()),
|
|
|
|
|
signature.clone(),
|
|
|
|
|
));
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Prepare update entry if needed
|
|
|
|
|
if create_update_log {
|
2025-12-22 22:42:56 +01:00
|
|
|
let mut catalog_parts = BTreeMap::new();
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add dependency actions to update entry
|
|
|
|
|
if !dependency_actions.is_empty() {
|
2025-12-22 22:42:56 +01:00
|
|
|
let mut actions = BTreeMap::new();
|
2025-07-24 00:28:33 +02:00
|
|
|
actions.insert("actions".to_string(), dependency_actions);
|
|
|
|
|
catalog_parts.insert("catalog.dependency.C".to_string(), actions);
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add summary actions to update entry
|
|
|
|
|
if !summary_actions.is_empty() {
|
2025-12-22 22:42:56 +01:00
|
|
|
let mut actions = BTreeMap::new();
|
2025-07-24 00:28:33 +02:00
|
|
|
actions.insert("actions".to_string(), summary_actions);
|
|
|
|
|
catalog_parts.insert("catalog.summary.C".to_string(), actions);
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Add to update entries
|
|
|
|
|
update_entries.push((fmri.clone(), catalog_parts, signature));
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-24 00:28:33 +02:00
|
|
|
// Update counts
|
|
|
|
|
package_count += 1;
|
|
|
|
|
package_version_count += 1;
|
2025-12-09 14:23:55 +01:00
|
|
|
|
|
|
|
|
processed_in_batch += 1;
|
|
|
|
|
if processed_in_batch >= opts.batch_size {
|
|
|
|
|
batch_no += 1;
|
2025-12-22 20:10:17 +01:00
|
|
|
tracing::debug!(
|
|
|
|
|
publisher,
|
|
|
|
|
batch_no,
|
|
|
|
|
processed_in_batch,
|
|
|
|
|
"catalog rebuild batch processed"
|
|
|
|
|
);
|
2025-12-09 14:23:55 +01:00
|
|
|
processed_in_batch = 0;
|
|
|
|
|
}
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Create and save catalog parts
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Create a catalog.attrs file
|
|
|
|
|
let now = SystemTime::now();
|
2025-12-22 22:42:56 +01:00
|
|
|
let timestamp = crate::repository::catalog::format_iso8601_basic(&now);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Get the CatalogAttrs struct definition to see what fields it has
|
|
|
|
|
let mut attrs = crate::repository::catalog::CatalogAttrs {
|
|
|
|
|
created: timestamp.clone(),
|
|
|
|
|
last_modified: timestamp.clone(),
|
|
|
|
|
package_count,
|
|
|
|
|
package_version_count,
|
2025-12-22 22:42:56 +01:00
|
|
|
parts: BTreeMap::new(),
|
2025-07-27 13:43:56 +02:00
|
|
|
version: 1, // CatalogVersion::V1 is 1
|
|
|
|
|
signature: None,
|
2025-12-22 22:42:56 +01:00
|
|
|
updates: BTreeMap::new(),
|
2025-07-27 13:43:56 +02:00
|
|
|
};
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Add part information
|
|
|
|
|
let base_part_name = "catalog.base.C";
|
|
|
|
|
attrs.parts.insert(
|
|
|
|
|
base_part_name.to_string(),
|
|
|
|
|
crate::repository::catalog::CatalogPartInfo {
|
|
|
|
|
last_modified: timestamp.clone(),
|
|
|
|
|
signature_sha1: None,
|
|
|
|
|
},
|
|
|
|
|
);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
let dependency_part_name = "catalog.dependency.C";
|
|
|
|
|
attrs.parts.insert(
|
|
|
|
|
dependency_part_name.to_string(),
|
|
|
|
|
crate::repository::catalog::CatalogPartInfo {
|
|
|
|
|
last_modified: timestamp.clone(),
|
|
|
|
|
signature_sha1: None,
|
|
|
|
|
},
|
|
|
|
|
);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
let summary_part_name = "catalog.summary.C";
|
|
|
|
|
attrs.parts.insert(
|
|
|
|
|
summary_part_name.to_string(),
|
|
|
|
|
crate::repository::catalog::CatalogPartInfo {
|
|
|
|
|
last_modified: timestamp.clone(),
|
|
|
|
|
signature_sha1: None,
|
|
|
|
|
},
|
|
|
|
|
);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Create and save catalog parts
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Base part
|
|
|
|
|
let base_part_path = catalog_dir.join(base_part_name);
|
|
|
|
|
debug!("Writing base part to: {}", base_part_path.display());
|
|
|
|
|
let mut base_part = crate::repository::catalog::CatalogPart::new();
|
2025-07-24 00:28:33 +02:00
|
|
|
for (fmri, actions, signature) in base_entries {
|
|
|
|
|
base_part.add_package(publisher, &fmri, actions, Some(signature));
|
|
|
|
|
}
|
2025-12-09 12:49:25 +01:00
|
|
|
let base_sig = catalog_writer::write_catalog_part(&base_part_path, &mut base_part)?;
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Wrote base part file");
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Dependency part
|
|
|
|
|
let dependency_part_path = catalog_dir.join(dependency_part_name);
|
2025-07-27 15:22:49 +02:00
|
|
|
debug!(
|
|
|
|
|
"Writing dependency part to: {}",
|
|
|
|
|
dependency_part_path.display()
|
|
|
|
|
);
|
2025-07-27 13:43:56 +02:00
|
|
|
let mut dependency_part = crate::repository::catalog::CatalogPart::new();
|
2025-12-23 13:19:51 +01:00
|
|
|
for (fmri, actions, _signature) in dependency_entries {
|
|
|
|
|
dependency_part.add_package(publisher, &fmri, actions, None);
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
let dependency_sig =
|
|
|
|
|
catalog_writer::write_catalog_part(&dependency_part_path, &mut dependency_part)?;
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Wrote dependency part file");
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Summary part
|
|
|
|
|
let summary_part_path = catalog_dir.join(summary_part_name);
|
|
|
|
|
debug!("Writing summary part to: {}", summary_part_path.display());
|
|
|
|
|
let mut summary_part = crate::repository::catalog::CatalogPart::new();
|
2025-12-23 13:19:51 +01:00
|
|
|
for (fmri, actions, _signature) in summary_entries {
|
|
|
|
|
summary_part.add_package(publisher, &fmri, actions, None);
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
let summary_sig =
|
|
|
|
|
catalog_writer::write_catalog_part(&summary_part_path, &mut summary_part)?;
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Wrote summary part file");
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-12-09 12:49:25 +01:00
|
|
|
// Update part signatures in attrs (written after parts)
|
|
|
|
|
if let Some(info) = attrs.parts.get_mut(base_part_name) {
|
|
|
|
|
info.signature_sha1 = Some(base_sig);
|
|
|
|
|
}
|
|
|
|
|
if let Some(info) = attrs.parts.get_mut(dependency_part_name) {
|
|
|
|
|
info.signature_sha1 = Some(dependency_sig);
|
|
|
|
|
}
|
|
|
|
|
if let Some(info) = attrs.parts.get_mut(summary_part_name) {
|
|
|
|
|
info.signature_sha1 = Some(summary_sig);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Save the catalog.attrs file (after parts so signatures are present)
|
|
|
|
|
let attrs_path = catalog_dir.join("catalog.attrs");
|
|
|
|
|
debug!("Writing catalog.attrs to: {}", attrs_path.display());
|
|
|
|
|
let _attrs_sig = catalog_writer::write_catalog_attrs(&attrs_path, &mut attrs)?;
|
|
|
|
|
debug!("Wrote catalog.attrs file");
|
|
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Create and save the update log if needed
|
2025-07-24 00:28:33 +02:00
|
|
|
if create_update_log {
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Creating update log");
|
2025-07-24 00:28:33 +02:00
|
|
|
let update_log_name = format!("update.{}Z.C", timestamp.split('.').next().unwrap());
|
2025-07-27 13:43:56 +02:00
|
|
|
let update_log_path = catalog_dir.join(&update_log_name);
|
|
|
|
|
debug!("Update log path: {}", update_log_path.display());
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
let mut update_log = crate::repository::catalog::UpdateLog::new();
|
|
|
|
|
debug!("Adding {} updates to the log", update_entries.len());
|
2025-07-24 00:28:33 +02:00
|
|
|
for (fmri, catalog_parts, signature) in update_entries {
|
|
|
|
|
update_log.add_update(
|
|
|
|
|
publisher,
|
|
|
|
|
&fmri,
|
|
|
|
|
crate::repository::catalog::CatalogOperationType::Add,
|
|
|
|
|
catalog_parts,
|
|
|
|
|
Some(signature),
|
|
|
|
|
);
|
|
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2026-01-18 12:51:55 +01:00
|
|
|
let update_log_sig =
|
|
|
|
|
catalog_writer::write_update_log(&update_log_path, &mut update_log)?;
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Wrote update log file");
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Add an update log to catalog.attrs
|
|
|
|
|
debug!("Adding update log to catalog.attrs");
|
|
|
|
|
attrs.updates.insert(
|
|
|
|
|
update_log_name.clone(),
|
|
|
|
|
crate::repository::catalog::UpdateLogInfo {
|
|
|
|
|
last_modified: timestamp.clone(),
|
2025-12-23 12:33:12 +01:00
|
|
|
signature_sha1: Some(update_log_sig),
|
2025-07-27 13:43:56 +02:00
|
|
|
},
|
|
|
|
|
);
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
// Update the catalog.attrs file with the new update log
|
|
|
|
|
debug!("Updating catalog.attrs file with new update log");
|
2025-12-09 12:49:25 +01:00
|
|
|
let _ = catalog_writer::write_catalog_attrs(&attrs_path, &mut attrs)?;
|
2025-07-27 13:43:56 +02:00
|
|
|
debug!("Updated catalog.attrs file");
|
2025-07-24 00:28:33 +02:00
|
|
|
}
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
info!("Catalog rebuilt for publisher: {}", publisher);
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-09 12:12:57 +01:00
|
|
|
|
|
|
|
|
/// Save an update log file to the publisher's catalog directory.
|
|
|
|
|
///
|
|
|
|
|
/// The file name must follow the legacy pattern: `update.<logdate>.<locale>`
|
|
|
|
|
/// for example: `update.20090524T042841Z.C`.
|
|
|
|
|
pub fn save_update_log(
|
|
|
|
|
&self,
|
|
|
|
|
publisher: &str,
|
|
|
|
|
log_filename: &str,
|
|
|
|
|
log: &crate::repository::catalog::UpdateLog,
|
|
|
|
|
) -> Result<()> {
|
|
|
|
|
if log_filename.contains('/') || log_filename.contains('\\') {
|
|
|
|
|
return Err(RepositoryError::PathPrefixError(log_filename.to_string()));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Ensure catalog dir exists
|
|
|
|
|
let catalog_dir = Self::construct_catalog_path(&self.path, publisher);
|
2025-12-22 20:10:17 +01:00
|
|
|
std::fs::create_dir_all(&catalog_dir).map_err(|e| {
|
|
|
|
|
RepositoryError::DirectoryCreateError {
|
|
|
|
|
path: catalog_dir.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
}
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
|
|
|
|
|
// Serialize JSON
|
2025-12-22 20:10:17 +01:00
|
|
|
let json = serde_json::to_vec_pretty(log).map_err(|e| {
|
|
|
|
|
RepositoryError::JsonSerializeError(format!("Update log serialize error: {}", e))
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
|
|
|
|
|
// Write atomically
|
|
|
|
|
let target = catalog_dir.join(log_filename);
|
|
|
|
|
let tmp = target.with_extension("tmp");
|
|
|
|
|
{
|
2025-12-22 20:10:17 +01:00
|
|
|
let mut f =
|
|
|
|
|
std::fs::File::create(&tmp).map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: tmp.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
use std::io::Write as _;
|
|
|
|
|
f.write_all(&json)
|
2025-12-22 20:10:17 +01:00
|
|
|
.map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: tmp.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
|
|
|
|
f.flush().map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: tmp.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
std::fs::rename(&tmp, &target).map_err(|e| RepositoryError::FileWriteError {
|
|
|
|
|
path: target.clone(),
|
|
|
|
|
source: e,
|
|
|
|
|
})?;
|
2025-12-09 12:12:57 +01:00
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
/// Generate the file path for a given hash using the new directory structure with publisher
|
|
|
|
|
/// This is a wrapper around the construct_file_path_with_publisher helper method
|
|
|
|
|
fn generate_file_path_with_publisher(&self, publisher: &str, hash: &str) -> PathBuf {
|
|
|
|
|
Self::construct_file_path_with_publisher(&self.path, publisher, hash)
|
2025-07-27 13:43:56 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
/// Get or initialize the catalog manager
|
2025-07-27 15:22:49 +02:00
|
|
|
///
|
2025-07-27 13:43:56 +02:00
|
|
|
/// This method returns a mutable reference to the catalog manager.
|
2026-02-05 15:57:56 +01:00
|
|
|
/// It uses interior mutability with Mutex to allow mutation through an immutable reference.
|
2025-12-22 20:10:17 +01:00
|
|
|
///
|
2025-07-31 00:18:21 +02:00
|
|
|
/// The catalog manager is specific to the given publisher.
|
2025-07-27 13:43:56 +02:00
|
|
|
pub fn get_catalog_manager(
|
|
|
|
|
&mut self,
|
2025-07-31 00:18:21 +02:00
|
|
|
publisher: &str,
|
2026-02-05 15:57:56 +01:00
|
|
|
) -> Result<std::sync::MutexGuard<'_, crate::repository::catalog::CatalogManager>> {
|
2025-07-27 13:43:56 +02:00
|
|
|
if self.catalog_manager.is_none() {
|
2025-07-31 00:18:21 +02:00
|
|
|
let publisher_dir = self.path.join("publisher");
|
2025-12-22 20:10:17 +01:00
|
|
|
let manager =
|
|
|
|
|
crate::repository::catalog::CatalogManager::new(&publisher_dir, publisher)?;
|
2026-02-05 15:57:56 +01:00
|
|
|
let mutex = Mutex::new(manager);
|
|
|
|
|
self.catalog_manager = Some(mutex);
|
2025-07-27 13:43:56 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2026-02-05 15:57:56 +01:00
|
|
|
self.catalog_manager
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or_else(|| RepositoryError::Other("Catalog manager not initialized".to_string()))?
|
|
|
|
|
.lock()
|
|
|
|
|
.map_err(|e| RepositoryError::Other(format!("Failed to lock catalog manager: {}", e)))
|
2025-07-27 13:43:56 +02:00
|
|
|
}
|
2025-12-22 20:10:17 +01:00
|
|
|
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
/// Get or initialize the obsoleted package manager
|
|
|
|
|
///
|
|
|
|
|
/// This method returns a mutable reference to the obsoleted package manager.
|
2026-02-05 15:57:56 +01:00
|
|
|
/// It uses interior mutability with Mutex to allow mutation through an immutable reference.
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
pub fn get_obsoleted_manager(
|
|
|
|
|
&mut self,
|
2026-02-05 15:57:56 +01:00
|
|
|
) -> Result<std::sync::MutexGuard<'_, crate::repository::obsoleted::ObsoletedPackageManager>> {
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
if self.obsoleted_manager.is_none() {
|
|
|
|
|
let manager = crate::repository::obsoleted::ObsoletedPackageManager::new(&self.path);
|
2026-02-05 15:57:56 +01:00
|
|
|
let mutex = Mutex::new(manager);
|
|
|
|
|
self.obsoleted_manager = Some(mutex);
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
}
|
|
|
|
|
|
2026-02-05 15:57:56 +01:00
|
|
|
self.obsoleted_manager
|
|
|
|
|
.as_ref()
|
|
|
|
|
.ok_or_else(|| RepositoryError::Other("Obsoleted manager not initialized".to_string()))?
|
|
|
|
|
.lock()
|
|
|
|
|
.map_err(|e| RepositoryError::Other(format!("Failed to lock obsoleted manager: {}", e)))
|
Introduce obsoleted package management system in IPS
- Add `obsoleted.rs` module to handle storing, metadata management, and operations for obsoleted packages.
- Implement commands for marking, listing, searching, restoring, exporting, and importing obsoleted packages (`pkg6repo`).
- Enhance `RepositoryError` with `From` implementations for various error types to manage database and serialization-related errors.
- Introduce reusable data structures for obsoleted package metadata and export representation.
- Update `Cargo.toml` and `Cargo.lock` to include new dependencies (`redb`, `bincode`, etc.).
- Document obsoleted package workflow and integration details in `doc/obsoleted_packages.md` for contributors.
- Refactor repository internals to integrate obsoleted package support without disrupting existing workflow.
- Add robust error handling, logging, and pagination for enhanced usability and scalability.
2025-07-29 16:16:12 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-27 13:43:56 +02:00
|
|
|
/// URL encode a string for use in a filename
///
/// Unreserved characters (RFC 3986: alphanumerics plus `-`, `_`, `.`, `~`)
/// pass through unchanged, a space becomes `+`, and every other character is
/// percent-encoded byte-by-byte from its UTF-8 encoding.
///
/// Bug fix: the previous implementation emitted `c as u8`, which silently
/// truncated any code point above U+00FF — e.g. 'あ' (U+3042) encoded as
/// "%42", colliding with a literal 'B'. Encoding the UTF-8 bytes keeps the
/// mapping unambiguous and reversible. ASCII input is encoded identically
/// to before.
// NOTE(review): repositories that already stored non-ASCII names under the
// old single-byte encoding would need re-encoding — confirm none exist.
pub(crate) fn url_encode(s: &str) -> String {
    // s.len() is a sensible lower bound; percent-encoding can grow 3x.
    let mut result = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            // RFC 3986 "unreserved" set — safe in filenames as-is.
            'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' | '.' | '~' => result.push(c),
            ' ' => result.push('+'),
            _ => {
                // Percent-encode every UTF-8 byte of the code point.
                let mut buf = [0u8; 4];
                for byte in c.encode_utf8(&mut buf).bytes() {
                    result.push('%');
                    result.push_str(&format!("{:02X}", byte));
                }
            }
        }
    }
    result
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2026-03-14 22:01:48 +01:00
|
|
|
/// Search for packages with detailed results using SQLite FTS5.
///
/// # Arguments
/// * `query` - free-text search term; matched by the FTS5 index when one
///   exists, otherwise by case-insensitive substring match on package stems
/// * `publisher` - restrict the search to one publisher; when `None`, the
///   configured default publisher is used, and failing that all configured
///   publishers are searched
/// * `limit` - maximum number of results; passed through to the FTS query
///   (per publisher) and applied once more to the combined result list
/// * `_case_sensitive` - currently unused; the fallback path lowercases
///   both sides, so matching there is always case-insensitive
///
/// # Errors
/// Returns `RepositoryError::Other` when an FTS query fails, and propagates
/// errors from `list_packages` in the fallback path.
pub fn search_detailed(
    &self,
    query: &str,
    publisher: Option<&str>,
    limit: Option<usize>,
    _case_sensitive: bool,
) -> Result<Vec<IndexEntry>> {
    debug!("Searching (detailed) for packages with query: {}", query);

    // If no publisher is specified, use the default publisher if available
    let publisher = publisher.or_else(|| self.config.default_publisher.as_deref());

    // If still no publisher, we need to search all publishers
    let publishers = if let Some(pub_name) = publisher {
        vec![pub_name.to_string()]
    } else {
        self.config.publishers.clone()
    };

    let mut results = Vec::new();

    for pub_name in &publishers {
        // Per-publisher shard databases: the FTS index and the
        // active-package catalog used to resolve full FMRIs.
        let fts_path = self.shard_dir(pub_name).join("fts.db");
        let active_path = self.shard_dir(pub_name).join("active.db");

        if fts_path.exists() {
            // Use FTS5 search
            let fts_results = crate::repository::sqlite_catalog::search_fts(
                &fts_path,
                query,
                Some(pub_name),
                limit,
            )
            .map_err(|e| {
                RepositoryError::Other(format!("FTS search error: {}", e.message))
            })?;

            if fts_results.is_empty() {
                continue;
            }

            // Resolve full FMRIs from active.db
            let stems: Vec<(String, String)> = fts_results
                .iter()
                .map(|r| (r.stem.clone(), r.publisher.clone()))
                .collect();

            // NOTE(review): resolution errors are swallowed by
            // `unwrap_or_default()` and the loop below falls back to a
            // synthetic versionless FMRI — confirm that is intended.
            let fmri_map = if active_path.exists() {
                crate::repository::sqlite_catalog::resolve_latest_fmris(&active_path, &stems)
                    .unwrap_or_default()
            } else {
                std::collections::HashMap::new()
            };

            for fts_result in fts_results {
                let key = (fts_result.stem.clone(), fts_result.publisher.clone());
                // Fall back to "pkg://publisher/stem" (no version) when the
                // stem could not be resolved in active.db.
                let fmri = fmri_map
                    .get(&key)
                    .cloned()
                    .unwrap_or_else(|| {
                        format!("pkg://{}/{}", fts_result.publisher, fts_result.stem)
                    });

                results.push(IndexEntry {
                    fmri,
                    action_type: "set".to_string(),
                    index_type: "name".to_string(),
                    value: fts_result.stem.clone(),
                    token: fts_result.stem,
                    attributes: BTreeMap::new(),
                });
            }
        } else {
            debug!(
                "No fts.db found for publisher: {}, falling back to simple listing",
                pub_name
            );
            // Fallback: list packages and filter by stem
            let all_packages = self.list_packages(Some(pub_name), None)?;
            let query_lower = query.to_lowercase();
            let matching_packages: Vec<IndexEntry> = all_packages
                .into_iter()
                .filter(|pkg| pkg.fmri.stem().to_lowercase().contains(&query_lower))
                .map(|pkg| {
                    let fmri = pkg.fmri.to_string();
                    let stem = pkg.fmri.stem().to_string();
                    IndexEntry {
                        fmri,
                        action_type: "set".to_string(),
                        index_type: "name".to_string(),
                        value: stem.clone(),
                        token: stem,
                        attributes: BTreeMap::new(),
                    }
                })
                .collect();
            results.extend(matching_packages);
        }
    }

    // Apply limit if specified
    if let Some(max_results) = limit {
        results.truncate(max_results);
    }

    Ok(results)
}
|
|
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Self-test helper: publishes a single file through the full transaction
/// pipeline and verifies every on-disk artifact it should produce.
#[cfg(test)]
pub fn test_publish_files(&mut self, test_dir: &Path) -> Result<()> {
    debug!("Testing file publishing...");

    // Register the publisher the test transaction will publish under.
    let publisher = "test";
    self.add_publisher(publisher)?;

    // Lay out a nested directory with one sample file inside it.
    let deep_dir = test_dir.join("nested").join("dir");
    fs::create_dir_all(&deep_dir)?;
    let sample_file = deep_dir.join("test_file.txt");
    fs::write(&sample_file, "This is a test file")?;

    // Open a transaction bound to the test publisher.
    let mut txn = self.begin_transaction()?;
    txn.set_publisher(publisher);

    // Build a file action and rewrite its path so it is relative to the
    // prototype root rather than absolute.
    let mut action = FileAction::read_from_path(&sample_file)?;
    action.path = sample_file
        .strip_prefix(test_dir)?
        .to_string_lossy()
        .to_string();
    txn.add_file(action, &sample_file)?;

    // The manifest entry must carry the relative path, not the full path.
    let expected_path = "nested/dir/test_file.txt";
    let actual_path = &txn.manifest.files[0].path;
    if actual_path != expected_path {
        return Err(RepositoryError::Other(format!(
            "Path in FileAction is incorrect. Expected: {}, Actual: {}",
            expected_path, actual_path
        )));
    }

    txn.commit()?;

    // The committed payload must exist in the publisher's file store,
    // addressed by its content hash.
    let content_hash = Transaction::calculate_file_hash(&sample_file)?;
    let blob_path = self.generate_file_path_with_publisher(publisher, &content_hash);
    if !blob_path.exists() {
        return Err(RepositoryError::Other(
            "File was not stored correctly".to_string(),
        ));
    }

    // No package name was set, so the manifest lands under "unknown".
    let manifest_path =
        Self::construct_package_dir(&self.path, publisher, "unknown").join("manifest");
    if !manifest_path.exists() {
        return Err(RepositoryError::Other(format!(
            "Manifest was not created at the expected location: {}",
            manifest_path.display()
        )));
    }

    // Regenerate catalog and search index for the new content.
    self.rebuild(Some(publisher), false, false)?;

    debug!("File publishing test passed!");

    Ok(())
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Begin a new transaction for publishing
|
|
|
|
|
pub fn begin_transaction(&self) -> Result<Transaction> {
|
|
|
|
|
Transaction::new(self.path.clone())
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Publish files from a prototype directory
|
2025-07-27 13:15:52 +02:00
|
|
|
pub fn publish_files<P: AsRef<Path>>(&mut self, proto_dir: P, publisher: &str) -> Result<()> {
|
2025-07-21 22:02:05 +02:00
|
|
|
let proto_dir = proto_dir.as_ref();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Check if the prototype directory exists
|
|
|
|
|
if !proto_dir.exists() {
|
2025-07-26 15:33:39 +02:00
|
|
|
return Err(RepositoryError::NotFound(format!(
|
2025-07-26 10:34:45 +02:00
|
|
|
"Prototype directory does not exist: {}",
|
|
|
|
|
proto_dir.display()
|
2025-07-26 15:33:39 +02:00
|
|
|
)));
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Check if the publisher exists
|
|
|
|
|
if !self.config.publishers.contains(&publisher.to_string()) {
|
2025-07-26 15:33:39 +02:00
|
|
|
return Err(RepositoryError::PublisherNotFound(publisher.to_string()));
|
2025-07-21 22:02:05 +02:00
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Begin a transaction
|
|
|
|
|
let mut transaction = self.begin_transaction()?;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-26 21:20:50 +02:00
|
|
|
// Set the publisher for the transaction
|
|
|
|
|
transaction.set_publisher(publisher);
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Walk the prototype directory and add files to the transaction
|
|
|
|
|
self.add_files_to_transaction(&mut transaction, proto_dir, proto_dir)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Commit the transaction
|
|
|
|
|
transaction.commit()?;
|
2025-07-27 15:22:49 +02:00
|
|
|
|
2025-07-26 21:20:50 +02:00
|
|
|
// Regenerate catalog and search index
|
|
|
|
|
self.rebuild(Some(publisher), false, false)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Add files from a directory to a transaction
|
2025-07-26 10:34:45 +02:00
|
|
|
fn add_files_to_transaction(
|
|
|
|
|
&self,
|
|
|
|
|
transaction: &mut Transaction,
|
|
|
|
|
base_dir: &Path,
|
|
|
|
|
dir: &Path,
|
|
|
|
|
) -> Result<()> {
|
2025-07-21 22:02:05 +02:00
|
|
|
// Read the directory entries
|
|
|
|
|
for entry in fs::read_dir(dir)? {
|
|
|
|
|
let entry = entry?;
|
|
|
|
|
let path = entry.path();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
if path.is_dir() {
|
|
|
|
|
// Recursively add files from subdirectories
|
|
|
|
|
self.add_files_to_transaction(transaction, base_dir, &path)?;
|
|
|
|
|
} else {
|
|
|
|
|
// Create a FileAction from the file path
|
|
|
|
|
let mut file_action = FileAction::read_from_path(&path)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Calculate the relative path from the file path to the base directory
|
|
|
|
|
let relative_path = path.strip_prefix(base_dir)?.to_string_lossy().to_string();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Set the relative path in the FileAction
|
|
|
|
|
file_action.path = relative_path;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Add the file to the transaction
|
|
|
|
|
transaction.add_file(file_action, &path)?;
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(())
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
/// Store a file in the repository
|
2025-07-31 00:18:21 +02:00
|
|
|
pub fn store_file<P: AsRef<Path>>(&self, file_path: P, publisher: &str) -> Result<String> {
|
2025-07-21 22:02:05 +02:00
|
|
|
let file_path = file_path.as_ref();
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Calculate the SHA256 hash of the file
|
|
|
|
|
let hash = Transaction::calculate_file_hash(file_path)?;
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-31 00:18:21 +02:00
|
|
|
// Create the destination path using the new directory structure with publisher
|
|
|
|
|
let dest_path = self.generate_file_path_with_publisher(publisher, &hash);
|
2025-07-27 13:15:52 +02:00
|
|
|
|
|
|
|
|
// Create parent directories if they don't exist
|
|
|
|
|
if let Some(parent) = dest_path.parent() {
|
|
|
|
|
fs::create_dir_all(parent)?;
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
// Copy the file if it doesn't already exist
|
|
|
|
|
if !dest_path.exists() {
|
|
|
|
|
fs::copy(file_path, &dest_path)?;
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
|
2025-07-21 22:02:05 +02:00
|
|
|
Ok(hash)
|
|
|
|
|
}
|
2025-07-26 10:34:45 +02:00
|
|
|
}
|