chore(fmt): format with cargo fmt

This commit is contained in:
Till Wegmueller 2025-07-26 12:54:01 +02:00
parent f5b80a7d12
commit a33a3246b6
No known key found for this signature in database
17 changed files with 1012 additions and 644 deletions

View file

@ -2,94 +2,110 @@ use diff::Diff;
use libips::actions::File;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use libips::payload::Payload;
#[derive(Serialize, Deserialize, Debug, Clone, Diff)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]
struct Manifest {
files: HashMap<String, File>
files: HashMap<String, File>,
}
fn main() {
let base = Manifest{files: HashMap::from([
("0dh5".to_string(), File{
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("12ds3".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("654".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
})
])};
let base = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"12ds3".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
]),
};
let new_set = Manifest{files: HashMap::from([
("0dh5".to_string(), File{
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("654".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
})
])};
let new_set = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
]),
};
let d = base.diff(&new_set);
println!("{:#?}", d);
}

View file

@ -8,16 +8,16 @@
use crate::digest::Digest;
use crate::fmri::Fmri;
use crate::payload::{Payload, PayloadError};
use diff::Diff;
use pest::Parser;
use pest_derive::Parser;
use serde::{Deserialize, Serialize};
use std::clone::Clone;
use std::collections::HashMap;
use std::fs::read_to_string;
use std::path::Path;
use std::result::Result as StdResult;
use std::str::FromStr;
use diff::Diff;
use serde::{Deserialize, Serialize};
use thiserror::Error;
type Result<T> = StdResult<T, ActionError>;
@ -289,23 +289,19 @@ impl From<Action> for Dependency {
}
for prop in props {
match prop.key.as_str() {
"fmri" => {
match Fmri::parse(&prop.value) {
Ok(fmri) => dep.fmri = Some(fmri),
Err(err) => {
eprintln!("Error parsing FMRI '{}': {}", prop.value, err);
dep.fmri = None;
}
"fmri" => match Fmri::parse(&prop.value) {
Ok(fmri) => dep.fmri = Some(fmri),
Err(err) => {
eprintln!("Error parsing FMRI '{}': {}", prop.value, err);
dep.fmri = None;
}
},
"type" => dep.dependency_type = prop.value,
"predicate" => {
match Fmri::parse(&prop.value) {
Ok(fmri) => dep.predicate = Some(fmri),
Err(err) => {
eprintln!("Error parsing predicate FMRI '{}': {}", prop.value, err);
dep.predicate = None;
}
"predicate" => match Fmri::parse(&prop.value) {
Ok(fmri) => dep.predicate = Some(fmri),
Err(err) => {
eprintln!("Error parsing predicate FMRI '{}': {}", prop.value, err);
dep.predicate = None;
}
},
"root-image" => dep.root_image = prop.value,

View file

@ -3,14 +3,14 @@
// MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use diff::Diff;
use serde::{Deserialize, Serialize};
use sha2::Digest as Sha2Digest;
#[allow(unused_imports)]
use sha3::Digest as Sha3Digest;
use std::fmt::Display;
use std::str::FromStr;
use std::{convert::TryInto, result::Result as StdResult};
use diff::Diff;
use serde::{Deserialize, Serialize};
use strum::{Display as StrumDisplay, EnumString};
use thiserror::Error;
@ -19,7 +19,9 @@ type Result<T> = StdResult<T, DigestError>;
#[allow(dead_code)]
static DEFAULT_ALGORITHM: DigestAlgorithm = DigestAlgorithm::SHA512;
#[derive(Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff)]
#[derive(
Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff,
)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]
@ -41,7 +43,9 @@ pub enum DigestAlgorithm {
SHA3512, // Sha3 version of sha512t
}
#[derive(Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff)]
#[derive(
Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff,
)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]

View file

@ -57,11 +57,11 @@
//! assert_eq!(version.timestamp, Some("20200421T195136Z".to_string()));
//! ```
use diff::Diff;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
use thiserror::Error;
use serde::{Serialize, Deserialize};
use diff::Diff;
/// Errors that can occur when parsing an FMRI
#[derive(Debug, Error, PartialEq)]
@ -284,7 +284,13 @@ impl Version {
///
/// This method returns all version components as semver::Version objects.
/// If a component is not present or cannot be parsed, it will be None.
pub fn to_semver(&self) -> (Result<semver::Version, semver::Error>, Option<Result<semver::Version, semver::Error>>, Option<Result<semver::Version, semver::Error>>) {
pub fn to_semver(
&self,
) -> (
Result<semver::Version, semver::Error>,
Option<Result<semver::Version, semver::Error>>,
Option<Result<semver::Version, semver::Error>>,
) {
let release = self.release_to_semver();
let branch = self.branch_to_semver();
let build = self.build_to_semver();
@ -364,7 +370,10 @@ impl Version {
if timestamp.is_empty() {
return Err(FmriError::InvalidTimestampFormat);
}
if !timestamp.chars().all(|c| c.is_ascii_hexdigit() || c == 'T' || c == 'Z') {
if !timestamp
.chars()
.all(|c| c.is_ascii_hexdigit() || c == 'T' || c == 'Z')
{
return Err(FmriError::InvalidTimestampFormat);
}
version.timestamp = Some(timestamp.to_string());
@ -589,8 +598,7 @@ impl Fmri {
// Set the name
fmri.name = rest.to_string();
}
else {
} else {
// No scheme, just a name
fmri.name = name_part.to_string();
}
@ -857,7 +865,8 @@ mod tests {
assert_eq!(version.to_string(), "5.11-2020.0.1.0");
// Test displaying a release, branch, build, and timestamp
let version = Version::with_timestamp("5.11", Some("1"), Some("2020.0.1.0"), "20200421T195136Z");
let version =
Version::with_timestamp("5.11", Some("1"), Some("2020.0.1.0"), "20200421T195136Z");
assert_eq!(version.to_string(), "5.11,1-2020.0.1.0:20200421T195136Z");
// Test displaying a release and timestamp (no branch or build)
@ -908,7 +917,10 @@ mod tests {
assert_eq!(fmri.version, None);
// Test parsing with scheme, publisher, and version
let fmri = Fmri::parse("pkg://openindiana.org/web/server/nginx@1.18.0,5.11-2020.0.1.0:20200421T195136Z").unwrap();
let fmri = Fmri::parse(
"pkg://openindiana.org/web/server/nginx@1.18.0,5.11-2020.0.1.0:20200421T195136Z",
)
.unwrap();
assert_eq!(fmri.scheme, "pkg");
assert_eq!(fmri.publisher, Some("openindiana.org".to_string()));
assert_eq!(fmri.name, "web/server/nginx");
@ -947,14 +959,22 @@ mod tests {
// Test displaying a name and version
let version = Version::with_timestamp("5.11", Some("1"), None, "20200421T195136Z");
let fmri = Fmri::with_version("sunos/coreutils", version);
assert_eq!(fmri.to_string(), "pkg:///sunos/coreutils@5.11,1:20200421T195136Z");
assert_eq!(
fmri.to_string(),
"pkg:///sunos/coreutils@5.11,1:20200421T195136Z"
);
// Test displaying with publisher
let fmri = Fmri::with_publisher("openindiana.org", "web/server/nginx", None);
assert_eq!(fmri.to_string(), "pkg://openindiana.org/web/server/nginx");
// Test displaying with publisher and version
let version = Version::with_timestamp("1.18.0", Some("5.11"), Some("2020.0.1.0"), "20200421T195136Z");
let version = Version::with_timestamp(
"1.18.0",
Some("5.11"),
Some("2020.0.1.0"),
"20200421T195136Z",
);
let fmri = Fmri::with_publisher("openindiana.org", "web/server/nginx", Some(version));
assert_eq!(
fmri.to_string(),
@ -968,31 +988,76 @@ mod tests {
assert_eq!(Version::parse(""), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse(".11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5."), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5..11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5a.11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(
Version::parse("5..11"),
Err(FmriError::InvalidReleaseFormat)
);
assert_eq!(
Version::parse("5a.11"),
Err(FmriError::InvalidReleaseFormat)
);
// Test invalid branch format
assert_eq!(Version::parse("5.11,"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,.1"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1."), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1..2"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1a.2"), Err(FmriError::InvalidBranchFormat));
assert_eq!(
Version::parse("5.11,.1"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1."),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1..2"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1a.2"),
Err(FmriError::InvalidBranchFormat)
);
// Test invalid build format
assert_eq!(Version::parse("5.11-"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-.1"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1."), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1..2"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1a.2"), Err(FmriError::InvalidBuildFormat));
assert_eq!(
Version::parse("5.11-.1"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1."),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1..2"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1a.2"),
Err(FmriError::InvalidBuildFormat)
);
// Test invalid timestamp format
assert_eq!(Version::parse("5.11:"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(Version::parse("5.11:xyz"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(
Version::parse("5.11:"),
Err(FmriError::InvalidTimestampFormat)
);
assert_eq!(
Version::parse("5.11:xyz"),
Err(FmriError::InvalidTimestampFormat)
);
// Test invalid version format
assert_eq!(Version::parse("5.11,1,2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(Version::parse("5.11-1-2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(Version::parse("5.11:1:2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(
Version::parse("5.11,1,2"),
Err(FmriError::InvalidVersionFormat)
);
assert_eq!(
Version::parse("5.11-1-2"),
Err(FmriError::InvalidVersionFormat)
);
assert_eq!(
Version::parse("5.11:1:2"),
Err(FmriError::InvalidVersionFormat)
);
}
#[test]
@ -1001,14 +1066,29 @@ mod tests {
assert_eq!(Fmri::parse(""), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg://"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg:///"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg://publisher/"), Err(FmriError::InvalidFormat));
assert_eq!(
Fmri::parse("pkg://publisher/"),
Err(FmriError::InvalidFormat)
);
assert_eq!(Fmri::parse("@5.11"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("name@version@extra"), Err(FmriError::InvalidFormat));
assert_eq!(
Fmri::parse("name@version@extra"),
Err(FmriError::InvalidFormat)
);
// Test invalid version
assert_eq!(Fmri::parse("name@"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Fmri::parse("name@5.11,"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Fmri::parse("name@5.11-"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Fmri::parse("name@5.11:"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(
Fmri::parse("name@5.11,"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Fmri::parse("name@5.11-"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Fmri::parse("name@5.11:"),
Err(FmriError::InvalidTimestampFormat)
);
}
}

View file

@ -1,10 +1,10 @@
mod properties;
use properties::*;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use properties::*;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use thiserror::Error;
#[derive(Debug, Error)]
@ -29,7 +29,7 @@ pub struct Image {
impl Image {
pub fn new<P: Into<PathBuf>>(path: P) -> Image {
Image{
Image {
path: path.into(),
version: 5,
variants: HashMap::new(),

View file

@ -7,8 +7,8 @@
pub mod actions;
pub mod digest;
pub mod fmri;
pub mod payload;
pub mod image;
pub mod payload;
pub mod repository;
#[cfg(test)]
@ -987,12 +987,17 @@ depend facet.version-lock.system/mozilla-nss=true fmri=system/mozilla-nss@3.51.1
..Dependency::default()
},
Dependency {
fmri: Some(Fmri::parse("pkg:/system/file-system/nfs@0.5.11,5.11-2020.0.1.19951").unwrap()),
fmri: Some(
Fmri::parse("pkg:/system/file-system/nfs@0.5.11,5.11-2020.0.1.19951").unwrap(),
),
dependency_type: "incorporate".to_string(),
..Dependency::default()
},
Dependency {
fmri: Some(Fmri::parse("pkg:/system/data/hardware-registry@2020.2.22,5.11-2020.0.1.19951").unwrap()),
fmri: Some(
Fmri::parse("pkg:/system/data/hardware-registry@2020.2.22,5.11-2020.0.1.19951")
.unwrap(),
),
dependency_type: "incorporate".to_string(),
facets: hashmap! {
"version-lock.system/data/hardware-registry".to_string() => Facet{

View file

@ -4,12 +4,12 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use crate::digest::{Digest, DigestAlgorithm, DigestError, DigestSource};
use diff::Diff;
use object::Object;
use serde::{Deserialize, Serialize};
use std::io::Error as IOError;
use std::path::Path;
use std::result::Result as StdResult;
use diff::Diff;
use serde::{Deserialize, Serialize};
use thiserror::Error;
type Result<T> = StdResult<T, PayloadError>;

View file

@ -4,7 +4,7 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use anyhow::Result;
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
@ -27,11 +27,12 @@ fn convert_system_time_to_datetime(time: &SystemTime) -> chrono::DateTime<chrono
let secs = duration.as_secs() as i64;
let nanos = duration.subsec_nanos();
chrono::DateTime::from_timestamp(secs, nanos)
.unwrap_or_else(|| chrono::DateTime::<chrono::Utc>::from_naive_utc_and_offset(
chrono::DateTime::from_timestamp(secs, nanos).unwrap_or_else(|| {
chrono::DateTime::<chrono::Utc>::from_naive_utc_and_offset(
chrono::NaiveDateTime::default(),
chrono::Utc,
))
)
})
}
/// Catalog version
@ -172,9 +173,20 @@ impl CatalogPart {
}
/// Add a package to the catalog part
pub fn add_package(&mut self, publisher: &str, fmri: &Fmri, actions: Option<Vec<String>>, signature: Option<String>) {
let publisher_packages = self.packages.entry(publisher.to_string()).or_insert_with(HashMap::new);
let stem_versions = publisher_packages.entry(fmri.stem().to_string()).or_insert_with(Vec::new);
pub fn add_package(
&mut self,
publisher: &str,
fmri: &Fmri,
actions: Option<Vec<String>>,
signature: Option<String>,
) {
let publisher_packages = self
.packages
.entry(publisher.to_string())
.or_insert_with(HashMap::new);
let stem_versions = publisher_packages
.entry(fmri.stem().to_string())
.or_insert_with(Vec::new);
// Check if this version already exists
for entry in stem_versions.iter_mut() {
@ -277,8 +289,13 @@ impl UpdateLog {
catalog_parts: HashMap<String, HashMap<String, Vec<String>>>,
signature: Option<String>,
) {
let publisher_updates = self.updates.entry(publisher.to_string()).or_insert_with(HashMap::new);
let stem_updates = publisher_updates.entry(fmri.stem().to_string()).or_insert_with(Vec::new);
let publisher_updates = self
.updates
.entry(publisher.to_string())
.or_insert_with(HashMap::new);
let stem_updates = publisher_updates
.entry(fmri.stem().to_string())
.or_insert_with(Vec::new);
let now = SystemTime::now();
let timestamp = format_iso8601_basic(&now);
@ -393,7 +410,9 @@ impl CatalogManager {
/// Create a new catalog part
pub fn create_part(&mut self, name: &str) -> &mut CatalogPart {
self.parts.entry(name.to_string()).or_insert_with(CatalogPart::new)
self.parts
.entry(name.to_string())
.or_insert_with(CatalogPart::new)
}
/// Save catalog attributes
@ -405,7 +424,9 @@ impl CatalogManager {
/// Create a new update log
pub fn create_update_log(&mut self, name: &str) -> &mut UpdateLog {
self.update_logs.entry(name.to_string()).or_insert_with(UpdateLog::new)
self.update_logs
.entry(name.to_string())
.or_insert_with(UpdateLog::new)
}
/// Save an update log
@ -419,10 +440,13 @@ impl CatalogManager {
let timestamp = format_iso8601_basic(&now);
let mut attrs = self.attrs.clone();
attrs.updates.insert(name.to_string(), UpdateLogInfo {
last_modified: timestamp,
signature_sha1: None,
});
attrs.updates.insert(
name.to_string(),
UpdateLogInfo {
last_modified: timestamp,
signature_sha1: None,
},
);
let attrs_path = self.catalog_dir.join("catalog.attrs");
attrs.save(&attrs_path)?;

View file

@ -623,7 +623,8 @@ impl ReadableRepository for FileBackend {
let path = entry.path();
// Skip directories, only process files with .manifest extension
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest") {
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest")
{
// Parse the manifest file to get real package information
match Manifest::parse_file(&path) {
Ok(manifest) => {
@ -765,7 +766,8 @@ impl ReadableRepository for FileBackend {
let path = entry.path();
// Skip directories, only process files with .manifest extension
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest") {
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest")
{
// Parse the manifest file to get package information
match Manifest::parse_file(&path) {
Ok(manifest) => {
@ -1260,7 +1262,9 @@ impl FileBackend {
}
/// Get or initialize the catalog manager
pub fn get_catalog_manager(&mut self) -> Result<&mut crate::repository::catalog::CatalogManager> {
pub fn get_catalog_manager(
&mut self,
) -> Result<&mut crate::repository::catalog::CatalogManager> {
if self.catalog_manager.is_none() {
let catalog_dir = self.path.join("catalog");
self.catalog_manager = Some(crate::repository::catalog::CatalogManager::new(

View file

@ -4,18 +4,18 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use anyhow::Result;
use std::path::Path;
use std::collections::HashMap;
use std::path::Path;
mod catalog;
mod file_backend;
mod rest_backend;
mod catalog;
#[cfg(test)]
mod tests;
pub use catalog::{CatalogAttrs, CatalogManager, CatalogOperationType, CatalogPart, UpdateLog};
pub use file_backend::FileBackend;
pub use rest_backend::RestBackend;
pub use catalog::{CatalogManager, CatalogAttrs, CatalogPart, UpdateLog, CatalogOperationType};
/// Repository configuration filename
pub const REPOSITORY_CONFIG_FILENAME: &str = "pkg6.repository";
@ -110,16 +110,27 @@ impl Default for RepositoryConfig {
/// Repository trait for read-only operations
pub trait ReadableRepository {
/// Open an existing repository
fn open<P: AsRef<Path>>(path: P) -> Result<Self> where Self: Sized;
fn open<P: AsRef<Path>>(path: P) -> Result<Self>
where
Self: Sized;
/// Get repository information
fn get_info(&self) -> Result<RepositoryInfo>;
/// List packages in the repository
fn list_packages(&self, publisher: Option<&str>, pattern: Option<&str>) -> Result<Vec<PackageInfo>>;
fn list_packages(
&self,
publisher: Option<&str>,
pattern: Option<&str>,
) -> Result<Vec<PackageInfo>>;
/// Show contents of packages
fn show_contents(&self, publisher: Option<&str>, pattern: Option<&str>, action_types: Option<&[String]>) -> Result<Vec<PackageContents>>;
fn show_contents(
&self,
publisher: Option<&str>,
pattern: Option<&str>,
action_types: Option<&[String]>,
) -> Result<Vec<PackageContents>>;
/// Search for packages in the repository
///
@ -131,13 +142,20 @@ pub trait ReadableRepository {
/// * `query` - The search query
/// * `publisher` - Optional publisher to limit the search to
/// * `limit` - Optional maximum number of results to return
fn search(&self, query: &str, publisher: Option<&str>, limit: Option<usize>) -> Result<Vec<PackageInfo>>;
fn search(
&self,
query: &str,
publisher: Option<&str>,
limit: Option<usize>,
) -> Result<Vec<PackageInfo>>;
}
/// Repository trait for write operations
pub trait WritableRepository {
/// Create a new repository at the specified path
fn create<P: AsRef<Path>>(path: P, version: RepositoryVersion) -> Result<Self> where Self: Sized;
fn create<P: AsRef<Path>>(path: P, version: RepositoryVersion) -> Result<Self>
where
Self: Sized;
/// Save the repository configuration
fn save_config(&self) -> Result<()>;
@ -152,7 +170,12 @@ pub trait WritableRepository {
fn set_property(&mut self, property: &str, value: &str) -> Result<()>;
/// Set a publisher property
fn set_publisher_property(&mut self, publisher: &str, property: &str, value: &str) -> Result<()>;
fn set_publisher_property(
&mut self,
publisher: &str,
property: &str,
value: &str,
) -> Result<()>;
/// Rebuild repository metadata
fn rebuild(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()>;

View file

@ -8,8 +8,8 @@ mod tests {
use crate::actions::Manifest;
use crate::fmri::Fmri;
use crate::repository::{
CatalogManager, FileBackend, ReadableRepository,
RepositoryVersion, WritableRepository, REPOSITORY_CONFIG_FILENAME,
CatalogManager, FileBackend, ReadableRepository, RepositoryVersion, WritableRepository,
REPOSITORY_CONFIG_FILENAME,
};
use std::fs;
use std::path::PathBuf;
@ -73,26 +73,38 @@ mod tests {
prototype_dir: &PathBuf,
publisher: &str,
) -> Result<(), anyhow::Error> {
println!("Publishing package from manifest: {}", manifest_path.display());
println!(
"Publishing package from manifest: {}",
manifest_path.display()
);
println!("Prototype directory: {}", prototype_dir.display());
println!("Publisher: {}", publisher);
// Check if the manifest file exists
if !manifest_path.exists() {
println!("Error: Manifest file does not exist");
return Err(anyhow::anyhow!("Manifest file does not exist: {}", manifest_path.display()));
return Err(anyhow::anyhow!(
"Manifest file does not exist: {}",
manifest_path.display()
));
}
// Check if the prototype directory exists
if !prototype_dir.exists() {
println!("Error: Prototype directory does not exist");
return Err(anyhow::anyhow!("Prototype directory does not exist: {}", prototype_dir.display()));
return Err(anyhow::anyhow!(
"Prototype directory does not exist: {}",
prototype_dir.display()
));
}
// Parse the manifest file
println!("Parsing manifest file...");
let manifest = Manifest::parse_file(manifest_path)?;
println!("Manifest parsed successfully. Files: {}", manifest.files.len());
println!(
"Manifest parsed successfully. Files: {}",
manifest.files.len()
);
// Begin a transaction
println!("Beginning transaction...");
@ -106,7 +118,10 @@ mod tests {
// Check if the file exists
if !file_path.exists() {
println!("Warning: File does not exist in prototype directory: {}", file_path.display());
println!(
"Warning: File does not exist in prototype directory: {}",
file_path.display()
);
continue;
}
@ -130,7 +145,10 @@ mod tests {
// Debug: Check if the package manifest was stored in the correct location
let publisher_pkg_dir = repo.path.join("pkg").join(publisher);
println!("Publisher package directory: {}", publisher_pkg_dir.display());
println!(
"Publisher package directory: {}",
publisher_pkg_dir.display()
);
if publisher_pkg_dir.exists() {
println!("Publisher directory exists");
@ -334,7 +352,9 @@ mod tests {
// Check for specific files
assert!(files.iter().any(|f| f.contains("usr/bin/hello")));
assert!(files.iter().any(|f| f.contains("usr/share/doc/example/README.txt")));
assert!(files
.iter()
.any(|f| f.contains("usr/share/doc/example/README.txt")));
assert!(files.iter().any(|f| f.contains("etc/config/example.conf")));
// Clean up

View file

@ -1,8 +1,8 @@
use clap::{Parser, Subcommand};
use libips::actions::{ActionError, File, Manifest};
use libips::repository::{ReadableRepository, WritableRepository, FileBackend};
use libips::repository::{FileBackend, ReadableRepository, WritableRepository};
use anyhow::{Result, anyhow};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::fs::{read_dir, OpenOptions};
use std::io::Write;
@ -325,12 +325,18 @@ fn publish_package(
) -> Result<()> {
// Check if the manifest file exists
if !manifest_path.exists() {
return Err(anyhow!("Manifest file does not exist: {}", manifest_path.display()));
return Err(anyhow!(
"Manifest file does not exist: {}",
manifest_path.display()
));
}
// Check if the prototype directory exists
if !prototype_dir.exists() {
return Err(anyhow!("Prototype directory does not exist: {}", prototype_dir.display()));
return Err(anyhow!(
"Prototype directory does not exist: {}",
prototype_dir.display()
));
}
// Parse the manifest file
@ -368,14 +374,20 @@ fn publish_package(
let mut transaction = repo.begin_transaction()?;
// Add files from the prototype directory to the transaction
println!("Adding files from prototype directory: {}", prototype_dir.display());
println!(
"Adding files from prototype directory: {}",
prototype_dir.display()
);
for file_action in manifest.files.iter() {
// Construct the full path to the file in the prototype directory
let file_path = prototype_dir.join(&file_action.path);
// Check if the file exists
if !file_path.exists() {
println!("Warning: File does not exist in prototype directory: {}", file_path.display());
println!(
"Warning: File does not exist in prototype directory: {}",
file_path.display()
);
continue;
}

View file

@ -103,7 +103,11 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Check that the repository was created
assert!(repo_path.exists());
@ -126,15 +130,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"example.com",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "example.com"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Check that the publisher was added
assert!(repo_path.join("catalog").join("example.com").exists());
@ -155,15 +163,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish a package using pkg6dev
let manifest_path = manifest_dir.join("example.p5m");
@ -173,17 +185,25 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish package: {:?}",
result.err()
);
// Check that the package was published
let result = run_pkg6repo(&[
"list",
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to list packages: {:?}", result.err());
let result = run_pkg6repo(&["list", repo_path.to_str().unwrap()]);
assert!(
result.is_ok(),
"Failed to list packages: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("example"), "Package not found in repository");
assert!(
output.contains("example"),
"Package not found in repository"
);
// Clean up
cleanup_test_dir(&test_dir);
@ -200,15 +220,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish a package using pkg6dev
let manifest_path = manifest_dir.join("example.p5m");
@ -218,20 +242,33 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish package: {:?}",
result.err()
);
// Show package contents using pkg6repo
let result = run_pkg6repo(&[
"contents",
repo_path.to_str().unwrap(),
"example",
]);
assert!(result.is_ok(), "Failed to show package contents: {:?}", result.err());
let result = run_pkg6repo(&["contents", repo_path.to_str().unwrap(), "example"]);
assert!(
result.is_ok(),
"Failed to show package contents: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("usr/bin/hello"), "File not found in package contents");
assert!(output.contains("usr/share/doc/example/README.txt"), "File not found in package contents");
assert!(output.contains("etc/config/example.conf"), "File not found in package contents");
assert!(
output.contains("usr/bin/hello"),
"File not found in package contents"
);
assert!(
output.contains("usr/share/doc/example/README.txt"),
"File not found in package contents"
);
assert!(
output.contains("etc/config/example.conf"),
"File not found in package contents"
);
// Clean up
cleanup_test_dir(&test_dir);
@ -248,15 +285,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish the first package using pkg6dev
let manifest_path1 = manifest_dir.join("example.p5m");
@ -266,7 +307,11 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish first package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish first package: {:?}",
result.err()
);
// Publish the second package using pkg6dev
let manifest_path2 = manifest_dir.join("example2.p5m");
@ -276,18 +321,29 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish second package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish second package: {:?}",
result.err()
);
// List packages using pkg6repo
let result = run_pkg6repo(&[
"list",
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to list packages: {:?}", result.err());
let result = run_pkg6repo(&["list", repo_path.to_str().unwrap()]);
assert!(
result.is_ok(),
"Failed to list packages: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("example"), "First package not found in repository");
assert!(output.contains("example2"), "Second package not found in repository");
assert!(
output.contains("example"),
"First package not found in repository"
);
assert!(
output.contains("example2"),
"Second package not found in repository"
);
// Clean up
cleanup_test_dir(&test_dir);

View file

@ -1,15 +1,15 @@
use anyhow::{anyhow, Result};
use clap::{Parser, Subcommand};
use serde::Serialize;
use std::convert::TryFrom;
use std::path::PathBuf;
use serde::Serialize;
use libips::repository::{FileBackend, ReadableRepository, RepositoryVersion, WritableRepository};
#[cfg(test)]
mod tests;
#[cfg(test)]
mod e2e_tests;
#[cfg(test)]
mod tests;
// Wrapper structs for JSON serialization
#[derive(Serialize)]
@ -307,8 +307,14 @@ fn main() -> Result<()> {
let cli = App::parse();
match &cli.command {
Commands::Create { repo_version, uri_or_path } => {
println!("Creating repository version {} at {}", repo_version, uri_or_path);
Commands::Create {
repo_version,
uri_or_path,
} => {
println!(
"Creating repository version {} at {}",
repo_version, uri_or_path
);
// Convert repo_version to RepositoryVersion
let repo_version_enum = RepositoryVersion::try_from(*repo_version)?;
@ -318,9 +324,15 @@ fn main() -> Result<()> {
println!("Repository created successfully at {}", repo.path.display());
Ok(())
},
Commands::AddPublisher { repo_uri_or_path, publisher } => {
println!("Adding publishers {:?} to repository {}", publisher, repo_uri_or_path);
}
Commands::AddPublisher {
repo_uri_or_path,
publisher,
} => {
println!(
"Adding publishers {:?} to repository {}",
publisher, repo_uri_or_path
);
// Open the repository
let mut repo = FileBackend::open(repo_uri_or_path)?;
@ -333,9 +345,17 @@ fn main() -> Result<()> {
println!("Publishers added successfully");
Ok(())
},
Commands::RemovePublisher { repo_uri_or_path, dry_run, synchronous, publisher } => {
println!("Removing publishers {:?} from repository {}", publisher, repo_uri_or_path);
}
Commands::RemovePublisher {
repo_uri_or_path,
dry_run,
synchronous,
publisher,
} => {
println!(
"Removing publishers {:?} from repository {}",
publisher, repo_uri_or_path
);
println!("Dry run: {}, Synchronous: {}", dry_run, synchronous);
// Open the repository
@ -361,8 +381,15 @@ fn main() -> Result<()> {
}
Ok(())
},
Commands::Get { repo_uri_or_path, format, omit_headers, publisher, section_property, .. } => {
}
Commands::Get {
repo_uri_or_path,
format,
omit_headers,
publisher,
section_property,
..
} => {
println!("Getting properties from repository {}", repo_uri_or_path);
// Open the repository
@ -443,7 +470,7 @@ fn main() -> Result<()> {
println!("{:<10} {:<10} {:<20}", "", key, value);
}
}
},
}
"json" => {
// Create a JSON representation of the properties using serde_json
let properties_output = PropertiesOutput {
@ -455,7 +482,7 @@ fn main() -> Result<()> {
.unwrap_or_else(|e| format!("{{\"error\": \"{}\"}}", e));
println!("{}", json_output);
},
}
"tsv" => {
// Print headers if not omitted
if !omit_headers {
@ -471,15 +498,21 @@ fn main() -> Result<()> {
println!("\t{}\t{}", key, value);
}
}
},
}
_ => {
return Err(anyhow!("Unsupported output format: {}", output_format));
}
}
Ok(())
},
Commands::Info { repo_uri_or_path, format, omit_headers, publisher, .. } => {
}
Commands::Info {
repo_uri_or_path,
format,
omit_headers,
publisher,
..
} => {
println!("Displaying info for repository {}", repo_uri_or_path);
// Open the repository
@ -513,19 +546,23 @@ fn main() -> Result<()> {
"table" => {
// Print headers if not omitted
if !omit_headers {
println!("{:<10} {:<8} {:<6} {:<30}", "PUBLISHER", "PACKAGES", "STATUS", "UPDATED");
println!(
"{:<10} {:<8} {:<6} {:<30}",
"PUBLISHER", "PACKAGES", "STATUS", "UPDATED"
);
}
// Print repository info
for publisher_info in repo_info.publishers {
println!("{:<10} {:<8} {:<6} {:<30}",
println!(
"{:<10} {:<8} {:<6} {:<30}",
publisher_info.name,
publisher_info.package_count,
publisher_info.status,
publisher_info.updated
);
}
},
}
"json" => {
// Create a JSON representation of the repository info using serde_json
let info_output = InfoOutput {
@ -537,7 +574,7 @@ fn main() -> Result<()> {
.unwrap_or_else(|e| format!("{{\"error\": \"{}\"}}", e));
println!("{}", json_output);
},
}
"tsv" => {
// Print headers if not omitted
if !omit_headers {
@ -546,22 +583,30 @@ fn main() -> Result<()> {
// Print repository info as tab-separated values
for publisher_info in repo_info.publishers {
println!("{}\t{}\t{}\t{}",
println!(
"{}\t{}\t{}\t{}",
publisher_info.name,
publisher_info.package_count,
publisher_info.status,
publisher_info.updated
);
}
},
}
_ => {
return Err(anyhow!("Unsupported output format: {}", output_format));
}
}
Ok(())
},
Commands::List { repo_uri_or_path, format, omit_headers, publisher, pkg_fmri_pattern, .. } => {
}
Commands::List {
repo_uri_or_path,
format,
omit_headers,
publisher,
pkg_fmri_pattern,
..
} => {
println!("Listing packages in repository {}", repo_uri_or_path);
// Open the repository
@ -614,21 +659,24 @@ fn main() -> Result<()> {
None => String::new(),
};
println!("{:<30} {:<15} {:<10}", pkg_info.fmri.stem(), version_str, publisher_str);
println!(
"{:<30} {:<15} {:<10}",
pkg_info.fmri.stem(),
version_str,
publisher_str
);
}
},
}
"json" => {
// Create a JSON representation of the packages using serde_json
let packages_output = PackagesOutput {
packages,
};
let packages_output = PackagesOutput { packages };
// Serialize to pretty-printed JSON
let json_output = serde_json::to_string_pretty(&packages_output)
.unwrap_or_else(|e| format!("{{\"error\": \"{}\"}}", e));
println!("{}", json_output);
},
}
"tsv" => {
// Print headers if not omitted
if !omit_headers {
@ -645,17 +693,29 @@ fn main() -> Result<()> {
None => String::new(),
};
println!("{}\t{}\t{}", pkg_info.fmri.stem(), version_str, publisher_str);
println!(
"{}\t{}\t{}",
pkg_info.fmri.stem(),
version_str,
publisher_str
);
}
},
}
_ => {
return Err(anyhow!("Unsupported output format: {}", output_format));
}
}
Ok(())
},
Commands::Contents { repo_uri_or_path, manifest, action_type, publisher, pkg_fmri_pattern, .. } => {
}
Commands::Contents {
repo_uri_or_path,
manifest,
action_type,
publisher,
pkg_fmri_pattern,
..
} => {
println!("Showing contents in repository {}", repo_uri_or_path);
// Open the repository
@ -686,7 +746,8 @@ fn main() -> Result<()> {
};
// Show contents
let contents = repo.show_contents(pub_option, pattern_option, action_type.as_deref())?;
let contents =
repo.show_contents(pub_option, pattern_option, action_type.as_deref())?;
// Print contents
for pkg_contents in contents {
@ -698,7 +759,10 @@ fn main() -> Result<()> {
println!("file path={} type={}", path, pkg_contents.package_id);
} else {
// Otherwise, print in table format
println!("{:<40} {:<30} {:<10}", pkg_contents.package_id, path, "file");
println!(
"{:<40} {:<30} {:<10}",
pkg_contents.package_id, path, "file"
);
}
}
}
@ -724,7 +788,10 @@ fn main() -> Result<()> {
println!("link path={} type={}", path, pkg_contents.package_id);
} else {
// Otherwise, print in table format
println!("{:<40} {:<30} {:<10}", pkg_contents.package_id, path, "link");
println!(
"{:<40} {:<30} {:<10}",
pkg_contents.package_id, path, "link"
);
}
}
}
@ -737,7 +804,10 @@ fn main() -> Result<()> {
println!("depend path={} type={}", path, pkg_contents.package_id);
} else {
// Otherwise, print in table format
println!("{:<40} {:<30} {:<10}", pkg_contents.package_id, path, "depend");
println!(
"{:<40} {:<30} {:<10}",
pkg_contents.package_id, path, "depend"
);
}
}
}
@ -750,15 +820,24 @@ fn main() -> Result<()> {
println!("license path={} type={}", path, pkg_contents.package_id);
} else {
// Otherwise, print in table format
println!("{:<40} {:<30} {:<10}", pkg_contents.package_id, path, "license");
println!(
"{:<40} {:<30} {:<10}",
pkg_contents.package_id, path, "license"
);
}
}
}
}
Ok(())
},
Commands::Rebuild { repo_uri_or_path, publisher, no_catalog, no_index, .. } => {
}
Commands::Rebuild {
repo_uri_or_path,
publisher,
no_catalog,
no_index,
..
} => {
println!("Rebuilding repository {}", repo_uri_or_path);
// Open the repository
@ -782,8 +861,14 @@ fn main() -> Result<()> {
println!("Repository rebuilt successfully");
Ok(())
},
Commands::Refresh { repo_uri_or_path, publisher, no_catalog, no_index, .. } => {
}
Commands::Refresh {
repo_uri_or_path,
publisher,
no_catalog,
no_index,
..
} => {
println!("Refreshing repository {}", repo_uri_or_path);
// Open the repository
@ -807,8 +892,12 @@ fn main() -> Result<()> {
println!("Repository refreshed successfully");
Ok(())
},
Commands::Set { repo_uri_or_path, publisher, property_value } => {
}
Commands::Set {
repo_uri_or_path,
publisher,
property_value,
} => {
println!("Setting properties for repository {}", repo_uri_or_path);
// Open the repository
@ -827,7 +916,10 @@ fn main() -> Result<()> {
// If a publisher is specified, set the publisher property
if let Some(pub_name) = publisher {
println!("Setting publisher property {}/{} = {}", pub_name, property, value);
println!(
"Setting publisher property {}/{} = {}",
pub_name, property, value
);
repo.set_publisher_property(pub_name, property, value)?;
} else {
// Otherwise, set the repository property
@ -838,8 +930,16 @@ fn main() -> Result<()> {
println!("Properties set successfully");
Ok(())
},
Commands::Search { repo_uri_or_path, format, omit_headers, publisher, limit, query , .. } => {
}
Commands::Search {
repo_uri_or_path,
format,
omit_headers,
publisher,
limit,
query,
..
} => {
println!("Searching for packages in repository {}", repo_uri_or_path);
// Open the repository
@ -881,9 +981,14 @@ fn main() -> Result<()> {
None => String::new(),
};
println!("{:<30} {:<15} {:<10}", pkg_info.fmri.stem(), version_str, publisher_str);
println!(
"{:<30} {:<15} {:<10}",
pkg_info.fmri.stem(),
version_str,
publisher_str
);
}
},
}
"json" => {
// Create a JSON representation of the search results using serde_json
let search_output = SearchOutput {
@ -896,7 +1001,7 @@ fn main() -> Result<()> {
.unwrap_or_else(|e| format!("{{\"error\": \"{}\"}}", e));
println!("{}", json_output);
},
}
"tsv" => {
// Print headers if not omitted
if !omit_headers {
@ -913,15 +1018,20 @@ fn main() -> Result<()> {
None => String::new(),
};
println!("{}\t{}\t{}", pkg_info.fmri.stem(), version_str, publisher_str);
println!(
"{}\t{}\t{}",
pkg_info.fmri.stem(),
version_str,
publisher_str
);
}
},
}
_ => {
return Err(anyhow!("Unsupported output format: {}", output_format));
}
}
Ok(())
},
}
}
}

View file

@ -1,8 +1,11 @@
#[cfg(test)]
mod tests {
use libips::repository::{ReadableRepository, WritableRepository, RepositoryVersion, FileBackend, REPOSITORY_CONFIG_FILENAME, PublisherInfo, RepositoryInfo};
use std::path::PathBuf;
use libips::repository::{
FileBackend, ReadableRepository, RepositoryVersion,
WritableRepository, REPOSITORY_CONFIG_FILENAME,
};
use std::fs;
use std::path::PathBuf;
// These tests interact with real repositories in a known location
// instead of using temporary directories. This allows for better
@ -94,8 +97,14 @@ mod tests {
// Check that the publisher directories were created
let catalog_dir = repo_path.join("catalog").join("example.com");
let pkg_dir = repo_path.join("pkg").join("example.com");
assert!(catalog_dir.exists(), "Catalog directory should exist after adding publisher");
assert!(pkg_dir.exists(), "Package directory should exist after adding publisher");
assert!(
catalog_dir.exists(),
"Catalog directory should exist after adding publisher"
);
assert!(
pkg_dir.exists(),
"Package directory should exist after adding publisher"
);
// Remove the publisher
repo.remove_publisher("example.com", false).unwrap();
@ -104,8 +113,14 @@ mod tests {
assert!(!repo.config.publishers.contains(&"example.com".to_string()));
// Check that the publisher directories were removed
assert!(!catalog_dir.exists(), "Catalog directory should not exist after removing publisher");
assert!(!pkg_dir.exists(), "Package directory should not exist after removing publisher");
assert!(
!catalog_dir.exists(),
"Catalog directory should not exist after removing publisher"
);
assert!(
!pkg_dir.exists(),
"Package directory should not exist after removing publisher"
);
// Clean up
cleanup_test_dir(&test_dir);
@ -121,10 +136,14 @@ mod tests {
let mut repo = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
// Set a property
repo.set_property("publisher/prefix", "example.com").unwrap();
repo.set_property("publisher/prefix", "example.com")
.unwrap();
// Check that the property was set
assert_eq!(repo.config.properties.get("publisher/prefix").unwrap(), "example.com");
assert_eq!(
repo.config.properties.get("publisher/prefix").unwrap(),
"example.com"
);
// Clean up
cleanup_test_dir(&test_dir);

View file

@ -1,15 +1,13 @@
use anyhow::Result;
use pest::Parser;
use pest_derive::Parser;
use std::collections::HashMap;
use thiserror::Error;
use anyhow::Result;
/// Errors produced while expanding macros in a manifest.
#[derive(Debug, Error)]
pub enum MacroParserError {
    /// Raised when a macro is referenced but was never defined.
    #[error("macro does not exist: {macro_name}")]
    DoesNotExist { macro_name: String },
}
#[derive(Parser)]
@ -18,17 +16,17 @@ struct InternalMacroParser;
/// Expands named macros in manifest text using a table of
/// name -> replacement-value pairs.
#[derive(Default, Debug)]
pub struct MacroParser {
    /// Macro name mapped to its replacement text.
    pub macros: HashMap<String, String>,
}
/// A single parsed macro invocation: its name plus any parameters
/// that were supplied with it.
#[derive(Default, Debug)]
pub struct Macro {
    /// The macro's name as written in the source text.
    pub name: String,
    /// Positional parameters passed to the macro, in order.
    pub parameters: Vec<String>,
}
impl MacroParser {
pub fn parse(&self ,raw_string: String) -> Result<String> {
pub fn parse(&self, raw_string: String) -> Result<String> {
let mut return_string = String::new();
for (i, line) in raw_string.lines().enumerate() {
@ -91,9 +89,10 @@ impl MacroParser {
/// Look up the replacement value of a previously defined macro.
///
/// Returns the stored value as a `&str`, or a
/// `MacroParserError::DoesNotExist` error when no macro with that
/// name has been registered in `self.macros`.
fn get_variable(&self, macro_name: &str) -> Result<&str> {
    // `get` performs a single lookup, avoiding the
    // `contains_key` + index double hash of the original.
    match self.macros.get(macro_name) {
        Some(value) => Ok(value.as_str()),
        None => Err(MacroParserError::DoesNotExist {
            macro_name: macro_name.into(),
        }
        .into()),
    }
}
}