chore(fmt): format with cargo fmt

This commit is contained in:
Till Wegmueller 2025-07-26 12:54:01 +02:00
parent f5b80a7d12
commit a33a3246b6
No known key found for this signature in database
17 changed files with 1012 additions and 644 deletions

View file

@ -2,94 +2,110 @@ use diff::Diff;
use libips::actions::File;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use libips::payload::Payload;
#[derive(Serialize, Deserialize, Debug, Clone, Diff)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]
struct Manifest {
files: HashMap<String, File>
files: HashMap<String, File>,
}
fn main() {
let base = Manifest{files: HashMap::from([
("0dh5".to_string(), File{
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("12ds3".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("654".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
})
])};
let base = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"12ds3".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
]),
};
let new_set = Manifest{files: HashMap::from([
("0dh5".to_string(), File{
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
}),
("654".to_string(), File{
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
})
])};
let new_set = Manifest {
files: HashMap::from([
(
"0dh5".to_string(),
File {
payload: None,
path: "var/file".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
(
"654".to_string(),
File {
payload: None,
path: "var/file1".to_string(),
group: "bin".to_string(),
owner: "root".to_string(),
mode: "0755".to_string(),
preserve: false,
overlay: false,
original_name: "".to_string(),
revert_tag: "".to_string(),
sys_attr: "".to_string(),
properties: vec![],
facets: Default::default(),
},
),
]),
};
let d = base.diff(&new_set);
println!("{:#?}", d);
}

View file

@ -8,16 +8,16 @@
use crate::digest::Digest;
use crate::fmri::Fmri;
use crate::payload::{Payload, PayloadError};
use diff::Diff;
use pest::Parser;
use pest_derive::Parser;
use serde::{Deserialize, Serialize};
use std::clone::Clone;
use std::collections::HashMap;
use std::fs::read_to_string;
use std::path::Path;
use std::result::Result as StdResult;
use std::str::FromStr;
use diff::Diff;
use serde::{Deserialize, Serialize};
use thiserror::Error;
type Result<T> = StdResult<T, ActionError>;
@ -289,23 +289,19 @@ impl From<Action> for Dependency {
}
for prop in props {
match prop.key.as_str() {
"fmri" => {
match Fmri::parse(&prop.value) {
Ok(fmri) => dep.fmri = Some(fmri),
Err(err) => {
eprintln!("Error parsing FMRI '{}': {}", prop.value, err);
dep.fmri = None;
}
"fmri" => match Fmri::parse(&prop.value) {
Ok(fmri) => dep.fmri = Some(fmri),
Err(err) => {
eprintln!("Error parsing FMRI '{}': {}", prop.value, err);
dep.fmri = None;
}
},
"type" => dep.dependency_type = prop.value,
"predicate" => {
match Fmri::parse(&prop.value) {
Ok(fmri) => dep.predicate = Some(fmri),
Err(err) => {
eprintln!("Error parsing predicate FMRI '{}': {}", prop.value, err);
dep.predicate = None;
}
"predicate" => match Fmri::parse(&prop.value) {
Ok(fmri) => dep.predicate = Some(fmri),
Err(err) => {
eprintln!("Error parsing predicate FMRI '{}': {}", prop.value, err);
dep.predicate = None;
}
},
"root-image" => dep.root_image = prop.value,

View file

@ -3,14 +3,14 @@
// MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use diff::Diff;
use serde::{Deserialize, Serialize};
use sha2::Digest as Sha2Digest;
#[allow(unused_imports)]
use sha3::Digest as Sha3Digest;
use std::fmt::Display;
use std::str::FromStr;
use std::{convert::TryInto, result::Result as StdResult};
use diff::Diff;
use serde::{Deserialize, Serialize};
use strum::{Display as StrumDisplay, EnumString};
use thiserror::Error;
@ -19,7 +19,9 @@ type Result<T> = StdResult<T, DigestError>;
#[allow(dead_code)]
static DEFAULT_ALGORITHM: DigestAlgorithm = DigestAlgorithm::SHA512;
#[derive(Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff)]
#[derive(
Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff,
)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]
@ -41,7 +43,9 @@ pub enum DigestAlgorithm {
SHA3512, // Sha3 version of sha512t
}
#[derive(Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff)]
#[derive(
Debug, PartialEq, Clone, StrumDisplay, EnumString, Default, Deserialize, Serialize, Diff,
)]
#[diff(attr(
#[derive(Debug, PartialEq)]
))]

View file

@ -57,11 +57,11 @@
//! assert_eq!(version.timestamp, Some("20200421T195136Z".to_string()));
//! ```
use diff::Diff;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::str::FromStr;
use thiserror::Error;
use serde::{Serialize, Deserialize};
use diff::Diff;
/// Errors that can occur when parsing an FMRI
#[derive(Debug, Error, PartialEq)]
@ -146,7 +146,7 @@ impl Version {
timestamp: None,
}
}
/// Helper method to pad a version string to ensure it has at least MAJOR.MINOR.PATCH components
///
/// This method takes a dot-separated version string and ensures it has at least three components
@ -161,7 +161,7 @@ impl Version {
_ => format!("{}.{}.{}", parts[0], parts[1], parts[2]), // Use only the first three parts
}
}
/// Convert the release component to a semver::Version
///
/// This method attempts to parse the release component as a semver::Version.
@ -172,7 +172,7 @@ impl Version {
let version_str = Self::pad_version_string(&self.release);
version_str.parse()
}
/// Convert the branch component to a semver::Version
///
/// This method attempts to parse the branch component as a semver::Version.
@ -186,7 +186,7 @@ impl Version {
version_str.parse()
})
}
/// Convert the build component to a semver::Version
///
/// This method attempts to parse the build component as a semver::Version.
@ -200,7 +200,7 @@ impl Version {
version_str.parse()
})
}
/// Create a new Version with the given semver::Version as release
///
/// This method creates a new Version with the given semver::Version as release.
@ -213,7 +213,7 @@ impl Version {
timestamp: None,
}
}
/// Create a Version from semver::Version components
///
/// This method creates a Version from semver::Version components.
@ -231,7 +231,7 @@ impl Version {
timestamp,
}
}
/// Create a new Version with the given semver::Version as release and branch
///
/// This method creates a new Version with the given semver::Version as release and branch.
@ -244,7 +244,7 @@ impl Version {
timestamp: None,
}
}
/// Create a new Version with the given semver::Version as release, branch, and build
///
/// This method creates a new Version with the given semver::Version as release, branch, and build.
@ -261,7 +261,7 @@ impl Version {
timestamp: None,
}
}
/// Create a new Version with the given semver::Version as release, branch, build, and timestamp
///
/// This method creates a new Version with the given semver::Version as release, branch, build, and timestamp.
@ -279,29 +279,35 @@ impl Version {
timestamp: Some(timestamp.to_string()),
}
}
/// Get all version components as semver::Version objects
///
/// This method returns all version components as semver::Version objects.
/// If a component is not present or cannot be parsed, it will be None.
pub fn to_semver(&self) -> (Result<semver::Version, semver::Error>, Option<Result<semver::Version, semver::Error>>, Option<Result<semver::Version, semver::Error>>) {
pub fn to_semver(
&self,
) -> (
Result<semver::Version, semver::Error>,
Option<Result<semver::Version, semver::Error>>,
Option<Result<semver::Version, semver::Error>>,
) {
let release = self.release_to_semver();
let branch = self.branch_to_semver();
let build = self.build_to_semver();
(release, branch, build)
}
/// Check if this version is compatible with semver
///
/// This method checks if all components of this version can be parsed as semver::Version objects.
pub fn is_semver_compatible(&self) -> bool {
let (release, branch, build) = self.to_semver();
let release_ok = release.is_ok();
let branch_ok = branch.map_or(true, |r| r.is_ok());
let build_ok = build.map_or(true, |r| r.is_ok());
release_ok && branch_ok && build_ok
}
@ -364,7 +370,10 @@ impl Version {
if timestamp.is_empty() {
return Err(FmriError::InvalidTimestampFormat);
}
if !timestamp.chars().all(|c| c.is_ascii_hexdigit() || c == 'T' || c == 'Z') {
if !timestamp
.chars()
.all(|c| c.is_ascii_hexdigit() || c == 'T' || c == 'Z')
{
return Err(FmriError::InvalidTimestampFormat);
}
version.timestamp = Some(timestamp.to_string());
@ -431,7 +440,7 @@ impl Version {
if parts.len() >= 3 {
// Create a version string with exactly MAJOR.MINOR.PATCH
let version_str = format!("{}.{}.{}", parts[0], parts[1], parts[2]);
// Try to parse it as a semver version
if let Err(_) = semver::Version::parse(&version_str) {
return false;
@ -519,12 +528,12 @@ impl Fmri {
version,
}
}
/// Get the stem of the FMRI (the package name without version)
pub fn stem(&self) -> &str {
&self.name
}
/// Get the version of the FMRI as a string
pub fn version(&self) -> String {
match &self.version {
@ -562,35 +571,34 @@ impl Fmri {
// Check if there's a scheme with a publisher (pkg://publisher/name)
if let Some(scheme_end) = name_part.find("://") {
fmri.scheme = name_part[0..scheme_end].to_string();
// Extract the rest after the scheme
let rest = &name_part[scheme_end + 3..];
// Check if there's a publisher
if let Some(publisher_end) = rest.find('/') {
// If there's a non-empty publisher, set it
if publisher_end > 0 {
fmri.publisher = Some(rest[0..publisher_end].to_string());
}
// Set the name
fmri.name = rest[publisher_end + 1..].to_string();
} else {
// No publisher, just a name
fmri.name = rest.to_string();
}
}
}
// Check if there's a scheme without a publisher (pkg:/name)
else if let Some(scheme_end) = name_part.find(":/") {
fmri.scheme = name_part[0..scheme_end].to_string();
// Extract the rest after the scheme
let rest = &name_part[scheme_end + 2..];
// Set the name
fmri.name = rest.to_string();
}
else {
} else {
// No scheme, just a name
fmri.name = name_part.to_string();
}
@ -635,7 +643,7 @@ impl FromStr for Fmri {
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_semver_conversion() {
// Test release_to_semver
@ -644,21 +652,21 @@ mod tests {
assert_eq!(semver.major, 5);
assert_eq!(semver.minor, 11);
assert_eq!(semver.patch, 0);
// Test with a full semver version
let version = Version::new("1.2.3");
let semver = version.release_to_semver().unwrap();
assert_eq!(semver.major, 1);
assert_eq!(semver.minor, 2);
assert_eq!(semver.patch, 3);
// Test branch_to_semver
let version = Version::with_branch("5.11", "1");
let semver = version.branch_to_semver().unwrap().unwrap();
assert_eq!(semver.major, 1);
assert_eq!(semver.minor, 0);
assert_eq!(semver.patch, 0);
// Test with a full semver version
let mut version = Version::new("5.11");
version.branch = Some("1.2.3".to_string());
@ -666,7 +674,7 @@ mod tests {
assert_eq!(semver.major, 1);
assert_eq!(semver.minor, 2);
assert_eq!(semver.patch, 3);
// Test build_to_semver
let mut version = Version::new("5.11");
version.build = Some("2020.0.1.0".to_string());
@ -674,20 +682,20 @@ mod tests {
assert_eq!(semver.major, 2020);
assert_eq!(semver.minor, 0);
assert_eq!(semver.patch, 1);
// Test from_semver
let release = semver::Version::new(5, 11, 0);
let branch = Some(semver::Version::new(1, 0, 0));
let build = Some(semver::Version::new(2020, 0, 1));
let timestamp = Some("20200421T195136Z".to_string());
let version = Version::from_semver(release, branch, build, timestamp);
assert_eq!(version.release, "5.11.0");
assert_eq!(version.branch, Some("1.0.0".to_string()));
assert_eq!(version.build, Some("2020.0.1".to_string()));
assert_eq!(version.timestamp, Some("20200421T195136Z".to_string()));
}
#[test]
fn test_new_semver_constructors() {
// Test new_semver
@ -697,7 +705,7 @@ mod tests {
assert_eq!(version.branch, None);
assert_eq!(version.build, None);
assert_eq!(version.timestamp, None);
// Test with_branch_semver
let release = semver::Version::new(5, 11, 0);
let branch = semver::Version::new(1, 0, 0);
@ -706,7 +714,7 @@ mod tests {
assert_eq!(version.branch, Some("1.0.0".to_string()));
assert_eq!(version.build, None);
assert_eq!(version.timestamp, None);
// Test with_build_semver
let release = semver::Version::new(5, 11, 0);
let branch = Some(semver::Version::new(1, 0, 0));
@ -716,7 +724,7 @@ mod tests {
assert_eq!(version.branch, Some("1.0.0".to_string()));
assert_eq!(version.build, Some("2020.0.1".to_string()));
assert_eq!(version.timestamp, None);
// Test with_timestamp_semver
let release = semver::Version::new(5, 11, 0);
let branch = Some(semver::Version::new(1, 0, 0));
@ -728,43 +736,43 @@ mod tests {
assert_eq!(version.build, Some("2020.0.1".to_string()));
assert_eq!(version.timestamp, Some("20200421T195136Z".to_string()));
}
#[test]
fn test_to_semver() {
// Test to_semver with all components
let mut version = Version::new("5.11");
version.branch = Some("1.2.3".to_string());
version.build = Some("2020.0.1".to_string());
let (release, branch, build) = version.to_semver();
assert!(release.is_ok());
let release = release.unwrap();
assert_eq!(release.major, 5);
assert_eq!(release.minor, 11);
assert_eq!(release.patch, 0);
assert!(branch.is_some());
let branch = branch.unwrap().unwrap();
assert_eq!(branch.major, 1);
assert_eq!(branch.minor, 2);
assert_eq!(branch.patch, 3);
assert!(build.is_some());
let build = build.unwrap().unwrap();
assert_eq!(build.major, 2020);
assert_eq!(build.minor, 0);
assert_eq!(build.patch, 1);
// Test is_semver_compatible
assert!(version.is_semver_compatible());
// Test with invalid semver
let mut version = Version::new("5.11");
version.branch = Some("invalid".to_string());
assert!(!version.is_semver_compatible());
}
#[test]
fn test_semver_validation() {
// Test valid dot-separated vectors
@ -772,14 +780,14 @@ mod tests {
assert!(Version::is_valid_dot_vector("5.11"));
assert!(Version::is_valid_dot_vector("5.11.0"));
assert!(Version::is_valid_dot_vector("2020.0.1.0"));
// Test invalid dot-separated vectors
assert!(!Version::is_valid_dot_vector(""));
assert!(!Version::is_valid_dot_vector(".11"));
assert!(!Version::is_valid_dot_vector("5."));
assert!(!Version::is_valid_dot_vector("5..11"));
assert!(!Version::is_valid_dot_vector("5a.11"));
// Test semver validation
assert!(Version::is_valid_dot_vector("1.2.3"));
assert!(Version::is_valid_dot_vector("0.0.0"));
@ -857,7 +865,8 @@ mod tests {
assert_eq!(version.to_string(), "5.11-2020.0.1.0");
// Test displaying a release, branch, build, and timestamp
let version = Version::with_timestamp("5.11", Some("1"), Some("2020.0.1.0"), "20200421T195136Z");
let version =
Version::with_timestamp("5.11", Some("1"), Some("2020.0.1.0"), "20200421T195136Z");
assert_eq!(version.to_string(), "5.11,1-2020.0.1.0:20200421T195136Z");
// Test displaying a release and timestamp (no branch or build)
@ -908,7 +917,10 @@ mod tests {
assert_eq!(fmri.version, None);
// Test parsing with scheme, publisher, and version
let fmri = Fmri::parse("pkg://openindiana.org/web/server/nginx@1.18.0,5.11-2020.0.1.0:20200421T195136Z").unwrap();
let fmri = Fmri::parse(
"pkg://openindiana.org/web/server/nginx@1.18.0,5.11-2020.0.1.0:20200421T195136Z",
)
.unwrap();
assert_eq!(fmri.scheme, "pkg");
assert_eq!(fmri.publisher, Some("openindiana.org".to_string()));
assert_eq!(fmri.name, "web/server/nginx");
@ -947,14 +959,22 @@ mod tests {
// Test displaying a name and version
let version = Version::with_timestamp("5.11", Some("1"), None, "20200421T195136Z");
let fmri = Fmri::with_version("sunos/coreutils", version);
assert_eq!(fmri.to_string(), "pkg:///sunos/coreutils@5.11,1:20200421T195136Z");
assert_eq!(
fmri.to_string(),
"pkg:///sunos/coreutils@5.11,1:20200421T195136Z"
);
// Test displaying with publisher
let fmri = Fmri::with_publisher("openindiana.org", "web/server/nginx", None);
assert_eq!(fmri.to_string(), "pkg://openindiana.org/web/server/nginx");
// Test displaying with publisher and version
let version = Version::with_timestamp("1.18.0", Some("5.11"), Some("2020.0.1.0"), "20200421T195136Z");
let version = Version::with_timestamp(
"1.18.0",
Some("5.11"),
Some("2020.0.1.0"),
"20200421T195136Z",
);
let fmri = Fmri::with_publisher("openindiana.org", "web/server/nginx", Some(version));
assert_eq!(
fmri.to_string(),
@ -968,31 +988,76 @@ mod tests {
assert_eq!(Version::parse(""), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse(".11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5."), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5..11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Version::parse("5a.11"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(
Version::parse("5..11"),
Err(FmriError::InvalidReleaseFormat)
);
assert_eq!(
Version::parse("5a.11"),
Err(FmriError::InvalidReleaseFormat)
);
// Test invalid branch format
assert_eq!(Version::parse("5.11,"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,.1"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1."), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1..2"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Version::parse("5.11,1a.2"), Err(FmriError::InvalidBranchFormat));
assert_eq!(
Version::parse("5.11,.1"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1."),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1..2"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Version::parse("5.11,1a.2"),
Err(FmriError::InvalidBranchFormat)
);
// Test invalid build format
assert_eq!(Version::parse("5.11-"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-.1"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1."), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1..2"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Version::parse("5.11-1a.2"), Err(FmriError::InvalidBuildFormat));
assert_eq!(
Version::parse("5.11-.1"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1."),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1..2"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Version::parse("5.11-1a.2"),
Err(FmriError::InvalidBuildFormat)
);
// Test invalid timestamp format
assert_eq!(Version::parse("5.11:"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(Version::parse("5.11:xyz"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(
Version::parse("5.11:"),
Err(FmriError::InvalidTimestampFormat)
);
assert_eq!(
Version::parse("5.11:xyz"),
Err(FmriError::InvalidTimestampFormat)
);
// Test invalid version format
assert_eq!(Version::parse("5.11,1,2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(Version::parse("5.11-1-2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(Version::parse("5.11:1:2"), Err(FmriError::InvalidVersionFormat));
assert_eq!(
Version::parse("5.11,1,2"),
Err(FmriError::InvalidVersionFormat)
);
assert_eq!(
Version::parse("5.11-1-2"),
Err(FmriError::InvalidVersionFormat)
);
assert_eq!(
Version::parse("5.11:1:2"),
Err(FmriError::InvalidVersionFormat)
);
}
#[test]
@ -1001,14 +1066,29 @@ mod tests {
assert_eq!(Fmri::parse(""), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg://"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg:///"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("pkg://publisher/"), Err(FmriError::InvalidFormat));
assert_eq!(
Fmri::parse("pkg://publisher/"),
Err(FmriError::InvalidFormat)
);
assert_eq!(Fmri::parse("@5.11"), Err(FmriError::InvalidFormat));
assert_eq!(Fmri::parse("name@version@extra"), Err(FmriError::InvalidFormat));
assert_eq!(
Fmri::parse("name@version@extra"),
Err(FmriError::InvalidFormat)
);
// Test invalid version
assert_eq!(Fmri::parse("name@"), Err(FmriError::InvalidReleaseFormat));
assert_eq!(Fmri::parse("name@5.11,"), Err(FmriError::InvalidBranchFormat));
assert_eq!(Fmri::parse("name@5.11-"), Err(FmriError::InvalidBuildFormat));
assert_eq!(Fmri::parse("name@5.11:"), Err(FmriError::InvalidTimestampFormat));
assert_eq!(
Fmri::parse("name@5.11,"),
Err(FmriError::InvalidBranchFormat)
);
assert_eq!(
Fmri::parse("name@5.11-"),
Err(FmriError::InvalidBuildFormat)
);
assert_eq!(
Fmri::parse("name@5.11:"),
Err(FmriError::InvalidTimestampFormat)
);
}
}
}

View file

@ -1,10 +1,10 @@
mod properties;
use properties::*;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use properties::*;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use thiserror::Error;
#[derive(Debug, Error)]
@ -29,7 +29,7 @@ pub struct Image {
impl Image {
pub fn new<P: Into<PathBuf>>(path: P) -> Image {
Image{
Image {
path: path.into(),
version: 5,
variants: HashMap::new(),
@ -55,4 +55,4 @@ impl Image {
Image::new(path.as_ref())
}
}
}
}

View file

@ -8,4 +8,4 @@ pub enum ImageProperty {
None,
Array(Vec<ImageProperty>),
Integer(i32),
}
}

View file

@ -7,8 +7,8 @@
pub mod actions;
pub mod digest;
pub mod fmri;
pub mod payload;
pub mod image;
pub mod payload;
pub mod repository;
#[cfg(test)]
@ -987,12 +987,17 @@ depend facet.version-lock.system/mozilla-nss=true fmri=system/mozilla-nss@3.51.1
..Dependency::default()
},
Dependency {
fmri: Some(Fmri::parse("pkg:/system/file-system/nfs@0.5.11,5.11-2020.0.1.19951").unwrap()),
fmri: Some(
Fmri::parse("pkg:/system/file-system/nfs@0.5.11,5.11-2020.0.1.19951").unwrap(),
),
dependency_type: "incorporate".to_string(),
..Dependency::default()
},
Dependency {
fmri: Some(Fmri::parse("pkg:/system/data/hardware-registry@2020.2.22,5.11-2020.0.1.19951").unwrap()),
fmri: Some(
Fmri::parse("pkg:/system/data/hardware-registry@2020.2.22,5.11-2020.0.1.19951")
.unwrap(),
),
dependency_type: "incorporate".to_string(),
facets: hashmap! {
"version-lock.system/data/hardware-registry".to_string() => Facet{
@ -1038,7 +1043,7 @@ depend facet.version-lock.system/mozilla-nss=true fmri=system/mozilla-nss@3.51.1
} else {
assert_eq!(dependency.fmri.is_none(), test_results[pos].fmri.is_none());
}
assert_eq!(
dependency.dependency_type,
test_results[pos].dependency_type

View file

@ -4,12 +4,12 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use crate::digest::{Digest, DigestAlgorithm, DigestError, DigestSource};
use diff::Diff;
use object::Object;
use serde::{Deserialize, Serialize};
use std::io::Error as IOError;
use std::path::Path;
use std::result::Result as StdResult;
use diff::Diff;
use serde::{Deserialize, Serialize};
use thiserror::Error;
type Result<T> = StdResult<T, PayloadError>;

View file

@ -4,7 +4,7 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use anyhow::Result;
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
@ -27,11 +27,12 @@ fn convert_system_time_to_datetime(time: &SystemTime) -> chrono::DateTime<chrono
let secs = duration.as_secs() as i64;
let nanos = duration.subsec_nanos();
chrono::DateTime::from_timestamp(secs, nanos)
.unwrap_or_else(|| chrono::DateTime::<chrono::Utc>::from_naive_utc_and_offset(
chrono::DateTime::from_timestamp(secs, nanos).unwrap_or_else(|| {
chrono::DateTime::<chrono::Utc>::from_naive_utc_and_offset(
chrono::NaiveDateTime::default(),
chrono::Utc,
))
)
})
}
/// Catalog version
@ -52,7 +53,7 @@ pub struct CatalogPartInfo {
/// Last modified timestamp in ISO-8601 'basic format' date in UTC
#[serde(rename = "last-modified")]
pub last_modified: String,
/// Optional SHA-1 signature of the catalog part
#[serde(rename = "signature-sha-1", skip_serializing_if = "Option::is_none")]
pub signature_sha1: Option<String>,
@ -64,7 +65,7 @@ pub struct UpdateLogInfo {
/// Last modified timestamp in ISO-8601 'basic format' date in UTC
#[serde(rename = "last-modified")]
pub last_modified: String,
/// Optional SHA-1 signature of the update log
#[serde(rename = "signature-sha-1", skip_serializing_if = "Option::is_none")]
pub signature_sha1: Option<String>,
@ -76,29 +77,29 @@ pub struct CatalogAttrs {
/// Optional signature information
#[serde(rename = "_SIGNATURE", skip_serializing_if = "Option::is_none")]
pub signature: Option<HashMap<String, String>>,
/// Creation timestamp in ISO-8601 'basic format' date in UTC
pub created: String,
/// Last modified timestamp in ISO-8601 'basic format' date in UTC
#[serde(rename = "last-modified")]
pub last_modified: String,
/// Number of unique package stems in the catalog
#[serde(rename = "package-count")]
pub package_count: usize,
/// Number of unique package versions in the catalog
#[serde(rename = "package-version-count")]
pub package_version_count: usize,
/// Available catalog parts
pub parts: HashMap<String, CatalogPartInfo>,
/// Available update logs
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub updates: HashMap<String, UpdateLogInfo>,
/// Catalog version
pub version: u32,
}
@ -108,7 +109,7 @@ impl CatalogAttrs {
pub fn new() -> Self {
let now = SystemTime::now();
let timestamp = format_iso8601_basic(&now);
CatalogAttrs {
signature: None,
created: timestamp.clone(),
@ -120,14 +121,14 @@ impl CatalogAttrs {
version: CatalogVersion::V1 as u32,
}
}
/// Save catalog attributes to a file
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let json = serde_json::to_string_pretty(self)?;
fs::write(path, json)?;
Ok(())
}
/// Load catalog attributes from a file
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
let json = fs::read_to_string(path)?;
@ -141,11 +142,11 @@ impl CatalogAttrs {
pub struct PackageVersionEntry {
/// Package version string
pub version: String,
/// Optional actions associated with this package version
#[serde(skip_serializing_if = "Option::is_none")]
pub actions: Option<Vec<String>>,
/// Optional SHA-1 signature of the package manifest
#[serde(rename = "signature-sha-1", skip_serializing_if = "Option::is_none")]
pub signature_sha1: Option<String>,
@ -157,7 +158,7 @@ pub struct CatalogPart {
/// Optional signature information
#[serde(rename = "_SIGNATURE", skip_serializing_if = "Option::is_none")]
pub signature: Option<HashMap<String, String>>,
/// Packages by publisher and stem
pub packages: HashMap<String, HashMap<String, Vec<PackageVersionEntry>>>,
}
@ -170,12 +171,23 @@ impl CatalogPart {
packages: HashMap::new(),
}
}
/// Add a package to the catalog part
pub fn add_package(&mut self, publisher: &str, fmri: &Fmri, actions: Option<Vec<String>>, signature: Option<String>) {
let publisher_packages = self.packages.entry(publisher.to_string()).or_insert_with(HashMap::new);
let stem_versions = publisher_packages.entry(fmri.stem().to_string()).or_insert_with(Vec::new);
pub fn add_package(
&mut self,
publisher: &str,
fmri: &Fmri,
actions: Option<Vec<String>>,
signature: Option<String>,
) {
let publisher_packages = self
.packages
.entry(publisher.to_string())
.or_insert_with(HashMap::new);
let stem_versions = publisher_packages
.entry(fmri.stem().to_string())
.or_insert_with(Vec::new);
// Check if this version already exists
for entry in stem_versions.iter_mut() {
if !fmri.version().is_empty() && entry.version == fmri.version() {
@ -189,25 +201,25 @@ impl CatalogPart {
return;
}
}
// Add a new entry
stem_versions.push(PackageVersionEntry {
version: fmri.version(),
actions,
signature_sha1: signature,
});
// Sort versions (should be in ascending order)
stem_versions.sort_by(|a, b| a.version.cmp(&b.version));
}
/// Save a catalog part to a file
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let json = serde_json::to_string_pretty(self)?;
fs::write(path, json)?;
Ok(())
}
/// Load catalog part from a file
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
let json = fs::read_to_string(path)?;
@ -231,18 +243,18 @@ pub struct PackageUpdateEntry {
/// Type of operation (add or remove)
#[serde(rename = "op-type")]
pub op_type: CatalogOperationType,
/// Timestamp of the operation in ISO-8601 'basic format' date in UTC
#[serde(rename = "op-time")]
pub op_time: String,
/// Package version string
pub version: String,
/// Catalog part entries
#[serde(flatten)]
pub catalog_parts: HashMap<String, HashMap<String, Vec<String>>>,
/// Optional SHA-1 signature of the package manifest
#[serde(rename = "signature-sha-1", skip_serializing_if = "Option::is_none")]
pub signature_sha1: Option<String>,
@ -254,7 +266,7 @@ pub struct UpdateLog {
/// Optional signature information
#[serde(rename = "_SIGNATURE", skip_serializing_if = "Option::is_none")]
pub signature: Option<HashMap<String, String>>,
/// Updates by publisher and stem
pub updates: HashMap<String, HashMap<String, Vec<PackageUpdateEntry>>>,
}
@ -267,7 +279,7 @@ impl UpdateLog {
updates: HashMap::new(),
}
}
/// Add a package update to the log
pub fn add_update(
&mut self,
@ -277,12 +289,17 @@ impl UpdateLog {
catalog_parts: HashMap<String, HashMap<String, Vec<String>>>,
signature: Option<String>,
) {
let publisher_updates = self.updates.entry(publisher.to_string()).or_insert_with(HashMap::new);
let stem_updates = publisher_updates.entry(fmri.stem().to_string()).or_insert_with(Vec::new);
let publisher_updates = self
.updates
.entry(publisher.to_string())
.or_insert_with(HashMap::new);
let stem_updates = publisher_updates
.entry(fmri.stem().to_string())
.or_insert_with(Vec::new);
let now = SystemTime::now();
let timestamp = format_iso8601_basic(&now);
stem_updates.push(PackageUpdateEntry {
op_type,
op_time: timestamp,
@ -291,14 +308,14 @@ impl UpdateLog {
signature_sha1: signature,
});
}
/// Save update log to a file
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let json = serde_json::to_string_pretty(self)?;
fs::write(path, json)?;
Ok(())
}
/// Load update log from a file
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
let json = fs::read_to_string(path)?;
@ -311,13 +328,13 @@ impl UpdateLog {
pub struct CatalogManager {
    /// Root directory that holds `catalog.attrs`, catalog parts, and update logs
    catalog_dir: PathBuf,
    /// Catalog attributes loaded from (or to be saved to) `catalog.attrs`
    attrs: CatalogAttrs,
    /// Catalog parts currently loaded in memory, keyed by their file name
    /// inside `catalog_dir`
    parts: HashMap<String, CatalogPart>,
    /// Update logs currently loaded in memory, keyed by their file name
    /// inside `catalog_dir`
    update_logs: HashMap<String, UpdateLog>,
}
@ -326,12 +343,12 @@ impl CatalogManager {
/// Create a new catalog manager
pub fn new<P: AsRef<Path>>(catalog_dir: P) -> Result<Self> {
let catalog_dir = catalog_dir.as_ref().to_path_buf();
// Create catalog directory if it doesn't exist
if !catalog_dir.exists() {
fs::create_dir_all(&catalog_dir)?;
}
// Try to load existing catalog attributes
let attrs_path = catalog_dir.join("catalog.attrs");
let attrs = if attrs_path.exists() {
@ -339,7 +356,7 @@ impl CatalogManager {
} else {
CatalogAttrs::new()
};
Ok(CatalogManager {
catalog_dir,
attrs,
@ -347,27 +364,27 @@ impl CatalogManager {
update_logs: HashMap::new(),
})
}
/// Returns a shared reference to the in-memory catalog attributes
/// (the contents of `catalog.attrs`).
pub fn attrs(&self) -> &CatalogAttrs {
    &self.attrs
}
/// Returns a mutable reference to the in-memory catalog attributes.
///
/// Edits made through this reference are not written to disk until
/// `save_attrs` is called.
pub fn attrs_mut(&mut self) -> &mut CatalogAttrs {
    &mut self.attrs
}
/// Looks up a catalog part by name, returning `None` when no part with
/// that name has been loaded (via `load_part`) or created in memory.
pub fn get_part(&self, name: &str) -> Option<&CatalogPart> {
    self.parts.get(name)
}
/// Looks up a catalog part by name for mutation, returning `None` when no
/// part with that name is currently in memory.
pub fn get_part_mut(&mut self, name: &str) -> Option<&mut CatalogPart> {
    self.parts.get_mut(name)
}
/// Load a catalog part
pub fn load_part(&mut self, name: &str) -> Result<()> {
let part_path = self.catalog_dir.join(name);
@ -379,7 +396,7 @@ impl CatalogManager {
Err(anyhow::anyhow!("Catalog part does not exist: {}", name))
}
}
/// Save a catalog part
pub fn save_part(&self, name: &str) -> Result<()> {
if let Some(part) = self.parts.get(name) {
@ -390,49 +407,56 @@ impl CatalogManager {
Err(anyhow::anyhow!("Catalog part not loaded: {}", name))
}
}
/// Create a new catalog part
pub fn create_part(&mut self, name: &str) -> &mut CatalogPart {
self.parts.entry(name.to_string()).or_insert_with(CatalogPart::new)
self.parts
.entry(name.to_string())
.or_insert_with(CatalogPart::new)
}
/// Persist the in-memory catalog attributes to `catalog.attrs` inside the
/// catalog directory.
///
/// # Errors
///
/// Returns an error if the attributes file cannot be written.
pub fn save_attrs(&self) -> Result<()> {
    self.attrs.save(&self.catalog_dir.join("catalog.attrs"))?;
    Ok(())
}
/// Create a new update log
pub fn create_update_log(&mut self, name: &str) -> &mut UpdateLog {
self.update_logs.entry(name.to_string()).or_insert_with(UpdateLog::new)
self.update_logs
.entry(name.to_string())
.or_insert_with(UpdateLog::new)
}
/// Save an update log
pub fn save_update_log(&self, name: &str) -> Result<()> {
if let Some(log) = self.update_logs.get(name) {
let log_path = self.catalog_dir.join(name);
log.save(&log_path)?;
// Update catalog attributes
let now = SystemTime::now();
let timestamp = format_iso8601_basic(&now);
let mut attrs = self.attrs.clone();
attrs.updates.insert(name.to_string(), UpdateLogInfo {
last_modified: timestamp,
signature_sha1: None,
});
attrs.updates.insert(
name.to_string(),
UpdateLogInfo {
last_modified: timestamp,
signature_sha1: None,
},
);
let attrs_path = self.catalog_dir.join("catalog.attrs");
attrs.save(&attrs_path)?;
Ok(())
} else {
Err(anyhow::anyhow!("Update log not loaded: {}", name))
}
}
/// Load an update log
pub fn load_update_log(&mut self, name: &str) -> Result<()> {
let log_path = self.catalog_dir.join(name);
@ -444,14 +468,14 @@ impl CatalogManager {
Err(anyhow::anyhow!("Update log does not exist: {}", name))
}
}
/// Looks up an update log by name, returning `None` when no log with that
/// name is currently loaded in memory.
pub fn get_update_log(&self, name: &str) -> Option<&UpdateLog> {
    self.update_logs.get(name)
}
/// Looks up an update log by name for mutation, returning `None` when no
/// log with that name is currently loaded in memory.
pub fn get_update_log_mut(&mut self, name: &str) -> Option<&mut UpdateLog> {
    self.update_logs.get_mut(name)
}
}
}

View file

@ -431,7 +431,7 @@ impl Transaction {
// Move the manifest to its final location in the repository
// Store in both the pkg directory and the trans directory as required
// Extract package name from manifest
let mut package_name = String::from("unknown");
for attr in &self.manifest.attributes {
@ -442,7 +442,7 @@ impl Transaction {
}
}
}
// Determine the pkg directory path based on publisher
let pkg_manifest_path = if let Some(publisher) = &self.publisher {
// Create publisher directory if it doesn't exist
@ -450,14 +450,14 @@ impl Transaction {
if !publisher_dir.exists() {
fs::create_dir_all(&publisher_dir)?;
}
// Store in publisher-specific directory with package name
publisher_dir.join(format!("{}.manifest", package_name))
} else {
// Store in root pkg directory (legacy behavior)
self.repo.join("pkg").join("manifest")
};
let trans_manifest_path = self
.repo
.join("trans")
@ -623,7 +623,8 @@ impl ReadableRepository for FileBackend {
let path = entry.path();
// Skip directories, only process files with .manifest extension
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest") {
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest")
{
// Parse the manifest file to get real package information
match Manifest::parse_file(&path) {
Ok(manifest) => {
@ -765,7 +766,8 @@ impl ReadableRepository for FileBackend {
let path = entry.path();
// Skip directories, only process files with .manifest extension
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest") {
if path.is_file() && path.extension().map_or(false, |ext| ext == "manifest")
{
// Parse the manifest file to get package information
match Manifest::parse_file(&path) {
Ok(manifest) => {
@ -1260,7 +1262,9 @@ impl FileBackend {
}
/// Get or initialize the catalog manager
pub fn get_catalog_manager(&mut self) -> Result<&mut crate::repository::catalog::CatalogManager> {
pub fn get_catalog_manager(
&mut self,
) -> Result<&mut crate::repository::catalog::CatalogManager> {
if self.catalog_manager.is_none() {
let catalog_dir = self.path.join("catalog");
self.catalog_manager = Some(crate::repository::catalog::CatalogManager::new(

View file

@ -4,18 +4,18 @@
// obtain one at https://mozilla.org/MPL/2.0/.
use anyhow::Result;
use std::path::Path;
use std::collections::HashMap;
use std::path::Path;
mod catalog;
mod file_backend;
mod rest_backend;
mod catalog;
#[cfg(test)]
mod tests;
pub use catalog::{CatalogAttrs, CatalogManager, CatalogOperationType, CatalogPart, UpdateLog};
pub use file_backend::FileBackend;
pub use rest_backend::RestBackend;
pub use catalog::{CatalogManager, CatalogAttrs, CatalogPart, UpdateLog, CatalogOperationType};
/// Repository configuration filename
pub const REPOSITORY_CONFIG_FILENAME: &str = "pkg6.repository";
@ -110,62 +110,85 @@ impl Default for RepositoryConfig {
/// Repository trait for read-only operations
pub trait ReadableRepository {
/// Open an existing repository
fn open<P: AsRef<Path>>(path: P) -> Result<Self> where Self: Sized;
fn open<P: AsRef<Path>>(path: P) -> Result<Self>
where
Self: Sized;
/// Get repository information
fn get_info(&self) -> Result<RepositoryInfo>;
/// List packages in the repository
fn list_packages(&self, publisher: Option<&str>, pattern: Option<&str>) -> Result<Vec<PackageInfo>>;
fn list_packages(
&self,
publisher: Option<&str>,
pattern: Option<&str>,
) -> Result<Vec<PackageInfo>>;
/// Show contents of packages
fn show_contents(&self, publisher: Option<&str>, pattern: Option<&str>, action_types: Option<&[String]>) -> Result<Vec<PackageContents>>;
fn show_contents(
&self,
publisher: Option<&str>,
pattern: Option<&str>,
action_types: Option<&[String]>,
) -> Result<Vec<PackageContents>>;
/// Search for packages in the repository
///
///
/// This method searches for packages in the repository using the search index.
/// It returns a list of packages that match the search query.
///
///
/// # Arguments
///
///
/// * `query` - The search query
/// * `publisher` - Optional publisher to limit the search to
/// * `limit` - Optional maximum number of results to return
fn search(&self, query: &str, publisher: Option<&str>, limit: Option<usize>) -> Result<Vec<PackageInfo>>;
fn search(
&self,
query: &str,
publisher: Option<&str>,
limit: Option<usize>,
) -> Result<Vec<PackageInfo>>;
}
/// Repository trait for write operations
pub trait WritableRepository {
/// Create a new repository at the specified path
fn create<P: AsRef<Path>>(path: P, version: RepositoryVersion) -> Result<Self> where Self: Sized;
fn create<P: AsRef<Path>>(path: P, version: RepositoryVersion) -> Result<Self>
where
Self: Sized;
/// Save the repository configuration
fn save_config(&self) -> Result<()>;
/// Add a publisher to the repository
fn add_publisher(&mut self, publisher: &str) -> Result<()>;
/// Remove a publisher from the repository
fn remove_publisher(&mut self, publisher: &str, dry_run: bool) -> Result<()>;
/// Set a repository property
fn set_property(&mut self, property: &str, value: &str) -> Result<()>;
/// Set a publisher property
fn set_publisher_property(&mut self, publisher: &str, property: &str, value: &str) -> Result<()>;
fn set_publisher_property(
&mut self,
publisher: &str,
property: &str,
value: &str,
) -> Result<()>;
/// Rebuild repository metadata
fn rebuild(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()>;
/// Refresh repository metadata
fn refresh(&self, publisher: Option<&str>, no_catalog: bool, no_index: bool) -> Result<()>;
/// Set the default publisher for the repository
fn set_default_publisher(&mut self, publisher: &str) -> Result<()>;
}
/// Repository trait defining the interface for all repository backends
///
///
/// This trait combines both ReadableRepository and WritableRepository traits
/// for backward compatibility.
pub trait Repository: ReadableRepository + WritableRepository {}
pub trait Repository: ReadableRepository + WritableRepository {}

View file

@ -8,8 +8,8 @@ mod tests {
use crate::actions::Manifest;
use crate::fmri::Fmri;
use crate::repository::{
CatalogManager, FileBackend, ReadableRepository,
RepositoryVersion, WritableRepository, REPOSITORY_CONFIG_FILENAME,
CatalogManager, FileBackend, ReadableRepository, RepositoryVersion, WritableRepository,
REPOSITORY_CONFIG_FILENAME,
};
use std::fs;
use std::path::PathBuf;
@ -65,7 +65,7 @@ mod tests {
PathBuf::from("/tmp/pkg6_test/manifests"),
)
}
// Helper function to publish a package to a repository
fn publish_package(
repo: &mut FileBackend,
@ -73,26 +73,38 @@ mod tests {
prototype_dir: &PathBuf,
publisher: &str,
) -> Result<(), anyhow::Error> {
println!("Publishing package from manifest: {}", manifest_path.display());
println!(
"Publishing package from manifest: {}",
manifest_path.display()
);
println!("Prototype directory: {}", prototype_dir.display());
println!("Publisher: {}", publisher);
// Check if the manifest file exists
if !manifest_path.exists() {
println!("Error: Manifest file does not exist");
return Err(anyhow::anyhow!("Manifest file does not exist: {}", manifest_path.display()));
return Err(anyhow::anyhow!(
"Manifest file does not exist: {}",
manifest_path.display()
));
}
// Check if the prototype directory exists
if !prototype_dir.exists() {
println!("Error: Prototype directory does not exist");
return Err(anyhow::anyhow!("Prototype directory does not exist: {}", prototype_dir.display()));
return Err(anyhow::anyhow!(
"Prototype directory does not exist: {}",
prototype_dir.display()
));
}
// Parse the manifest file
println!("Parsing manifest file...");
let manifest = Manifest::parse_file(manifest_path)?;
println!("Manifest parsed successfully. Files: {}", manifest.files.len());
println!(
"Manifest parsed successfully. Files: {}",
manifest.files.len()
);
// Begin a transaction
println!("Beginning transaction...");
@ -103,13 +115,16 @@ mod tests {
for file_action in manifest.files.iter() {
// Construct the full path to the file in the prototype directory
let file_path = prototype_dir.join(&file_action.path);
// Check if the file exists
if !file_path.exists() {
println!("Warning: File does not exist in prototype directory: {}", file_path.display());
println!(
"Warning: File does not exist in prototype directory: {}",
file_path.display()
);
continue;
}
// Add the file to the transaction
println!("Adding file: {}", file_action.path);
transaction.add_file(file_action.clone(), &file_path)?;
@ -127,14 +142,17 @@ mod tests {
println!("Committing transaction...");
transaction.commit()?;
println!("Transaction committed successfully");
// Debug: Check if the package manifest was stored in the correct location
let publisher_pkg_dir = repo.path.join("pkg").join(publisher);
println!("Publisher package directory: {}", publisher_pkg_dir.display());
println!(
"Publisher package directory: {}",
publisher_pkg_dir.display()
);
if publisher_pkg_dir.exists() {
println!("Publisher directory exists");
// List files in the publisher directory
if let Ok(entries) = std::fs::read_dir(&publisher_pkg_dir) {
println!("Files in publisher directory:");
@ -257,15 +275,15 @@ mod tests {
// Check that the files were published
assert!(repo_path.join("file").exists());
// Get repository information
let repo_info = repo.get_info().unwrap();
// Check that the publisher information is correct
assert_eq!(repo_info.publishers.len(), 1);
let publisher_info = &repo_info.publishers[0];
assert_eq!(publisher_info.name, "test");
// Clean up
cleanup_test_dir(&test_dir);
}
@ -291,13 +309,13 @@ mod tests {
// List packages
let packages = repo.list_packages(Some("test"), None).unwrap();
// Check that packages were listed
assert!(!packages.is_empty());
// Check that the package name is correct
assert_eq!(packages[0].fmri.name, "example");
// Clean up
cleanup_test_dir(&test_dir);
}
@ -323,20 +341,22 @@ mod tests {
// Show contents
let contents = repo.show_contents(Some("test"), None, None).unwrap();
// Check that contents were shown
assert!(!contents.is_empty());
// Check that the contents include the expected files
let package_contents = &contents[0];
assert!(package_contents.files.is_some());
let files = package_contents.files.as_ref().unwrap();
// Check for specific files
assert!(files.iter().any(|f| f.contains("usr/bin/hello")));
assert!(files.iter().any(|f| f.contains("usr/share/doc/example/README.txt")));
assert!(files
.iter()
.any(|f| f.contains("usr/share/doc/example/README.txt")));
assert!(files.iter().any(|f| f.contains("etc/config/example.conf")));
// Clean up
cleanup_test_dir(&test_dir);
}
@ -365,14 +385,14 @@ mod tests {
// Search for packages
let results = repo.search("example", Some("test"), None).unwrap();
// Check that search results were returned
assert!(!results.is_empty());
// Check that the package name is correct
assert_eq!(results[0].fmri.name, "example");
// Clean up
cleanup_test_dir(&test_dir);
}
}
}

View file

@ -1,8 +1,8 @@
use clap::{Parser, Subcommand};
use libips::actions::{ActionError, File, Manifest};
use libips::repository::{ReadableRepository, WritableRepository, FileBackend};
use libips::repository::{FileBackend, ReadableRepository, WritableRepository};
use anyhow::{Result, anyhow};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::fs::{read_dir, OpenOptions};
use std::io::Write;
@ -325,12 +325,18 @@ fn publish_package(
) -> Result<()> {
// Check if the manifest file exists
if !manifest_path.exists() {
return Err(anyhow!("Manifest file does not exist: {}", manifest_path.display()));
return Err(anyhow!(
"Manifest file does not exist: {}",
manifest_path.display()
));
}
// Check if the prototype directory exists
if !prototype_dir.exists() {
return Err(anyhow!("Prototype directory does not exist: {}", prototype_dir.display()));
return Err(anyhow!(
"Prototype directory does not exist: {}",
prototype_dir.display()
));
}
// Parse the manifest file
@ -368,17 +374,23 @@ fn publish_package(
let mut transaction = repo.begin_transaction()?;
// Add files from the prototype directory to the transaction
println!("Adding files from prototype directory: {}", prototype_dir.display());
println!(
"Adding files from prototype directory: {}",
prototype_dir.display()
);
for file_action in manifest.files.iter() {
// Construct the full path to the file in the prototype directory
let file_path = prototype_dir.join(&file_action.path);
// Check if the file exists
if !file_path.exists() {
println!("Warning: File does not exist in prototype directory: {}", file_path.display());
println!(
"Warning: File does not exist in prototype directory: {}",
file_path.display()
);
continue;
}
// Add the file to the transaction
println!("Adding file: {}", file_action.path);
transaction.add_file(file_action.clone(), &file_path)?;

View file

@ -103,7 +103,11 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Check that the repository was created
assert!(repo_path.exists());
@ -126,15 +130,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"example.com",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "example.com"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Check that the publisher was added
assert!(repo_path.join("catalog").join("example.com").exists());
@ -155,15 +163,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish a package using pkg6dev
let manifest_path = manifest_dir.join("example.p5m");
@ -173,17 +185,25 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish package: {:?}",
result.err()
);
// Check that the package was published
let result = run_pkg6repo(&[
"list",
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to list packages: {:?}", result.err());
let result = run_pkg6repo(&["list", repo_path.to_str().unwrap()]);
assert!(
result.is_ok(),
"Failed to list packages: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("example"), "Package not found in repository");
assert!(
output.contains("example"),
"Package not found in repository"
);
// Clean up
cleanup_test_dir(&test_dir);
@ -200,15 +220,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish a package using pkg6dev
let manifest_path = manifest_dir.join("example.p5m");
@ -218,20 +242,33 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish package: {:?}",
result.err()
);
// Show package contents using pkg6repo
let result = run_pkg6repo(&[
"contents",
repo_path.to_str().unwrap(),
"example",
]);
assert!(result.is_ok(), "Failed to show package contents: {:?}", result.err());
let result = run_pkg6repo(&["contents", repo_path.to_str().unwrap(), "example"]);
assert!(
result.is_ok(),
"Failed to show package contents: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("usr/bin/hello"), "File not found in package contents");
assert!(output.contains("usr/share/doc/example/README.txt"), "File not found in package contents");
assert!(output.contains("etc/config/example.conf"), "File not found in package contents");
assert!(
output.contains("usr/bin/hello"),
"File not found in package contents"
);
assert!(
output.contains("usr/share/doc/example/README.txt"),
"File not found in package contents"
);
assert!(
output.contains("etc/config/example.conf"),
"File not found in package contents"
);
// Clean up
cleanup_test_dir(&test_dir);
@ -248,15 +285,19 @@ mod e2e_tests {
// Create a repository using pkg6repo
let result = run_pkg6repo(&["create", "--repo-version", "4", repo_path.to_str().unwrap()]);
assert!(result.is_ok(), "Failed to create repository: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to create repository: {:?}",
result.err()
);
// Add a publisher using pkg6repo
let result = run_pkg6repo(&[
"add-publisher",
repo_path.to_str().unwrap(),
"test",
]);
assert!(result.is_ok(), "Failed to add publisher: {:?}", result.err());
let result = run_pkg6repo(&["add-publisher", repo_path.to_str().unwrap(), "test"]);
assert!(
result.is_ok(),
"Failed to add publisher: {:?}",
result.err()
);
// Publish the first package using pkg6dev
let manifest_path1 = manifest_dir.join("example.p5m");
@ -266,7 +307,11 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish first package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish first package: {:?}",
result.err()
);
// Publish the second package using pkg6dev
let manifest_path2 = manifest_dir.join("example2.p5m");
@ -276,20 +321,31 @@ mod e2e_tests {
prototype_dir.to_str().unwrap(),
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to publish second package: {:?}", result.err());
assert!(
result.is_ok(),
"Failed to publish second package: {:?}",
result.err()
);
// List packages using pkg6repo
let result = run_pkg6repo(&[
"list",
repo_path.to_str().unwrap(),
]);
assert!(result.is_ok(), "Failed to list packages: {:?}", result.err());
let result = run_pkg6repo(&["list", repo_path.to_str().unwrap()]);
assert!(
result.is_ok(),
"Failed to list packages: {:?}",
result.err()
);
let output = result.unwrap();
assert!(output.contains("example"), "First package not found in repository");
assert!(output.contains("example2"), "Second package not found in repository");
assert!(
output.contains("example"),
"First package not found in repository"
);
assert!(
output.contains("example2"),
"Second package not found in repository"
);
// Clean up
cleanup_test_dir(&test_dir);
}
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,28 +1,31 @@
#[cfg(test)]
mod tests {
use libips::repository::{ReadableRepository, WritableRepository, RepositoryVersion, FileBackend, REPOSITORY_CONFIG_FILENAME, PublisherInfo, RepositoryInfo};
use std::path::PathBuf;
use libips::repository::{
FileBackend, ReadableRepository, RepositoryVersion,
WritableRepository, REPOSITORY_CONFIG_FILENAME,
};
use std::fs;
use std::path::PathBuf;
// These tests interact with real repositories in a known location
// instead of using temporary directories. This allows for better
// debugging and inspection of the repositories during testing.
// The base directory for all test repositories
const TEST_REPO_BASE_DIR: &str = "/tmp/pkg6repo_test";
// Helper function to create a unique test directory
/// Create a fresh, empty directory for the named test under
/// `TEST_REPO_BASE_DIR`, removing any leftovers from a previous run.
///
/// Panics with the offending path in the message if either filesystem
/// operation fails, so a broken test environment is easy to diagnose.
fn create_test_dir(test_name: &str) -> PathBuf {
    let test_dir = PathBuf::from(TEST_REPO_BASE_DIR).join(test_name);

    // Clean up any existing directory so every test starts from scratch.
    if test_dir.exists() {
        fs::remove_dir_all(&test_dir)
            .unwrap_or_else(|e| panic!("failed to remove {}: {}", test_dir.display(), e));
    }

    // Create the (now guaranteed fresh) directory.
    fs::create_dir_all(&test_dir)
        .unwrap_or_else(|e| panic!("failed to create {}: {}", test_dir.display(), e));

    test_dir
}
@ -38,10 +41,10 @@ mod tests {
// Create a real test directory
let test_dir = create_test_dir("create_repository");
let repo_path = test_dir.join("repo");
// Create a repository
let _ = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
// Check that the repository was created
assert!(repo_path.exists());
assert!(repo_path.join("catalog").exists());
@ -50,109 +53,125 @@ mod tests {
assert!(repo_path.join("pkg").exists());
assert!(repo_path.join("trans").exists());
assert!(repo_path.join(REPOSITORY_CONFIG_FILENAME).exists());
// Clean up
cleanup_test_dir(&test_dir);
}
#[test]
fn test_add_publisher() {
    // Work in a real, inspectable directory instead of a tempdir.
    let test_dir = create_test_dir("add_publisher");
    let repo_path = test_dir.join("repo");

    // Start from a freshly created repository and register one publisher.
    let mut repo = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
    repo.add_publisher("example.com").unwrap();

    // The publisher must show up both in the config and on disk.
    assert!(repo.config.publishers.contains(&"example.com".to_string()));
    assert!(repo_path.join("catalog").join("example.com").exists());
    assert!(repo_path.join("pkg").join("example.com").exists());

    cleanup_test_dir(&test_dir);
}
#[test]
fn test_remove_publisher() {
// Create a real test directory
let test_dir = create_test_dir("remove_publisher");
let repo_path = test_dir.join("repo");
// Create a repository
let mut repo = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
// Add a publisher
repo.add_publisher("example.com").unwrap();
// Check that the publisher was added
assert!(repo.config.publishers.contains(&"example.com".to_string()));
// Check that the publisher directories were created
let catalog_dir = repo_path.join("catalog").join("example.com");
let pkg_dir = repo_path.join("pkg").join("example.com");
assert!(catalog_dir.exists(), "Catalog directory should exist after adding publisher");
assert!(pkg_dir.exists(), "Package directory should exist after adding publisher");
assert!(
catalog_dir.exists(),
"Catalog directory should exist after adding publisher"
);
assert!(
pkg_dir.exists(),
"Package directory should exist after adding publisher"
);
// Remove the publisher
repo.remove_publisher("example.com", false).unwrap();
// Check that the publisher was removed from the configuration
assert!(!repo.config.publishers.contains(&"example.com".to_string()));
// Check that the publisher directories were removed
assert!(!catalog_dir.exists(), "Catalog directory should not exist after removing publisher");
assert!(!pkg_dir.exists(), "Package directory should not exist after removing publisher");
assert!(
!catalog_dir.exists(),
"Catalog directory should not exist after removing publisher"
);
assert!(
!pkg_dir.exists(),
"Package directory should not exist after removing publisher"
);
// Clean up
cleanup_test_dir(&test_dir);
}
#[test]
fn test_set_property() {
// Create a real test directory
let test_dir = create_test_dir("set_property");
let repo_path = test_dir.join("repo");
// Create a repository
let mut repo = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
// Set a property
repo.set_property("publisher/prefix", "example.com").unwrap();
repo.set_property("publisher/prefix", "example.com")
.unwrap();
// Check that the property was set
assert_eq!(repo.config.properties.get("publisher/prefix").unwrap(), "example.com");
assert_eq!(
repo.config.properties.get("publisher/prefix").unwrap(),
"example.com"
);
// Clean up
cleanup_test_dir(&test_dir);
}
#[test]
fn test_get_info() {
    // Work in a real, inspectable directory instead of a tempdir.
    let test_dir = create_test_dir("get_info");
    let repo_path = test_dir.join("repo");

    // Start from a freshly created repository with a single publisher.
    let mut repo = FileBackend::create(&repo_path, RepositoryVersion::V4).unwrap();
    repo.add_publisher("example.com").unwrap();

    // Query the repository and verify the reported publisher details.
    let repo_info = repo.get_info().unwrap();
    assert_eq!(repo_info.publishers.len(), 1);

    let publisher_info = &repo_info.publishers[0];
    assert_eq!(publisher_info.name, "example.com");
    assert_eq!(publisher_info.package_count, 0); // no packages published yet
    assert_eq!(publisher_info.status, "online");

    cleanup_test_dir(&test_dir);
}
}
}

View file

@ -1,15 +1,13 @@
use anyhow::Result;
use pest::Parser;
use pest_derive::Parser;
use std::collections::HashMap;
use thiserror::Error;
use anyhow::Result;
#[derive(Debug, Error)]
pub enum MacroParserError {
#[error("macro does not exist: {macro_name}")]
DoesNotExist {
macro_name: String,
}
DoesNotExist { macro_name: String },
}
#[derive(Parser)]
@ -18,17 +16,17 @@ struct InternalMacroParser;
#[derive(Default, Debug)]
pub struct MacroParser {
pub macros: HashMap<String, String>
pub macros: HashMap<String, String>,
}
#[derive(Default, Debug)]
pub struct Macro {
pub name: String,
pub parameters: Vec<String>
pub parameters: Vec<String>,
}
impl MacroParser {
pub fn parse(&self ,raw_string: String) -> Result<String> {
pub fn parse(&self, raw_string: String) -> Result<String> {
let mut return_string = String::new();
for (i, line) in raw_string.lines().enumerate() {
@ -91,9 +89,10 @@ impl MacroParser {
fn get_variable(&self, macro_name: &str) -> Result<&str> {
if self.macros.contains_key(macro_name) {
return Ok(self.macros[macro_name].as_str())
return Ok(self.macros[macro_name].as_str());
}
Err(MacroParserError::DoesNotExist {macro_name: macro_name.into()})?
Err(MacroParserError::DoesNotExist {
macro_name: macro_name.into(),
})?
}
}