Add pkg6recv package for receiving IPS repositories.

- Introduced `pkg6recv` for downloading packages from IPS repositories with support for recursive dependency fetching.
- Implemented CLI commands with `clap` for source/destination setup, package selection, and publisher defaults.
- Added `ConsoleProgressReporter` for detailed receive progress visibility.
- Enhanced the `Manifest` parser to support JSON format alongside IPS text format.
- Updated the `FileBackend` and `RestBackend` repositories with new operations for fetching manifests and payloads.
- Extended `Digest` utilities for payload integrity checks.
- Added tests to verify basic functionality and manifest format preservation.
This commit is contained in:
Till Wegmueller 2026-01-20 20:16:58 +01:00
parent 22178cffd7
commit 1c0619ca55
No known key found for this signature in database
15 changed files with 661 additions and 137 deletions

17
Cargo.lock generated
View file

@ -2136,6 +2136,21 @@ dependencies = [
"tracing-subscriber", "tracing-subscriber",
] ]
[[package]]
name = "pkg6recv"
version = "0.5.3"
dependencies = [
"clap",
"libips",
"miette 7.6.0",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.17",
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "pkg6repo" name = "pkg6repo"
version = "0.5.3" version = "0.5.3"
@ -2460,6 +2475,7 @@ version = "0.12.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
dependencies = [ dependencies = [
"async-compression",
"base64 0.22.1", "base64 0.22.1",
"bytes", "bytes",
"encoding_rs", "encoding_rs",
@ -2490,6 +2506,7 @@ dependencies = [
"tokio", "tokio",
"tokio-native-tls", "tokio-native-tls",
"tokio-rustls", "tokio-rustls",
"tokio-util",
"tower", "tower",
"tower-http", "tower-http",
"tower-service", "tower-service",

View file

@ -8,6 +8,7 @@ members = [
"specfile", "specfile",
"ports", "ports",
"pkg6", "pkg6",
"pkg6recv",
"pkgtree", "pkgtree",
"xtask", "xtask",
] ]

View file

@ -43,7 +43,7 @@ walkdir = "2.4.0"
redb = { version = "3" } redb = { version = "3" }
bincode = { version = "2", features = ["serde"] } bincode = { version = "2", features = ["serde"] }
rust-ini = "0.21" rust-ini = "0.21"
reqwest = { version = "0.12", features = ["blocking", "json"] } reqwest = { version = "0.12", features = ["blocking", "json", "gzip", "deflate"] }
resolvo = "0.10" resolvo = "0.10"
[features] [features]

View file

@ -15,19 +15,7 @@ action_driver = @{"driver"}
action_group = @{"group"} action_group = @{"group"}
action_user = @{"user"} action_user = @{"user"}
action_legacy = @{"legacy"} action_legacy = @{"legacy"}
action_name = @{ action_name = @{ ASCII_ALPHA+ }
action_set |
action_depend |
action_dir |
action_file |
action_license |
action_hardlink |
action_link |
action_driver |
action_group |
action_user |
action_legacy
}
quoted_string = @{ quoted_string = @{
"\"" "\""
~ quoted_character* ~ quoted_character*

View file

@ -885,8 +885,11 @@ impl Manifest {
ActionKind::Transform => { ActionKind::Transform => {
self.transforms.push(act.into()); self.transforms.push(act.into());
} }
ActionKind::Signature => {
debug!("signature action encountered, skipping for now");
}
ActionKind::Unknown { action } => { ActionKind::Unknown { action } => {
panic!("action {:?} not known", action) debug!("action {:?} not known, skipping", action);
} }
} }
} }
@ -909,6 +912,19 @@ impl Manifest {
} }
pub fn parse_string(content: String) -> Result<Manifest> { pub fn parse_string(content: String) -> Result<Manifest> {
// Try to parse as JSON first
if content.trim_start().starts_with('{') {
match serde_json::from_str::<Manifest>(&content) {
Ok(manifest) => return Ok(manifest),
Err(err) => {
debug!(
"Manifest::parse_string: Error in JSON deserialization: {}. Continuing with mtree like format parsing",
err
);
}
}
}
let mut m = Manifest::new(); let mut m = Manifest::new();
let pairs = ManifestParser::parse(Rule::manifest, &content)?; let pairs = ManifestParser::parse(Rule::manifest, &content)?;
@ -993,6 +1009,7 @@ pub enum ActionKind {
Legacy, Legacy,
Unknown { action: String }, Unknown { action: String },
Transform, Transform,
Signature,
} }
impl Default for ActionKind { impl Default for ActionKind {
@ -1042,7 +1059,8 @@ fn get_action_kind(act: &str) -> ActionKind {
"group" => ActionKind::Group, "group" => ActionKind::Group,
"user" => ActionKind::User, "user" => ActionKind::User,
"legacy" => ActionKind::Legacy, "legacy" => ActionKind::Legacy,
"<transform" => ActionKind::Transform, "<transform" | "transform" => ActionKind::Transform,
"signature" => ActionKind::Signature,
_ => ActionKind::Unknown { action: act.into() }, _ => ActionKind::Unknown { action: act.into() },
} }
} }

View file

@ -111,6 +111,10 @@ impl FromStr for Digest {
impl Digest { impl Digest {
pub fn from_bytes(b: &[u8], algo: DigestAlgorithm, src: DigestSource) -> Result<Self> { pub fn from_bytes(b: &[u8], algo: DigestAlgorithm, src: DigestSource) -> Result<Self> {
let hash = match algo { let hash = match algo {
DigestAlgorithm::SHA1 => {
use sha1::Sha1;
format!("{:x}", Sha1::digest(b))
}
DigestAlgorithm::SHA256 => { DigestAlgorithm::SHA256 => {
format!("{:x}", sha2::Sha256::digest(b)) format!("{:x}", sha2::Sha256::digest(b))
} }
@ -139,6 +143,62 @@ impl Digest {
hash, hash,
}) })
} }
pub fn from_reader<R: std::io::Read>(
mut r: R,
algo: DigestAlgorithm,
src: DigestSource,
) -> Result<Self> {
let hash = match algo {
DigestAlgorithm::SHA1 => {
use sha1::{Digest as _, Sha1};
let mut hasher = Sha1::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
DigestAlgorithm::SHA256 => {
use sha2::{Digest as _, Sha256};
let mut hasher = Sha256::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
DigestAlgorithm::SHA512Half => {
use sha2::{Digest as _, Sha512_256};
let mut hasher = Sha512_256::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
DigestAlgorithm::SHA512 => {
use sha2::{Digest as _, Sha512};
let mut hasher = Sha512::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
DigestAlgorithm::SHA3512Half | DigestAlgorithm::SHA3256 => {
use sha3::{Digest as _, Sha3_256};
let mut hasher = Sha3_256::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
DigestAlgorithm::SHA3512 => {
use sha3::{Digest as _, Sha3_512};
let mut hasher = Sha3_512::new();
std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
format!("{:x}", hasher.finalize())
}
x => {
return Err(DigestError::UnknownAlgorithm {
algorithm: x.to_string(),
});
}
};
Ok(Digest {
source: src,
algorithm: algo,
hash,
})
}
} }
impl Display for Digest { impl Display for Digest {
@ -164,4 +224,8 @@ pub enum DigestError {
help("Digest should be in the format: source:algorithm:hash") help("Digest should be in the format: source:algorithm:hash")
)] )]
InvalidDigestFormat { digest: String, details: String }, InvalidDigestFormat { digest: String, details: String },
#[error("I/O error: {0}")]
#[diagnostic(code(ips::digest_error::io))]
IoError(#[from] std::io::Error),
} }

View file

@ -151,7 +151,7 @@ pub enum FmriError {
/// let version = Version::new_semver(semver_version); /// let version = Version::new_semver(semver_version);
/// assert_eq!(version.release, "1.2.3"); /// assert_eq!(version.release, "1.2.3");
/// ``` /// ```
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Diff)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Diff)]
#[diff(attr( #[diff(attr(
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
))] ))]
@ -513,7 +513,7 @@ impl FromStr for Version {
/// ///
/// An FMRI is a unique identifier for a package in the IPS system. /// An FMRI is a unique identifier for a package in the IPS system.
/// It follows the format: pkg://publisher/package_name@version /// It follows the format: pkg://publisher/package_name@version
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Diff)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Diff)]
#[diff(attr( #[diff(attr(
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
))] ))]
@ -984,14 +984,14 @@ mod tests {
fn test_fmri_display() { fn test_fmri_display() {
// Test displaying a name only // Test displaying a name only
let fmri = Fmri::new("sunos/coreutils"); let fmri = Fmri::new("sunos/coreutils");
assert_eq!(fmri.to_string(), "pkg:///sunos/coreutils"); assert_eq!(fmri.to_string(), "pkg:/sunos/coreutils");
// Test displaying a name and version // Test displaying a name and version
let version = Version::with_timestamp("5.11", Some("1"), None, "20200421T195136Z"); let version = Version::with_timestamp("5.11", Some("1"), None, "20200421T195136Z");
let fmri = Fmri::with_version("sunos/coreutils", version); let fmri = Fmri::with_version("sunos/coreutils", version);
assert_eq!( assert_eq!(
fmri.to_string(), fmri.to_string(),
"pkg:///sunos/coreutils@5.11,1:20200421T195136Z" "pkg:/sunos/coreutils@5.11,1:20200421T195136Z"
); );
// Test displaying with publisher // Test displaying with publisher

View file

@ -507,7 +507,7 @@ impl Image {
if origin.starts_with("file://") { if origin.starts_with("file://") {
let path_str = origin.trim_start_matches("file://"); let path_str = origin.trim_start_matches("file://");
let path = std::path::PathBuf::from(path_str); let path = std::path::PathBuf::from(path_str);
let repo = crate::repository::FileBackend::open(&path)?; let mut repo = crate::repository::FileBackend::open(&path)?;
repo.fetch_manifest_text(&publisher_name, fmri)? repo.fetch_manifest_text(&publisher_name, fmri)?
} else { } else {
let mut repo = crate::repository::RestBackend::open(origin)?; let mut repo = crate::repository::RestBackend::open(origin)?;

View file

@ -12,6 +12,7 @@ pub mod fmri;
pub mod image; pub mod image;
pub mod payload; pub mod payload;
pub mod publisher; pub mod publisher;
pub mod recv;
pub mod repository; pub mod repository;
pub mod solver; pub mod solver;
mod test_json_manifest; mod test_json_manifest;

284
libips/src/recv.rs Normal file
View file

@ -0,0 +1,284 @@
// This Source Code Form is subject to the terms of
// the Mozilla Public License, v. 2.0. If a copy of the
// MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use crate::repository::{ReadableRepository, FileBackend, RepositoryError, Result, WritableRepository, ProgressReporter, ProgressInfo, NoopProgressReporter};
use crate::fmri::Fmri;
use crate::actions::Manifest;
use std::collections::HashSet;
use tempfile::tempdir;
use tracing::{info, debug};
/// PackageReceiver handles downloading packages from a source repository
/// and storing them in a destination repository.
///
/// The source may be any `ReadableRepository` backend; the destination is
/// always a local `FileBackend`. An optional `ProgressReporter` receives
/// start/update/finish callbacks during the transfer.
pub struct PackageReceiver<'a, S: ReadableRepository> {
    // Repository packages are fetched from; borrowed mutably because fetch
    // operations on the trait take `&mut self`.
    source: &'a mut S,
    // Local destination repository that packages are committed into.
    dest: FileBackend,
    // Optional reporter for progress visibility; `None` means no reporting.
    progress: Option<&'a dyn ProgressReporter>,
}
impl<'a, S: ReadableRepository> PackageReceiver<'a, S> {
    /// Create a new PackageReceiver that reads from `source` and writes to `dest`.
    pub fn new(source: &'a mut S, dest: FileBackend) -> Self {
        Self { source, dest, progress: None }
    }

    /// Set the progress reporter used for receive progress callbacks.
    pub fn with_progress(mut self, progress: &'a dyn ProgressReporter) -> Self {
        self.progress = Some(progress);
        self
    }

    /// Receive packages from the source repository into the destination.
    ///
    /// # Arguments
    ///
    /// * `default_publisher` - The default publisher name if not specified in an FMRI
    /// * `fmris` - List of FMRIs to receive; an FMRI without a version is resolved
    ///   to the newest version available in the source repository
    /// * `recursive` - Whether to also receive declared dependencies recursively
    ///
    /// # Errors
    ///
    /// Returns an error if a package has no publisher (neither in the FMRI nor
    /// via `default_publisher`), or if any repository operation fails.
    pub fn receive(&mut self, default_publisher: Option<&str>, fmris: &[Fmri], recursive: bool) -> Result<()> {
        let mut processed = HashSet::new();
        let mut queue: Vec<Fmri> = fmris.to_vec();
        let mut updated_publishers = HashSet::new();
        // Tracks every FMRI that was ever enqueued so dependencies are queued at most once.
        let mut queued: HashSet<Fmri> = fmris.iter().cloned().collect();

        let progress = self.progress.unwrap_or(&NoopProgressReporter);
        let mut overall_progress = ProgressInfo::new("Receiving packages");
        progress.start(&overall_progress);

        // Running totals for the progress bar; adjusted as wildcard FMRIs expand
        // and as duplicates are skipped.
        let mut total_packages = queue.len() as u64;
        let mut packages_done = 0u64;

        while let Some(fmri) = queue.pop() {
            // If the FMRI doesn't have a version, we need to find the newest one.
            let fmris_to_fetch = if fmri.version.is_none() {
                let publisher = fmri.publisher.as_deref().or(default_publisher).ok_or_else(|| {
                    RepositoryError::Other(format!("No publisher specified for package {}", fmri.name))
                })?;
                overall_progress = overall_progress.with_context(format!("Looking up newest version for {}", fmri.name));
                progress.update(&overall_progress);
                debug!("No version specified for {}, looking up newest", fmri.name);
                let pkgs = self.source.list_packages(Some(publisher), Some(&fmri.name))?;

                // Group by package name to find the newest version for each.
                let mut by_name: std::collections::HashMap<String, Vec<crate::repository::PackageInfo>> = std::collections::HashMap::new();
                for pi in pkgs {
                    by_name.entry(pi.fmri.name.clone()).or_default().push(pi);
                }
                let mut results = Vec::new();
                for (name, versions) in by_name {
                    // NOTE(review): "newest" is chosen by lexicographic comparison of the
                    // full FMRI string, which may not match true version ordering — confirm.
                    let newest = versions.into_iter().max_by(|a, b| {
                        a.fmri.to_string().cmp(&b.fmri.to_string())
                    });
                    if let Some(pi) = newest {
                        results.push(pi.fmri);
                    } else {
                        info!("Package {} not found in source for publisher {}", name, publisher);
                    }
                }
                if results.is_empty() {
                    info!("Package {} not found in source for publisher {}", fmri.name, publisher);
                    continue;
                }
                // Update total_packages: remove the wildcard FMRI we just popped, and add actual results
                total_packages = total_packages.saturating_sub(1) + results.len() as u64;
                results
            } else {
                vec![fmri]
            };

            for fmri_to_fetch in fmris_to_fetch {
                let publisher_name = fmri_to_fetch.publisher.as_deref().or(default_publisher).ok_or_else(|| {
                    RepositoryError::Other(format!("No publisher specified for package {}", fmri_to_fetch.name))
                })?.to_string();

                if !processed.insert(fmri_to_fetch.clone()) {
                    // Already processed (possibly as a dependency): don't count it again.
                    // Decrement the total so the progress counter stays consistent —
                    // this FMRI was counted once when queued or wildcard-expanded.
                    total_packages = total_packages.saturating_sub(1);
                    continue;
                }
                packages_done += 1;
                overall_progress = overall_progress
                    .with_total(total_packages)
                    .with_current(packages_done)
                    .with_context(format!("Receiving {}", fmri_to_fetch));
                progress.update(&overall_progress);
                info!("Receiving package {} from publisher {}", fmri_to_fetch, publisher_name);

                let manifest = self.receive_one(&publisher_name, &fmri_to_fetch)?;
                updated_publishers.insert(publisher_name.clone());

                if recursive {
                    // Queue each dependency exactly once; inherit the current
                    // publisher when the dependency FMRI doesn't name one.
                    for dep in manifest.dependencies {
                        if let Some(mut dep_fmri) = dep.fmri {
                            if dep_fmri.publisher.is_none() {
                                dep_fmri.publisher = Some(publisher_name.clone());
                            }
                            if !processed.contains(&dep_fmri) && queued.insert(dep_fmri.clone()) {
                                total_packages += 1;
                                queue.push(dep_fmri);
                            }
                        }
                    }
                }
            }
        }

        // Rebuild destination metadata once per publisher that received packages.
        for pub_name in updated_publishers {
            info!("Rebuilding metadata for publisher {}", pub_name);
            overall_progress = overall_progress.with_context(format!("Rebuilding metadata for {}", pub_name));
            progress.update(&overall_progress);
            self.dest.rebuild(Some(&pub_name), false, false)?;
        }

        progress.finish(&overall_progress);
        Ok(())
    }

    /// Receive a single package: fetch its manifest and payloads from the source
    /// and commit them to the destination repository in one transaction.
    ///
    /// Returns the parsed manifest so the caller can walk its dependencies.
    fn receive_one(&mut self, publisher: &str, fmri: &Fmri) -> Result<Manifest> {
        let progress = self.progress.unwrap_or(&NoopProgressReporter);
        let manifest_text = self.source.fetch_manifest_text(publisher, fmri)?;
        let manifest = Manifest::parse_string(manifest_text.clone()).map_err(RepositoryError::from)?;

        // Ensure the publisher exists in the destination before opening a transaction.
        let dest_info = self.dest.get_info()?;
        if !dest_info.publishers.iter().any(|p| p.name == publisher) {
            info!("Adding publisher {} to destination repository", publisher);
            self.dest.add_publisher(publisher)?;
        }

        let mut txn = self.dest.begin_transaction()?;
        txn.set_publisher(publisher);
        // Preserve the original manifest text so the on-disk format is kept verbatim.
        txn.set_legacy_manifest(manifest_text);

        // Payloads are staged into a temporary directory before being added to the
        // transaction; the directory is removed when `temp_dir` is dropped.
        let temp_dir = tempdir().map_err(RepositoryError::IoError)?;
        let payload_files: Vec<_> = manifest.files.iter().filter(|f| f.payload.is_some()).collect();
        let total_files = payload_files.len() as u64;
        for (i, file) in payload_files.into_iter().enumerate() {
            if let Some(payload) = &file.payload {
                let files_done = (i + 1) as u64;
                let digest = &payload.primary_identifier.hash;
                progress.update(&ProgressInfo::new(format!("Receiving payloads for {}", fmri.name))
                    .with_total(total_files)
                    .with_current(files_done)
                    .with_context(format!("Payload: {}", digest)));
                let temp_file_path = temp_dir.path().join(digest);
                debug!("Fetching payload {} to {}", digest, temp_file_path.display());
                self.source.fetch_payload(publisher, digest, &temp_file_path)?;
                txn.add_file(file.clone(), &temp_file_path)?;
            }
        }

        txn.update_manifest(manifest.clone());
        txn.commit()?;

        Ok(manifest)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::actions::Attr;
    use crate::repository::{FileBackend, RepositoryVersion};
    use tempfile::tempdir;

    /// A package published through the transaction API should arrive intact
    /// in the destination repository.
    #[test]
    fn test_receive_basic() -> Result<()> {
        let src_dir = tempdir().map_err(RepositoryError::IoError)?;
        let dst_dir = tempdir().map_err(RepositoryError::IoError)?;

        // Populate the source repository with a single package.
        let mut src = FileBackend::create(src_dir.path(), RepositoryVersion::V4)?;
        src.add_publisher("test")?;

        let pkg_fmri = Fmri::parse("pkg://test/pkgA@1.0").unwrap();
        let mut m = Manifest::new();
        m.attributes.push(Attr {
            key: "pkg.fmri".to_string(),
            values: vec![pkg_fmri.to_string()],
            ..Default::default()
        });

        let mut tx = src.begin_transaction()?;
        tx.set_publisher("test");
        tx.update_manifest(m);
        tx.commit()?;
        src.rebuild(Some("test"), false, false)?;

        // Receive into a fresh destination repository.
        let dst = FileBackend::create(dst_dir.path(), RepositoryVersion::V4)?;
        let mut rx = PackageReceiver::new(&mut src, dst);
        rx.receive(Some("test"), &[Fmri::new("pkgA")], false)?;

        // The destination should now list exactly the one package.
        let verify = FileBackend::open(dst_dir.path())?;
        let found = verify.list_packages(Some("test"), Some("pkgA"))?;
        assert_eq!(found.len(), 1);
        assert_eq!(found[0].fmri.name, "pkgA");
        assert_eq!(found[0].fmri.version.as_ref().unwrap().release, "1.0");
        Ok(())
    }

    /// Receiving must keep the legacy IPS text manifest byte-for-byte while
    /// also producing the JSON companion manifest in the destination.
    #[test]
    fn test_receive_preserves_manifest_format() -> Result<()> {
        let src_dir = tempdir().map_err(RepositoryError::IoError)?;
        let dst_dir = tempdir().map_err(RepositoryError::IoError)?;

        let mut src = FileBackend::create(src_dir.path(), RepositoryVersion::V4)?;
        src.add_publisher("test")?;

        // Parsed only to mirror the package identity used below.
        let _fmri = Fmri::parse("pkg://test/pkgA@1.0").unwrap();
        let manifest_body = "set name=pkg.fmri value=pkg://test/pkgA@1.0\nset name=pkg.summary value=test\n";

        // Write the manifest directly in IPS text format, bypassing the transaction API.
        let src_manifest = FileBackend::construct_manifest_path(src_dir.path(), "test", "pkgA", "1.0");
        std::fs::create_dir_all(src_manifest.parent().unwrap()).map_err(RepositoryError::IoError)?;
        std::fs::write(&src_manifest, manifest_body).map_err(RepositoryError::IoError)?;

        // Rebuild so the source repository indexes the hand-written package.
        src.rebuild(Some("test"), false, false)?;

        let dst = FileBackend::create(dst_dir.path(), RepositoryVersion::V4)?;
        let mut rx = PackageReceiver::new(&mut src, dst);
        rx.receive(Some("test"), &[Fmri::new("pkgA")], false)?;

        // The text manifest in the destination must match the source exactly.
        let dst_manifest = FileBackend::construct_manifest_path(dst_dir.path(), "test", "pkgA", "1.0");
        let text = std::fs::read_to_string(&dst_manifest).map_err(RepositoryError::IoError)?;
        assert_eq!(text, manifest_body);
        assert!(!text.starts_with('{'), "Manifest should not be JSON");

        // The sibling ".json" manifest must exist and actually be JSON.
        let mut json_path = dst_manifest.clone();
        let mut file_name = json_path.file_name().unwrap().to_os_string();
        file_name.push(".json");
        json_path.set_file_name(file_name);
        assert!(json_path.exists(), "JSON manifest should exist at {}", json_path.display());
        let json_text = std::fs::read_to_string(&json_path).map_err(RepositoryError::IoError)?;
        assert!(json_text.starts_with('{'), "JSON manifest should be JSON");
        Ok(())
    }
}

View file

@ -1674,6 +1674,60 @@ impl ReadableRepository for FileBackend {
))) )))
} }
fn fetch_manifest_text(
&mut self,
publisher: &str,
fmri: &Fmri,
) -> Result<String> {
// Require a concrete version
let version = fmri.version();
if version.is_empty() {
return Err(RepositoryError::Other(
"FMRI must include a version to fetch manifest".into(),
));
}
// Preferred path: publisher-scoped manifest path
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
if path.exists() {
return std::fs::read_to_string(&path)
.map_err(|e| RepositoryError::FileReadError {
path,
source: e,
});
}
// Fallbacks: global pkg layout without publisher
let encoded_stem = Self::url_encode(fmri.stem());
let encoded_version = Self::url_encode(&version);
let alt1 = self
.path
.join("pkg")
.join(&encoded_stem)
.join(&encoded_version);
if alt1.exists() {
return std::fs::read_to_string(&alt1).map_err(|e| RepositoryError::FileReadError {
path: alt1,
source: e,
});
}
let alt2 = self
.path
.join("publisher")
.join(publisher)
.join("pkg")
.join(&encoded_stem)
.join(&encoded_version);
if alt2.exists() {
return std::fs::read_to_string(&alt2).map_err(|e| RepositoryError::FileReadError {
path: alt2,
source: e,
});
}
Err(RepositoryError::NotFound(format!(
"manifest for {} not found",
fmri
)))
}
/// Search for packages in the repository /// Search for packages in the repository
fn search( fn search(
&self, &self,
@ -2099,52 +2153,7 @@ impl FileBackend {
let _ = super::catalog_writer::write_update_log(&path, &mut log)?; let _ = super::catalog_writer::write_update_log(&path, &mut log)?;
Ok(path) Ok(path)
} }
pub fn fetch_manifest_text(&self, publisher: &str, fmri: &Fmri) -> Result<String> {
// Require a concrete version
let version = fmri.version();
if version.is_empty() {
return Err(RepositoryError::Other(
"FMRI must include a version to fetch manifest".into(),
));
}
// Preferred path: publisher-scoped manifest path
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
if path.exists() {
return std::fs::read_to_string(&path)
.map_err(|e| RepositoryError::FileReadError { path, source: e });
}
// Fallbacks: global pkg layout without publisher
let encoded_stem = Self::url_encode(fmri.stem());
let encoded_version = Self::url_encode(&version);
let alt1 = self
.path
.join("pkg")
.join(&encoded_stem)
.join(&encoded_version);
if alt1.exists() {
return std::fs::read_to_string(&alt1).map_err(|e| RepositoryError::FileReadError {
path: alt1,
source: e,
});
}
let alt2 = self
.path
.join("publisher")
.join(publisher)
.join("pkg")
.join(&encoded_stem)
.join(&encoded_version);
if alt2.exists() {
return std::fs::read_to_string(&alt2).map_err(|e| RepositoryError::FileReadError {
path: alt2,
source: e,
});
}
Err(RepositoryError::NotFound(format!(
"manifest for {} not found",
fmri
)))
}
/// Fetch catalog file path /// Fetch catalog file path
pub fn get_catalog_file_path(&self, publisher: &str, filename: &str) -> Result<PathBuf> { pub fn get_catalog_file_path(&self, publisher: &str, filename: &str) -> Result<PathBuf> {
if filename.contains('/') || filename.contains('\\') { if filename.contains('/') || filename.contains('\\') {

View file

@ -384,6 +384,13 @@ pub trait ReadableRepository {
fmri: &crate::fmri::Fmri, fmri: &crate::fmri::Fmri,
) -> Result<crate::actions::Manifest>; ) -> Result<crate::actions::Manifest>;
/// Fetch a package manifest as raw text by FMRI from the repository.
fn fetch_manifest_text(
&mut self,
publisher: &str,
fmri: &crate::fmri::Fmri,
) -> Result<String>;
/// Search for packages in the repository /// Search for packages in the repository
/// ///
/// This method searches for packages in the repository using the search index. /// This method searches for packages in the repository using the search index.

View file

@ -3,9 +3,9 @@
// MPL was not distributed with this file, You can // MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/. // obtain one at https://mozilla.org/MPL/2.0/.
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::Write; use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::FromStr; use std::str::FromStr;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
@ -64,7 +64,7 @@ impl WritableRepository for RestBackend {
// This is a stub implementation // This is a stub implementation
// In a real implementation, we would make a REST API call to create the repository // In a real implementation, we would make a REST API call to create the repository
let uri_str = uri.as_ref().to_string_lossy().to_string(); let uri_str = uri.as_ref().to_string_lossy().trim_end_matches('/').to_string();
// Create the repository configuration // Create the repository configuration
let config = RepositoryConfig { let config = RepositoryConfig {
@ -323,7 +323,7 @@ impl WritableRepository for RestBackend {
impl ReadableRepository for RestBackend { impl ReadableRepository for RestBackend {
/// Open an existing repository /// Open an existing repository
fn open<P: AsRef<Path>>(uri: P) -> Result<Self> { fn open<P: AsRef<Path>>(uri: P) -> Result<Self> {
let uri_str = uri.as_ref().to_string_lossy().to_string(); let uri_str = uri.as_ref().to_string_lossy().trim_end_matches('/').to_string();
// Create an HTTP client // Create an HTTP client
let client = Client::new(); let client = Client::new();
@ -344,13 +344,20 @@ impl ReadableRepository for RestBackend {
match response.json::<Value>() { match response.json::<Value>() {
Ok(json) => { Ok(json) => {
// Extract publisher information // Extract publisher information
if let Some(publishers) = if let Some(publishers) = json.get("publishers") {
json.get("publishers").and_then(|p| p.as_object()) if let Some(publishers_obj) = publishers.as_object() {
{ for (name, _) in publishers_obj {
for (name, _) in publishers {
debug!("Found publisher: {}", name); debug!("Found publisher: {}", name);
config.publishers.push(name.clone()); config.publishers.push(name.clone());
} }
} else if let Some(publishers_arr) = publishers.as_array() {
for p in publishers_arr {
if let Some(name) = p.get("name").and_then(|n| n.as_str()) {
debug!("Found publisher: {}", name);
config.publishers.push(name.to_string());
}
}
}
} }
} }
Err(e) => { Err(e) => {
@ -369,9 +376,9 @@ impl ReadableRepository for RestBackend {
} }
} }
// If we couldn't get any publishers, add a default one // If we couldn't get any publishers, warn the user
if config.publishers.is_empty() { if config.publishers.is_empty() {
config.publishers.push("openindiana.org".to_string()); warn!("No publishers discovered for repository: {}", uri_str);
} }
// Create the repository instance // Create the repository instance
@ -417,35 +424,64 @@ impl ReadableRepository for RestBackend {
fn list_packages( fn list_packages(
&self, &self,
publisher: Option<&str>, publisher: Option<&str>,
_pattern: Option<&str>, pattern: Option<&str>,
) -> Result<Vec<PackageInfo>> { ) -> Result<Vec<PackageInfo>> {
// This is a stub implementation let pattern = pattern.unwrap_or("*");
// In a real implementation, we would make a REST API call to list packages
let packages = Vec::new(); // Use search API to find packages
// URL: /search/0/<pattern>
let url = format!("{}/search/0/{}", self.uri, pattern);
debug!("Listing packages via search: {}", url);
// Filter publishers if specified let mut packages = Vec::new();
let publishers = if let Some(pub_name) = publisher { let mut seen_fmris = HashSet::new();
if !self.config.publishers.contains(&pub_name.to_string()) {
return Err(RepositoryError::PublisherNotFound(pub_name.to_string())); match self.client.get(&url).send() {
Ok(resp) => {
let resp = match resp.error_for_status() {
Ok(r) => r,
Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => {
return Ok(Vec::new());
}
Err(e) => {
return Err(RepositoryError::Other(format!("Search API error: {} for {}", e, url)));
} }
vec![pub_name.to_string()]
} else {
self.config.publishers.clone()
}; };
// For each publisher, list packages let reader = BufReader::new(resp);
for _pub_name in publishers { for line in reader.lines() {
// In a real implementation, we would make a REST API call to get package information let line = line.map_err(|e| {
// The API call would return a list of packages with their names, versions, and other metadata RepositoryError::Other(format!("Failed to read search response line: {}", e))
// We would then parse this information and create PackageInfo structs })?;
// Line format: <attr> <fmri> <value_type> <value>
// Example: pkg.fmri pkg:/system/rsyslog@8.2508.0,5.11-151056.0:20251023T180542Z set omnios/system/rsyslog
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == "pkg.fmri" {
if let Ok(fmri) = crate::fmri::Fmri::parse(parts[1]) {
// Filter by publisher if requested
if let Some(pub_name) = publisher {
if let Some(fmri_pub) = fmri.publisher.as_deref() {
if fmri_pub != pub_name {
continue;
}
}
// If FMRI has no publisher, we assume it matches the requested publisher
// as it's being served by this repository.
}
// For now, we return an empty list since we don't want to return placeholder data if seen_fmris.insert(fmri.to_string()) {
// and we don't have a real API to call packages.push(PackageInfo { fmri });
}
// If pattern filtering is needed, it would be applied here to the results from the API }
// When implementing, use the regex crate to handle user-provided regexp patterns properly, }
// similar to the implementation in file_backend.rs }
}
Err(e) => {
return Err(RepositoryError::Other(format!(
"Failed to connect to search API: {} for {}",
e, url
)));
}
} }
Ok(packages) Ok(packages)
@ -567,17 +603,9 @@ impl ReadableRepository for RestBackend {
return Err(RepositoryError::Other("Empty digest provided".to_string())); return Err(RepositoryError::Other("Empty digest provided".to_string()));
} }
let shard = if hash.len() >= 2 {
&hash[0..2]
} else {
&hash[..]
};
let candidates = vec![ let candidates = vec![
format!("{}/file/{}/{}", self.uri, shard, hash), format!("{}/file/0/{}", self.uri, hash),
format!( format!("{}/publisher/{}/file/0/{}", self.uri, publisher, hash),
"{}/publisher/{}/file/{}/{}",
self.uri, publisher, shard, hash
),
]; ];
// Ensure destination directory exists // Ensure destination directory exists
@ -589,35 +617,36 @@ impl ReadableRepository for RestBackend {
for url in candidates { for url in candidates {
match self.client.get(&url).send() { match self.client.get(&url).send() {
Ok(resp) if resp.status().is_success() => { Ok(resp) if resp.status().is_success() => {
let body = resp.bytes().map_err(|e| { let mut resp = resp;
RepositoryError::Other(format!("Failed to read payload body: {}", e)) // Write atomically
let tmp_path = dest.with_extension("tmp");
let mut tmp_file = File::create(&tmp_path)?;
std::io::copy(&mut resp, &mut tmp_file).map_err(|e| {
RepositoryError::Other(format!("Failed to download payload: {}", e))
})?; })?;
drop(tmp_file);
// Verify digest if algorithm is known // Verify digest if algorithm is known
if let Some(alg) = algo.clone() { if let Some(alg) = algo.clone() {
match crate::digest::Digest::from_bytes( let f = File::open(&tmp_path)?;
&body, let comp = crate::digest::Digest::from_reader(
f,
alg, alg,
crate::digest::DigestSource::PrimaryPayloadHash, crate::digest::DigestSource::PrimaryPayloadHash,
) { )
Ok(comp) => { .map_err(|e| RepositoryError::DigestError(format!("{}", e)))?;
if comp.hash != hash { if comp.hash != hash {
let _ = fs::remove_file(&tmp_path);
return Err(RepositoryError::DigestError(format!( return Err(RepositoryError::DigestError(format!(
"Digest mismatch: expected {}, got {}", "Digest mismatch for {}: expected {}, got {}",
hash, comp.hash url, hash, comp.hash
))); )));
} }
} }
Err(e) => return Err(RepositoryError::DigestError(format!("{}", e))),
}
}
// Write atomically fs::rename(&tmp_path, dest)?;
let tmp = dest.with_extension("tmp");
let mut f = File::create(&tmp)?;
f.write_all(&body)?;
drop(f);
fs::rename(&tmp, dest)?;
return Ok(()); return Ok(());
} }
Ok(resp) => { Ok(resp) => {
@ -651,10 +680,8 @@ impl ReadableRepository for RestBackend {
) -> Result<Vec<PackageInfo>> { ) -> Result<Vec<PackageInfo>> {
todo!() todo!()
} }
}
impl RestBackend { fn fetch_manifest_text(
pub fn fetch_manifest_text(
&mut self, &mut self,
publisher: &str, publisher: &str,
fmri: &crate::fmri::Fmri, fmri: &crate::fmri::Fmri,
@ -720,6 +747,9 @@ impl RestBackend {
last_err.unwrap_or_else(|| "manifest not found".to_string()), last_err.unwrap_or_else(|| "manifest not found".to_string()),
)) ))
} }
}
impl RestBackend {
/// Sets the local path where catalog files will be cached. /// Sets the local path where catalog files will be cached.
/// ///
/// This method creates the directory if it doesn't exist. The local cache path /// This method creates the directory if it doesn't exist. The local cache path

21
pkg6recv/Cargo.toml Normal file
View file

@ -0,0 +1,21 @@
# Manifest for pkg6recv: CLI tool that receives (downloads) packages from an
# IPS repository into a local file-backed repository.
[package]
name = "pkg6recv"
# Shared metadata is inherited from the workspace root.
description.workspace = true
version.workspace = true
authors.workspace = true
edition.workspace = true
license-file.workspace = true
repository.workspace = true
readme.workspace = true
[dependencies]
# CLI argument parsing via #[derive(Parser)].
clap = { version = "4", features = ["derive"] }
# Pretty diagnostic reporting for errors surfaced to the user.
miette = { version = "7", features = ["fancy"] }
thiserror = "2"
# Structured logging; the subscriber's env-filter lets RUST_LOG tune verbosity.
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Core IPS library (repository backends, FMRI parsing, receive logic).
libips = { path = "../libips", version = "*"}
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# NOTE(review): tempfile appears unused by src/main.rs shown here — presumably
# needed by tests; confirm before removing.
tempfile = "3.8"

84
pkg6recv/src/main.rs Normal file
View file

@ -0,0 +1,84 @@
use clap::Parser;
use miette::{IntoDiagnostic, Result};
use libips::repository::{FileBackend, RestBackend, ReadableRepository, ProgressReporter, ProgressInfo};
use libips::recv::PackageReceiver;
use libips::fmri::Fmri;
use std::path::PathBuf;
use tracing::info;
use tracing_subscriber::{EnvFilter, fmt};
/// Progress reporter that forwards receive progress to the console through
/// the `tracing` INFO level. Unit struct: it carries no state of its own.
struct ConsoleProgressReporter;

impl ProgressReporter for ConsoleProgressReporter {
    /// Logs the initial progress record when an operation begins.
    fn start(&self, info: &ProgressInfo) {
        // Relies on ProgressInfo's Display impl for the message text.
        info!("{}", info);
    }
    /// Logs each intermediate progress update as it arrives.
    fn update(&self, info: &ProgressInfo) {
        info!("{}", info);
    }
    /// Logs completion; only the operation name is shown, prefixed "DONE:".
    fn finish(&self, info: &ProgressInfo) {
        info!("DONE: {}", info.operation);
    }
}
// Command-line interface definition for pkg6recv.
//
// NOTE(review): the `///` comments on the fields below double as clap's
// generated `--help` text, so reviewer commentary here deliberately uses
// plain `//` comments to leave the help output untouched.
#[derive(Parser)]
#[command(name = "pkg6recv")]
#[command(about = "Receive packages from a repository", long_about = None)]
struct Cli {
    /// Source repository URI or path
    // http(s):// sources select the REST backend in main(); anything else
    // is treated as a filesystem path.
    #[arg(short = 's', long)]
    source: String,
    /// Destination repository path
    // Always a local file-backed repository.
    #[arg(short = 'd', long)]
    dest: PathBuf,
    /// Packages to receive (FMRIs)
    // Positional arguments; each string is parsed with Fmri::parse.
    packages: Vec<String>,
    /// Receive dependencies recursively
    #[arg(short = 'r', long)]
    recursive: bool,
    /// Default publisher name if not specified in FMRI
    #[arg(short = 'p', long)]
    publisher: Option<String>,
}
/// Entry point: parse CLI arguments, open the source and destination
/// repositories, and receive the requested packages.
///
/// # Errors
/// Returns a `miette` diagnostic if an FMRI fails to parse, a repository
/// cannot be opened, or the receive operation itself fails.
fn main() -> Result<()> {
    // Emit progress at INFO level by default; RUST_LOG can still override.
    fmt()
        .with_env_filter(EnvFilter::from_default_env().add_directive(tracing::Level::INFO.into()))
        .init();

    let cli = Cli::parse();

    // Parse every requested package spec up front so bad FMRIs fail fast,
    // before any repository is touched.
    let fmris: Vec<Fmri> = cli
        .packages
        .iter()
        .map(|s| Fmri::parse(s))
        .collect::<std::result::Result<Vec<_>, _>>()
        .into_diagnostic()?;

    let progress = ConsoleProgressReporter;

    // An http(s) scheme selects the REST backend; otherwise the source is a
    // local path. Each branch builds its own receiver because the two source
    // backends are distinct types.
    let remote = cli.source.starts_with("http://") || cli.source.starts_with("https://");
    if remote {
        let mut src = RestBackend::open(&cli.source).into_diagnostic()?;
        let dst = FileBackend::open(&cli.dest).into_diagnostic()?;
        PackageReceiver::new(&mut src, dst)
            .with_progress(&progress)
            .receive(cli.publisher.as_deref(), &fmris, cli.recursive)
            .into_diagnostic()?;
    } else {
        let mut src = FileBackend::open(&cli.source).into_diagnostic()?;
        let dst = FileBackend::open(&cli.dest).into_diagnostic()?;
        PackageReceiver::new(&mut src, dst)
            .with_progress(&progress)
            .receive(cli.publisher.as_deref(), &fmris, cli.recursive)
            .into_diagnostic()?;
    }

    info!("Package receive complete.");
    Ok(())
}