mirror of
https://codeberg.org/Toasterson/ips.git
synced 2026-04-10 13:20:42 +00:00
Add pkg6recv package for receiving IPS repositories.
- Introduced `pkg6recv` for downloading packages from IPS repositories with support for recursive dependency fetching.
- Implemented CLI commands with `clap` for source/destination setup, package selection, and publisher defaults.
- Added `ConsoleProgressReporter` for detailed receive progress visibility.
- Enhanced the `Manifest` parser to support JSON format alongside IPS text format.
- Updated `FileBackend` and `RestBackend` repositories to fetch manifests and payloads in new operations.
- Extended `Digest` utilities for payload integrity checks.
- Added tests to verify basic functionality and manifest format preservation.
This commit is contained in:
parent
22178cffd7
commit
1c0619ca55
15 changed files with 661 additions and 137 deletions
17
Cargo.lock
generated
17
Cargo.lock
generated
|
|
@ -2136,6 +2136,21 @@ dependencies = [
|
|||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkg6recv"
|
||||
version = "0.5.3"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"libips",
|
||||
"miette 7.6.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pkg6repo"
|
||||
version = "0.5.3"
|
||||
|
|
@ -2460,6 +2475,7 @@ version = "0.12.24"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"encoding_rs",
|
||||
|
|
@ -2490,6 +2506,7 @@ dependencies = [
|
|||
"tokio",
|
||||
"tokio-native-tls",
|
||||
"tokio-rustls",
|
||||
"tokio-util",
|
||||
"tower",
|
||||
"tower-http",
|
||||
"tower-service",
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ members = [
|
|||
"specfile",
|
||||
"ports",
|
||||
"pkg6",
|
||||
"pkg6recv",
|
||||
"pkgtree",
|
||||
"xtask",
|
||||
]
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ walkdir = "2.4.0"
|
|||
redb = { version = "3" }
|
||||
bincode = { version = "2", features = ["serde"] }
|
||||
rust-ini = "0.21"
|
||||
reqwest = { version = "0.12", features = ["blocking", "json"] }
|
||||
reqwest = { version = "0.12", features = ["blocking", "json", "gzip", "deflate"] }
|
||||
resolvo = "0.10"
|
||||
|
||||
[features]
|
||||
|
|
|
|||
|
|
@ -15,19 +15,7 @@ action_driver = @{"driver"}
|
|||
action_group = @{"group"}
|
||||
action_user = @{"user"}
|
||||
action_legacy = @{"legacy"}
|
||||
action_name = @{
|
||||
action_set |
|
||||
action_depend |
|
||||
action_dir |
|
||||
action_file |
|
||||
action_license |
|
||||
action_hardlink |
|
||||
action_link |
|
||||
action_driver |
|
||||
action_group |
|
||||
action_user |
|
||||
action_legacy
|
||||
}
|
||||
action_name = @{ ASCII_ALPHA+ }
|
||||
quoted_string = @{
|
||||
"\""
|
||||
~ quoted_character*
|
||||
|
|
|
|||
|
|
@ -885,8 +885,11 @@ impl Manifest {
|
|||
ActionKind::Transform => {
|
||||
self.transforms.push(act.into());
|
||||
}
|
||||
ActionKind::Signature => {
|
||||
debug!("signature action encountered, skipping for now");
|
||||
}
|
||||
ActionKind::Unknown { action } => {
|
||||
panic!("action {:?} not known", action)
|
||||
debug!("action {:?} not known, skipping", action);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -909,6 +912,19 @@ impl Manifest {
|
|||
}
|
||||
|
||||
pub fn parse_string(content: String) -> Result<Manifest> {
|
||||
// Try to parse as JSON first
|
||||
if content.trim_start().starts_with('{') {
|
||||
match serde_json::from_str::<Manifest>(&content) {
|
||||
Ok(manifest) => return Ok(manifest),
|
||||
Err(err) => {
|
||||
debug!(
|
||||
"Manifest::parse_string: Error in JSON deserialization: {}. Continuing with mtree like format parsing",
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut m = Manifest::new();
|
||||
|
||||
let pairs = ManifestParser::parse(Rule::manifest, &content)?;
|
||||
|
|
@ -993,6 +1009,7 @@ pub enum ActionKind {
|
|||
Legacy,
|
||||
Unknown { action: String },
|
||||
Transform,
|
||||
Signature,
|
||||
}
|
||||
|
||||
impl Default for ActionKind {
|
||||
|
|
@ -1042,7 +1059,8 @@ fn get_action_kind(act: &str) -> ActionKind {
|
|||
"group" => ActionKind::Group,
|
||||
"user" => ActionKind::User,
|
||||
"legacy" => ActionKind::Legacy,
|
||||
"<transform" => ActionKind::Transform,
|
||||
"<transform" | "transform" => ActionKind::Transform,
|
||||
"signature" => ActionKind::Signature,
|
||||
_ => ActionKind::Unknown { action: act.into() },
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -111,6 +111,10 @@ impl FromStr for Digest {
|
|||
impl Digest {
|
||||
pub fn from_bytes(b: &[u8], algo: DigestAlgorithm, src: DigestSource) -> Result<Self> {
|
||||
let hash = match algo {
|
||||
DigestAlgorithm::SHA1 => {
|
||||
use sha1::Sha1;
|
||||
format!("{:x}", Sha1::digest(b))
|
||||
}
|
||||
DigestAlgorithm::SHA256 => {
|
||||
format!("{:x}", sha2::Sha256::digest(b))
|
||||
}
|
||||
|
|
@ -139,6 +143,62 @@ impl Digest {
|
|||
hash,
|
||||
})
|
||||
}
|
||||
|
||||
/// Compute a digest by streaming all bytes from `r` through the selected
/// hash algorithm, without buffering the whole payload in memory.
///
/// # Arguments
///
/// * `r` - Any reader; it is consumed to EOF.
/// * `algo` - The hash algorithm to apply.
/// * `src` - Recorded as the `source` field of the resulting `Digest`.
///
/// # Errors
///
/// Returns `DigestError::UnknownAlgorithm` for algorithms not handled below,
/// and an I/O error (via `DigestError::from`) if reading from `r` fails.
pub fn from_reader<R: std::io::Read>(
    mut r: R,
    algo: DigestAlgorithm,
    src: DigestSource,
) -> Result<Self> {
    // Each arm streams via `std::io::copy` into the hasher (the RustCrypto
    // hashers implement `std::io::Write`), then renders lowercase hex.
    let hash = match algo {
        DigestAlgorithm::SHA1 => {
            use sha1::{Digest as _, Sha1};
            let mut hasher = Sha1::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        DigestAlgorithm::SHA256 => {
            use sha2::{Digest as _, Sha256};
            let mut hasher = Sha256::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        DigestAlgorithm::SHA512Half => {
            // Sha512_256 is the sha2 crate's SHA-512/256 (truncated SHA-512).
            use sha2::{Digest as _, Sha512_256};
            let mut hasher = Sha512_256::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        DigestAlgorithm::SHA512 => {
            use sha2::{Digest as _, Sha512};
            let mut hasher = Sha512::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        DigestAlgorithm::SHA3512Half | DigestAlgorithm::SHA3256 => {
            // NOTE(review): both variants hash with Sha3_256 here; if
            // SHA3512Half is meant to be a truncated SHA3-512 this produces
            // different bytes — confirm this matches `from_bytes`.
            use sha3::{Digest as _, Sha3_256};
            let mut hasher = Sha3_256::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        DigestAlgorithm::SHA3512 => {
            use sha3::{Digest as _, Sha3_512};
            let mut hasher = Sha3_512::new();
            std::io::copy(&mut r, &mut hasher).map_err(DigestError::from)?;
            format!("{:x}", hasher.finalize())
        }
        x => {
            // No streaming implementation for this algorithm.
            return Err(DigestError::UnknownAlgorithm {
                algorithm: x.to_string(),
            });
        }
    };

    Ok(Digest {
        source: src,
        algorithm: algo,
        hash,
    })
}
|
||||
}
|
||||
|
||||
impl Display for Digest {
|
||||
|
|
@ -164,4 +224,8 @@ pub enum DigestError {
|
|||
help("Digest should be in the format: source:algorithm:hash")
|
||||
)]
|
||||
InvalidDigestFormat { digest: String, details: String },
|
||||
|
||||
#[error("I/O error: {0}")]
|
||||
#[diagnostic(code(ips::digest_error::io))]
|
||||
IoError(#[from] std::io::Error),
|
||||
}
|
||||
|
|
|
|||
|
|
@ -151,7 +151,7 @@ pub enum FmriError {
|
|||
/// let version = Version::new_semver(semver_version);
|
||||
/// assert_eq!(version.release, "1.2.3");
|
||||
/// ```
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Diff)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Diff)]
|
||||
#[diff(attr(
|
||||
#[derive(Debug, PartialEq)]
|
||||
))]
|
||||
|
|
@ -513,7 +513,7 @@ impl FromStr for Version {
|
|||
///
|
||||
/// An FMRI is a unique identifier for a package in the IPS system.
|
||||
/// It follows the format: pkg://publisher/package_name@version
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Diff)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, Diff)]
|
||||
#[diff(attr(
|
||||
#[derive(Debug, PartialEq)]
|
||||
))]
|
||||
|
|
@ -984,14 +984,14 @@ mod tests {
|
|||
fn test_fmri_display() {
|
||||
// Test displaying a name only
|
||||
let fmri = Fmri::new("sunos/coreutils");
|
||||
assert_eq!(fmri.to_string(), "pkg:///sunos/coreutils");
|
||||
assert_eq!(fmri.to_string(), "pkg:/sunos/coreutils");
|
||||
|
||||
// Test displaying a name and version
|
||||
let version = Version::with_timestamp("5.11", Some("1"), None, "20200421T195136Z");
|
||||
let fmri = Fmri::with_version("sunos/coreutils", version);
|
||||
assert_eq!(
|
||||
fmri.to_string(),
|
||||
"pkg:///sunos/coreutils@5.11,1:20200421T195136Z"
|
||||
"pkg:/sunos/coreutils@5.11,1:20200421T195136Z"
|
||||
);
|
||||
|
||||
// Test displaying with publisher
|
||||
|
|
|
|||
|
|
@ -507,7 +507,7 @@ impl Image {
|
|||
if origin.starts_with("file://") {
|
||||
let path_str = origin.trim_start_matches("file://");
|
||||
let path = std::path::PathBuf::from(path_str);
|
||||
let repo = crate::repository::FileBackend::open(&path)?;
|
||||
let mut repo = crate::repository::FileBackend::open(&path)?;
|
||||
repo.fetch_manifest_text(&publisher_name, fmri)?
|
||||
} else {
|
||||
let mut repo = crate::repository::RestBackend::open(origin)?;
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ pub mod fmri;
|
|||
pub mod image;
|
||||
pub mod payload;
|
||||
pub mod publisher;
|
||||
pub mod recv;
|
||||
pub mod repository;
|
||||
pub mod solver;
|
||||
mod test_json_manifest;
|
||||
|
|
|
|||
284
libips/src/recv.rs
Normal file
284
libips/src/recv.rs
Normal file
|
|
@ -0,0 +1,284 @@
|
|||
// This Source Code Form is subject to the terms of
|
||||
// the Mozilla Public License, v. 2.0. If a copy of the
|
||||
// MPL was not distributed with this file, You can
|
||||
// obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
use crate::repository::{ReadableRepository, FileBackend, RepositoryError, Result, WritableRepository, ProgressReporter, ProgressInfo, NoopProgressReporter};
|
||||
use crate::fmri::Fmri;
|
||||
use crate::actions::Manifest;
|
||||
use std::collections::HashSet;
|
||||
use tempfile::tempdir;
|
||||
use tracing::{info, debug};
|
||||
|
||||
/// PackageReceiver handles downloading packages from a source repository
/// and storing them in a destination repository.
///
/// The source may be any `ReadableRepository`; the destination is always a
/// local `FileBackend`. An optional `ProgressReporter` can be attached with
/// `with_progress` for visibility into the transfer.
pub struct PackageReceiver<'a, S: ReadableRepository> {
    // Borrowed mutably because fetch operations on the source take `&mut self`.
    source: &'a mut S,
    // Owned destination repository; transactions are committed into it.
    dest: FileBackend,
    // Optional progress sink; a `NoopProgressReporter` is used when absent.
    progress: Option<&'a dyn ProgressReporter>,
}
|
||||
|
||||
impl<'a, S: ReadableRepository> PackageReceiver<'a, S> {
    /// Create a new PackageReceiver reading from `source` and writing to `dest`.
    pub fn new(source: &'a mut S, dest: FileBackend) -> Self {
        Self { source, dest, progress: None }
    }

    /// Set the progress reporter (builder style; consumes and returns `self`).
    pub fn with_progress(mut self, progress: &'a dyn ProgressReporter) -> Self {
        self.progress = Some(progress);
        self
    }

    /// Receive packages from the source repository
    ///
    /// # Arguments
    ///
    /// * `default_publisher` - The default publisher name if not specified in FMRI
    /// * `fmris` - List of FMRIs to receive
    /// * `recursive` - Whether to receive dependencies recursively
    ///
    /// After all packages are stored, destination catalog metadata is rebuilt
    /// once per publisher that received new content.
    pub fn receive(&mut self, default_publisher: Option<&str>, fmris: &[Fmri], recursive: bool) -> Result<()> {
        // `processed` prevents receiving the same FMRI twice; `queued` prevents
        // pushing the same dependency onto the work queue more than once.
        let mut processed = HashSet::new();
        let mut queue: Vec<Fmri> = fmris.to_vec();
        let mut updated_publishers = HashSet::new();
        let mut queued: HashSet<Fmri> = fmris.iter().cloned().collect();

        let progress = self.progress.unwrap_or(&NoopProgressReporter);
        let mut overall_progress = ProgressInfo::new("Receiving packages");
        progress.start(&overall_progress);

        // Counters feed progress display only; `total_packages` grows when
        // version-less FMRIs expand and when new dependencies are discovered.
        let mut total_packages = queue.len() as u64;
        let mut packages_done = 0u64;

        while let Some(fmri) = queue.pop() {
            // If the FMRI doesn't have a version, we need to find the newest one
            let fmris_to_fetch = if fmri.version.is_none() {
                let publisher = fmri.publisher.as_deref().or(default_publisher).ok_or_else(|| {
                    RepositoryError::Other(format!("No publisher specified for package {}", fmri.name))
                })?;

                overall_progress = overall_progress.with_context(format!("Looking up newest version for {}", fmri.name));
                progress.update(&overall_progress);

                debug!("No version specified for {}, looking up newest", fmri.name);
                let pkgs = self.source.list_packages(Some(publisher), Some(&fmri.name))?;

                // Group by package name to find the newest version for each
                let mut by_name: std::collections::HashMap<String, Vec<crate::repository::PackageInfo>> = std::collections::HashMap::new();
                for pi in pkgs {
                    by_name.entry(pi.fmri.name.clone()).or_default().push(pi);
                }

                let mut results = Vec::new();
                for (name, versions) in by_name {
                    // NOTE(review): "newest" is chosen by lexicographic comparison
                    // of rendered FMRI strings, which may not match IPS version
                    // ordering (e.g. "1.9" vs "1.10") — confirm intended semantics.
                    let newest = versions.into_iter().max_by(|a, b| {
                        a.fmri.to_string().cmp(&b.fmri.to_string())
                    });
                    if let Some(pi) = newest {
                        results.push(pi.fmri);
                    } else {
                        info!("Package {} not found in source for publisher {}", name, publisher);
                    }
                }

                if results.is_empty() {
                    info!("Package {} not found in source for publisher {}", fmri.name, publisher);
                    continue;
                }
                // Update total_packages: remove the wildcard FMRI we just popped, and add actual results
                total_packages = total_packages.saturating_sub(1) + results.len() as u64;
                results
            } else {
                vec![fmri]
            };

            for fmri_to_fetch in fmris_to_fetch {
                let publisher_name = fmri_to_fetch.publisher.as_deref().or(default_publisher).ok_or_else(|| {
                    RepositoryError::Other(format!("No publisher specified for package {}", fmri_to_fetch.name))
                })?.to_string();

                if !processed.insert(fmri_to_fetch.clone()) {
                    // If we already processed it (possibly as a dependency), don't count it again
                    // and decrement total if we just added it from wildcard expansion
                    continue;
                }

                packages_done += 1;
                overall_progress = overall_progress
                    .with_total(total_packages)
                    .with_current(packages_done)
                    .with_context(format!("Receiving {}", fmri_to_fetch));
                progress.update(&overall_progress);

                info!("Receiving package {} from publisher {}", fmri_to_fetch, publisher_name);
                let manifest = self.receive_one(&publisher_name, &fmri_to_fetch)?;
                updated_publishers.insert(publisher_name.clone());

                if recursive {
                    for dep in manifest.dependencies {
                        if let Some(mut dep_fmri) = dep.fmri {
                            // Ensure it has the publisher if not specified
                            if dep_fmri.publisher.is_none() {
                                dep_fmri.publisher = Some(publisher_name.clone());
                            }

                            if !processed.contains(&dep_fmri) && queued.insert(dep_fmri.clone()) {
                                total_packages += 1;
                                queue.push(dep_fmri);
                            }
                        }
                    }
                }
            }
        }

        // Rebuild catalogs once per publisher that received new packages.
        for pub_name in updated_publishers {
            info!("Rebuilding metadata for publisher {}", pub_name);
            overall_progress = overall_progress.with_context(format!("Rebuilding metadata for {}", pub_name));
            progress.update(&overall_progress);
            self.dest.rebuild(Some(&pub_name), false, false)?;
        }

        progress.finish(&overall_progress);

        Ok(())
    }

    /// Receive a single package: fetch its manifest text, ensure the publisher
    /// exists in the destination, download every payload into a staging dir,
    /// and commit everything in one destination transaction.
    ///
    /// The manifest text is stored verbatim via `set_legacy_manifest`, so the
    /// original on-disk format of the source manifest is preserved.
    fn receive_one(&mut self, publisher: &str, fmri: &Fmri) -> Result<Manifest> {
        let progress = self.progress.unwrap_or(&NoopProgressReporter);

        let manifest_text = self.source.fetch_manifest_text(publisher, fmri)?;
        let manifest = Manifest::parse_string(manifest_text.clone()).map_err(RepositoryError::from)?;

        // Ensure publisher exists in destination
        let dest_info = self.dest.get_info()?;
        if !dest_info.publishers.iter().any(|p| p.name == publisher) {
            info!("Adding publisher {} to destination repository", publisher);
            self.dest.add_publisher(publisher)?;
        }

        let mut txn = self.dest.begin_transaction()?;
        txn.set_publisher(publisher);
        txn.set_legacy_manifest(manifest_text);

        // Payloads are staged in a temp dir that is cleaned up when it drops.
        let temp_dir = tempdir().map_err(RepositoryError::IoError)?;

        let payload_files: Vec<_> = manifest.files.iter().filter(|f| f.payload.is_some()).collect();
        let total_files = payload_files.len() as u64;

        for (i, file) in payload_files.into_iter().enumerate() {
            if let Some(payload) = &file.payload {
                let files_done = (i + 1) as u64;
                let digest = &payload.primary_identifier.hash;

                progress.update(&ProgressInfo::new(format!("Receiving payloads for {}", fmri.name))
                    .with_total(total_files)
                    .with_current(files_done)
                    .with_context(format!("Payload: {}", digest)));

                // Stage the payload under its digest name, then hand it to the txn.
                let temp_file_path = temp_dir.path().join(digest);
                debug!("Fetching payload {} to {}", digest, temp_file_path.display());
                self.source.fetch_payload(publisher, digest, &temp_file_path)?;
                txn.add_file(file.clone(), &temp_file_path)?;
            }
        }

        txn.update_manifest(manifest.clone());
        txn.commit()?;

        Ok(manifest)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::repository::{FileBackend, RepositoryVersion};
    use crate::actions::Attr;
    use tempfile::tempdir;

    /// End-to-end smoke test: publish one package into a source repo, receive
    /// it into a fresh destination repo, and verify it is listed there.
    #[test]
    fn test_receive_basic() -> Result<()> {
        let source_dir = tempdir().map_err(RepositoryError::IoError)?;
        let dest_dir = tempdir().map_err(RepositoryError::IoError)?;

        // Create source repo with one package
        let mut source_repo = FileBackend::create(source_dir.path(), RepositoryVersion::V4)?;
        source_repo.add_publisher("test")?;

        let fmri = Fmri::parse("pkg://test/pkgA@1.0").unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.push(Attr {
            key: "pkg.fmri".to_string(),
            values: vec![fmri.to_string()],
            ..Default::default()
        });

        // Publish the manifest into the source repo and rebuild its catalog.
        let mut txn = source_repo.begin_transaction()?;
        txn.set_publisher("test");
        txn.update_manifest(manifest);
        txn.commit()?;
        source_repo.rebuild(Some("test"), false, false)?;

        // Create dest repo
        let dest_repo = FileBackend::create(dest_dir.path(), RepositoryVersion::V4)?;

        let mut receiver = PackageReceiver::new(&mut source_repo, dest_repo);
        receiver.receive(Some("test"), &[Fmri::new("pkgA")], false)?;

        // Verify dest repo has the package
        let dest_repo_check = FileBackend::open(dest_dir.path())?;
        let pkgs = dest_repo_check.list_packages(Some("test"), Some("pkgA"))?;
        assert_eq!(pkgs.len(), 1);
        assert_eq!(pkgs[0].fmri.name, "pkgA");
        assert_eq!(pkgs[0].fmri.version.as_ref().unwrap().release, "1.0");

        Ok(())
    }

    /// Receiving must keep the legacy (IPS text) manifest byte-identical in
    /// the destination while also producing a `.json` sibling manifest.
    #[test]
    fn test_receive_preserves_manifest_format() -> Result<()> {
        let source_dir = tempdir().map_err(RepositoryError::IoError)?;
        let dest_dir = tempdir().map_err(RepositoryError::IoError)?;

        // Create source repo
        let mut source_repo = FileBackend::create(source_dir.path(), RepositoryVersion::V4)?;
        source_repo.add_publisher("test")?;

        // NOTE(review): `fmri` is not used below — the FMRI string is embedded
        // directly in `manifest_content`; consider prefixing with `_`.
        let fmri = Fmri::parse("pkg://test/pkgA@1.0").unwrap();
        let manifest_content = "set name=pkg.fmri value=pkg://test/pkgA@1.0\nset name=pkg.summary value=test\n";

        // Manually write the manifest in IPS format to the source repo
        let manifest_path = FileBackend::construct_manifest_path(source_dir.path(), "test", "pkgA", "1.0");
        std::fs::create_dir_all(manifest_path.parent().unwrap()).map_err(RepositoryError::IoError)?;
        std::fs::write(&manifest_path, manifest_content).map_err(RepositoryError::IoError)?;

        // Rebuild source repo to recognize the package
        source_repo.rebuild(Some("test"), false, false)?;

        // Create dest repo
        let dest_repo = FileBackend::create(dest_dir.path(), RepositoryVersion::V4)?;

        let mut receiver = PackageReceiver::new(&mut source_repo, dest_repo);
        receiver.receive(Some("test"), &[Fmri::new("pkgA")], false)?;

        // Verify dest repo has the package and the manifest is in IPS format
        let dest_manifest_path = FileBackend::construct_manifest_path(dest_dir.path(), "test", "pkgA", "1.0");
        let content = std::fs::read_to_string(&dest_manifest_path).map_err(RepositoryError::IoError)?;

        assert_eq!(content, manifest_content);
        assert!(!content.starts_with('{'), "Manifest should not be JSON");

        // Also verify the .json version exists and IS JSON
        let mut json_path = dest_manifest_path.clone();
        let mut filename = json_path.file_name().unwrap().to_os_string();
        filename.push(".json");
        json_path.set_file_name(filename);
        assert!(json_path.exists(), "JSON manifest should exist at {}", json_path.display());
        let json_content = std::fs::read_to_string(&json_path).map_err(RepositoryError::IoError)?;
        assert!(json_content.starts_with('{'), "JSON manifest should be JSON");

        Ok(())
    }
}
|
||||
|
|
@ -1674,6 +1674,60 @@ impl ReadableRepository for FileBackend {
|
|||
)))
|
||||
}
|
||||
|
||||
fn fetch_manifest_text(
|
||||
&mut self,
|
||||
publisher: &str,
|
||||
fmri: &Fmri,
|
||||
) -> Result<String> {
|
||||
// Require a concrete version
|
||||
let version = fmri.version();
|
||||
if version.is_empty() {
|
||||
return Err(RepositoryError::Other(
|
||||
"FMRI must include a version to fetch manifest".into(),
|
||||
));
|
||||
}
|
||||
// Preferred path: publisher-scoped manifest path
|
||||
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
|
||||
if path.exists() {
|
||||
return std::fs::read_to_string(&path)
|
||||
.map_err(|e| RepositoryError::FileReadError {
|
||||
path,
|
||||
source: e,
|
||||
});
|
||||
}
|
||||
// Fallbacks: global pkg layout without publisher
|
||||
let encoded_stem = Self::url_encode(fmri.stem());
|
||||
let encoded_version = Self::url_encode(&version);
|
||||
let alt1 = self
|
||||
.path
|
||||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt1.exists() {
|
||||
return std::fs::read_to_string(&alt1).map_err(|e| RepositoryError::FileReadError {
|
||||
path: alt1,
|
||||
source: e,
|
||||
});
|
||||
}
|
||||
let alt2 = self
|
||||
.path
|
||||
.join("publisher")
|
||||
.join(publisher)
|
||||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt2.exists() {
|
||||
return std::fs::read_to_string(&alt2).map_err(|e| RepositoryError::FileReadError {
|
||||
path: alt2,
|
||||
source: e,
|
||||
});
|
||||
}
|
||||
Err(RepositoryError::NotFound(format!(
|
||||
"manifest for {} not found",
|
||||
fmri
|
||||
)))
|
||||
}
|
||||
|
||||
/// Search for packages in the repository
|
||||
fn search(
|
||||
&self,
|
||||
|
|
@ -2099,52 +2153,7 @@ impl FileBackend {
|
|||
let _ = super::catalog_writer::write_update_log(&path, &mut log)?;
|
||||
Ok(path)
|
||||
}
|
||||
pub fn fetch_manifest_text(&self, publisher: &str, fmri: &Fmri) -> Result<String> {
|
||||
// Require a concrete version
|
||||
let version = fmri.version();
|
||||
if version.is_empty() {
|
||||
return Err(RepositoryError::Other(
|
||||
"FMRI must include a version to fetch manifest".into(),
|
||||
));
|
||||
}
|
||||
// Preferred path: publisher-scoped manifest path
|
||||
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
|
||||
if path.exists() {
|
||||
return std::fs::read_to_string(&path)
|
||||
.map_err(|e| RepositoryError::FileReadError { path, source: e });
|
||||
}
|
||||
// Fallbacks: global pkg layout without publisher
|
||||
let encoded_stem = Self::url_encode(fmri.stem());
|
||||
let encoded_version = Self::url_encode(&version);
|
||||
let alt1 = self
|
||||
.path
|
||||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt1.exists() {
|
||||
return std::fs::read_to_string(&alt1).map_err(|e| RepositoryError::FileReadError {
|
||||
path: alt1,
|
||||
source: e,
|
||||
});
|
||||
}
|
||||
let alt2 = self
|
||||
.path
|
||||
.join("publisher")
|
||||
.join(publisher)
|
||||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt2.exists() {
|
||||
return std::fs::read_to_string(&alt2).map_err(|e| RepositoryError::FileReadError {
|
||||
path: alt2,
|
||||
source: e,
|
||||
});
|
||||
}
|
||||
Err(RepositoryError::NotFound(format!(
|
||||
"manifest for {} not found",
|
||||
fmri
|
||||
)))
|
||||
}
|
||||
|
||||
/// Fetch catalog file path
|
||||
pub fn get_catalog_file_path(&self, publisher: &str, filename: &str) -> Result<PathBuf> {
|
||||
if filename.contains('/') || filename.contains('\\') {
|
||||
|
|
|
|||
|
|
@ -384,6 +384,13 @@ pub trait ReadableRepository {
|
|||
fmri: &crate::fmri::Fmri,
|
||||
) -> Result<crate::actions::Manifest>;
|
||||
|
||||
/// Fetch a package manifest as raw text by FMRI from the repository.
|
||||
fn fetch_manifest_text(
|
||||
&mut self,
|
||||
publisher: &str,
|
||||
fmri: &crate::fmri::Fmri,
|
||||
) -> Result<String>;
|
||||
|
||||
/// Search for packages in the repository
|
||||
///
|
||||
/// This method searches for packages in the repository using the search index.
|
||||
|
|
|
|||
|
|
@ -3,9 +3,9 @@
|
|||
// MPL was not distributed with this file, You can
|
||||
// obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use tracing::{debug, info, warn};
|
||||
|
|
@ -64,7 +64,7 @@ impl WritableRepository for RestBackend {
|
|||
// This is a stub implementation
|
||||
// In a real implementation, we would make a REST API call to create the repository
|
||||
|
||||
let uri_str = uri.as_ref().to_string_lossy().to_string();
|
||||
let uri_str = uri.as_ref().to_string_lossy().trim_end_matches('/').to_string();
|
||||
|
||||
// Create the repository configuration
|
||||
let config = RepositoryConfig {
|
||||
|
|
@ -323,7 +323,7 @@ impl WritableRepository for RestBackend {
|
|||
impl ReadableRepository for RestBackend {
|
||||
/// Open an existing repository
|
||||
fn open<P: AsRef<Path>>(uri: P) -> Result<Self> {
|
||||
let uri_str = uri.as_ref().to_string_lossy().to_string();
|
||||
let uri_str = uri.as_ref().to_string_lossy().trim_end_matches('/').to_string();
|
||||
|
||||
// Create an HTTP client
|
||||
let client = Client::new();
|
||||
|
|
@ -344,12 +344,19 @@ impl ReadableRepository for RestBackend {
|
|||
match response.json::<Value>() {
|
||||
Ok(json) => {
|
||||
// Extract publisher information
|
||||
if let Some(publishers) =
|
||||
json.get("publishers").and_then(|p| p.as_object())
|
||||
{
|
||||
for (name, _) in publishers {
|
||||
debug!("Found publisher: {}", name);
|
||||
config.publishers.push(name.clone());
|
||||
if let Some(publishers) = json.get("publishers") {
|
||||
if let Some(publishers_obj) = publishers.as_object() {
|
||||
for (name, _) in publishers_obj {
|
||||
debug!("Found publisher: {}", name);
|
||||
config.publishers.push(name.clone());
|
||||
}
|
||||
} else if let Some(publishers_arr) = publishers.as_array() {
|
||||
for p in publishers_arr {
|
||||
if let Some(name) = p.get("name").and_then(|n| n.as_str()) {
|
||||
debug!("Found publisher: {}", name);
|
||||
config.publishers.push(name.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -369,9 +376,9 @@ impl ReadableRepository for RestBackend {
|
|||
}
|
||||
}
|
||||
|
||||
// If we couldn't get any publishers, add a default one
|
||||
// If we couldn't get any publishers, warn the user
|
||||
if config.publishers.is_empty() {
|
||||
config.publishers.push("openindiana.org".to_string());
|
||||
warn!("No publishers discovered for repository: {}", uri_str);
|
||||
}
|
||||
|
||||
// Create the repository instance
|
||||
|
|
@ -417,35 +424,64 @@ impl ReadableRepository for RestBackend {
|
|||
fn list_packages(
|
||||
&self,
|
||||
publisher: Option<&str>,
|
||||
_pattern: Option<&str>,
|
||||
pattern: Option<&str>,
|
||||
) -> Result<Vec<PackageInfo>> {
|
||||
// This is a stub implementation
|
||||
// In a real implementation, we would make a REST API call to list packages
|
||||
let pattern = pattern.unwrap_or("*");
|
||||
|
||||
let packages = Vec::new();
|
||||
// Use search API to find packages
|
||||
// URL: /search/0/<pattern>
|
||||
let url = format!("{}/search/0/{}", self.uri, pattern);
|
||||
debug!("Listing packages via search: {}", url);
|
||||
|
||||
// Filter publishers if specified
|
||||
let publishers = if let Some(pub_name) = publisher {
|
||||
if !self.config.publishers.contains(&pub_name.to_string()) {
|
||||
return Err(RepositoryError::PublisherNotFound(pub_name.to_string()));
|
||||
let mut packages = Vec::new();
|
||||
let mut seen_fmris = HashSet::new();
|
||||
|
||||
match self.client.get(&url).send() {
|
||||
Ok(resp) => {
|
||||
let resp = match resp.error_for_status() {
|
||||
Ok(r) => r,
|
||||
Err(e) if e.status() == Some(reqwest::StatusCode::NOT_FOUND) => {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(RepositoryError::Other(format!("Search API error: {} for {}", e, url)));
|
||||
}
|
||||
};
|
||||
|
||||
let reader = BufReader::new(resp);
|
||||
for line in reader.lines() {
|
||||
let line = line.map_err(|e| {
|
||||
RepositoryError::Other(format!("Failed to read search response line: {}", e))
|
||||
})?;
|
||||
// Line format: <attr> <fmri> <value_type> <value>
|
||||
// Example: pkg.fmri pkg:/system/rsyslog@8.2508.0,5.11-151056.0:20251023T180542Z set omnios/system/rsyslog
|
||||
let parts: Vec<&str> = line.split_whitespace().collect();
|
||||
if parts.len() >= 2 && parts[0] == "pkg.fmri" {
|
||||
if let Ok(fmri) = crate::fmri::Fmri::parse(parts[1]) {
|
||||
// Filter by publisher if requested
|
||||
if let Some(pub_name) = publisher {
|
||||
if let Some(fmri_pub) = fmri.publisher.as_deref() {
|
||||
if fmri_pub != pub_name {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// If FMRI has no publisher, we assume it matches the requested publisher
|
||||
// as it's being served by this repository.
|
||||
}
|
||||
|
||||
if seen_fmris.insert(fmri.to_string()) {
|
||||
packages.push(PackageInfo { fmri });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(RepositoryError::Other(format!(
|
||||
"Failed to connect to search API: {} for {}",
|
||||
e, url
|
||||
)));
|
||||
}
|
||||
vec![pub_name.to_string()]
|
||||
} else {
|
||||
self.config.publishers.clone()
|
||||
};
|
||||
|
||||
// For each publisher, list packages
|
||||
for _pub_name in publishers {
|
||||
// In a real implementation, we would make a REST API call to get package information
|
||||
// The API call would return a list of packages with their names, versions, and other metadata
|
||||
// We would then parse this information and create PackageInfo structs
|
||||
|
||||
// For now, we return an empty list since we don't want to return placeholder data
|
||||
// and we don't have a real API to call
|
||||
|
||||
// If pattern filtering is needed, it would be applied here to the results from the API
|
||||
// When implementing, use the regex crate to handle user-provided regexp patterns properly,
|
||||
// similar to the implementation in file_backend.rs
|
||||
}
|
||||
|
||||
Ok(packages)
|
||||
|
|
@ -567,17 +603,9 @@ impl ReadableRepository for RestBackend {
|
|||
return Err(RepositoryError::Other("Empty digest provided".to_string()));
|
||||
}
|
||||
|
||||
let shard = if hash.len() >= 2 {
|
||||
&hash[0..2]
|
||||
} else {
|
||||
&hash[..]
|
||||
};
|
||||
let candidates = vec![
|
||||
format!("{}/file/{}/{}", self.uri, shard, hash),
|
||||
format!(
|
||||
"{}/publisher/{}/file/{}/{}",
|
||||
self.uri, publisher, shard, hash
|
||||
),
|
||||
format!("{}/file/0/{}", self.uri, hash),
|
||||
format!("{}/publisher/{}/file/0/{}", self.uri, publisher, hash),
|
||||
];
|
||||
|
||||
// Ensure destination directory exists
|
||||
|
|
@ -589,35 +617,36 @@ impl ReadableRepository for RestBackend {
|
|||
for url in candidates {
|
||||
match self.client.get(&url).send() {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
let body = resp.bytes().map_err(|e| {
|
||||
RepositoryError::Other(format!("Failed to read payload body: {}", e))
|
||||
let mut resp = resp;
|
||||
// Write atomically
|
||||
let tmp_path = dest.with_extension("tmp");
|
||||
let mut tmp_file = File::create(&tmp_path)?;
|
||||
|
||||
std::io::copy(&mut resp, &mut tmp_file).map_err(|e| {
|
||||
RepositoryError::Other(format!("Failed to download payload: {}", e))
|
||||
})?;
|
||||
drop(tmp_file);
|
||||
|
||||
// Verify digest if algorithm is known
|
||||
if let Some(alg) = algo.clone() {
|
||||
match crate::digest::Digest::from_bytes(
|
||||
&body,
|
||||
let f = File::open(&tmp_path)?;
|
||||
let comp = crate::digest::Digest::from_reader(
|
||||
f,
|
||||
alg,
|
||||
crate::digest::DigestSource::PrimaryPayloadHash,
|
||||
) {
|
||||
Ok(comp) => {
|
||||
if comp.hash != hash {
|
||||
return Err(RepositoryError::DigestError(format!(
|
||||
"Digest mismatch: expected {}, got {}",
|
||||
hash, comp.hash
|
||||
)));
|
||||
}
|
||||
}
|
||||
Err(e) => return Err(RepositoryError::DigestError(format!("{}", e))),
|
||||
)
|
||||
.map_err(|e| RepositoryError::DigestError(format!("{}", e)))?;
|
||||
|
||||
if comp.hash != hash {
|
||||
let _ = fs::remove_file(&tmp_path);
|
||||
return Err(RepositoryError::DigestError(format!(
|
||||
"Digest mismatch for {}: expected {}, got {}",
|
||||
url, hash, comp.hash
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Write atomically
|
||||
let tmp = dest.with_extension("tmp");
|
||||
let mut f = File::create(&tmp)?;
|
||||
f.write_all(&body)?;
|
||||
drop(f);
|
||||
fs::rename(&tmp, dest)?;
|
||||
fs::rename(&tmp_path, dest)?;
|
||||
return Ok(());
|
||||
}
|
||||
Ok(resp) => {
|
||||
|
|
@ -651,10 +680,8 @@ impl ReadableRepository for RestBackend {
|
|||
) -> Result<Vec<PackageInfo>> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl RestBackend {
|
||||
pub fn fetch_manifest_text(
|
||||
fn fetch_manifest_text(
|
||||
&mut self,
|
||||
publisher: &str,
|
||||
fmri: &crate::fmri::Fmri,
|
||||
|
|
@ -720,6 +747,9 @@ impl RestBackend {
|
|||
last_err.unwrap_or_else(|| "manifest not found".to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl RestBackend {
|
||||
/// Sets the local path where catalog files will be cached.
|
||||
///
|
||||
/// This method creates the directory if it doesn't exist. The local cache path
|
||||
|
|
|
|||
21
pkg6recv/Cargo.toml
Normal file
21
pkg6recv/Cargo.toml
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
[package]
|
||||
name = "pkg6recv"
|
||||
description.workspace = true
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license-file.workspace = true
|
||||
repository.workspace = true
|
||||
readme.workspace = true
|
||||
keywords.workspace = true
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
miette = { version = "7", features = ["fancy"] }
|
||||
thiserror = "2"
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
libips = { path = "../libips", version = "*" }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tempfile = "3.8"
|
||||
84
pkg6recv/src/main.rs
Normal file
84
pkg6recv/src/main.rs
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
use clap::Parser;
|
||||
use miette::{IntoDiagnostic, Result};
|
||||
use libips::repository::{FileBackend, RestBackend, ReadableRepository, ProgressReporter, ProgressInfo};
|
||||
use libips::recv::PackageReceiver;
|
||||
use libips::fmri::Fmri;
|
||||
use std::path::PathBuf;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::{EnvFilter, fmt};
|
||||
|
||||
struct ConsoleProgressReporter;
|
||||
|
||||
impl ProgressReporter for ConsoleProgressReporter {
|
||||
fn start(&self, info: &ProgressInfo) {
|
||||
info!("{}", info);
|
||||
}
|
||||
fn update(&self, info: &ProgressInfo) {
|
||||
info!("{}", info);
|
||||
}
|
||||
fn finish(&self, info: &ProgressInfo) {
|
||||
info!("DONE: {}", info.operation);
|
||||
}
|
||||
}
|
||||
|
||||
// Command-line interface for pkg6recv.
// NOTE: the `///` doc comments on the fields below are consumed by clap's
// derive macro as the user-facing `--help` text, so they are part of the
// program's runtime behavior — do not reword them casually.
#[derive(Parser)]
#[command(name = "pkg6recv")]
#[command(about = "Receive packages from a repository", long_about = None)]
struct Cli {
    /// Source repository URI or path
    // Accepts either an http(s) URL (REST backend) or a filesystem path
    // (file backend); the scheme is detected at runtime in main().
    #[arg(short = 's', long)]
    source: String,

    /// Destination repository path
    // The destination is always a local file-backed repository.
    #[arg(short = 'd', long)]
    dest: PathBuf,

    /// Packages to receive (FMRIs)
    // Positional arguments; each is parsed with Fmri::parse before use.
    packages: Vec<String>,

    /// Receive dependencies recursively
    #[arg(short = 'r', long)]
    recursive: bool,

    /// Default publisher name if not specified in FMRI
    #[arg(short = 'p', long)]
    publisher: Option<String>,
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
// Initialize tracing
|
||||
fmt()
|
||||
.with_env_filter(EnvFilter::from_default_env().add_directive(tracing::Level::INFO.into()))
|
||||
.init();
|
||||
|
||||
let cli = Cli::parse();
|
||||
|
||||
// Open destination repository
|
||||
// We'll open it inside each branch to avoid borrow checker issues with moves
|
||||
|
||||
let fmris: Vec<Fmri> = cli.packages.iter()
|
||||
.map(|s| Fmri::parse(s))
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.into_diagnostic()?;
|
||||
|
||||
let progress = ConsoleProgressReporter;
|
||||
|
||||
// Determine if source is a URL or a path and receive packages
|
||||
if cli.source.starts_with("http://") || cli.source.starts_with("https://") {
|
||||
let mut source_repo = RestBackend::open(&cli.source).into_diagnostic()?;
|
||||
let dest_repo = FileBackend::open(&cli.dest).into_diagnostic()?;
|
||||
let mut receiver = PackageReceiver::new(&mut source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
receiver.receive(cli.publisher.as_deref(), &fmris, cli.recursive).into_diagnostic()?;
|
||||
} else {
|
||||
let mut source_repo = FileBackend::open(&cli.source).into_diagnostic()?;
|
||||
let dest_repo = FileBackend::open(&cli.dest).into_diagnostic()?;
|
||||
let mut receiver = PackageReceiver::new(&mut source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
receiver.receive(cli.publisher.as_deref(), &fmris, cli.recursive).into_diagnostic()?;
|
||||
}
|
||||
|
||||
info!("Package receive complete.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Loading…
Add table
Reference in a new issue