mirror of
https://codeberg.org/Toasterson/ips.git
synced 2026-04-10 13:20:42 +00:00
feat: Add .p6p archive format support for portable package distribution
Implement ArchiveBackend (read) and ArchiveWriter (create) using zip with zstd compression. Adds archive/import-archive subcommands to pkg6repo and .p6p source support to pkg6recv.
This commit is contained in:
parent
9814635a32
commit
4646d2a9c4
9 changed files with 1481 additions and 8 deletions
249
Cargo.lock
generated
249
Cargo.lock
generated
|
|
@ -17,6 +17,17 @@ version = "2.0.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
|
||||
|
||||
[[package]]
|
||||
name = "aes"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cipher",
|
||||
"cpufeatures",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.12"
|
||||
|
|
@ -125,6 +136,15 @@ version = "1.0.100"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
|
||||
|
||||
[[package]]
|
||||
name = "arbitrary"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1"
|
||||
dependencies = [
|
||||
"derive_arbitrary",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.7.1"
|
||||
|
|
@ -452,12 +472,37 @@ version = "3.19.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
|
||||
|
||||
[[package]]
|
||||
name = "bzip2"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47"
|
||||
dependencies = [
|
||||
"bzip2-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bzip2-sys"
|
||||
version = "0.1.13+1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.49"
|
||||
|
|
@ -504,6 +549,16 @@ dependencies = [
|
|||
"hashbrown 0.14.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cipher"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
|
||||
dependencies = [
|
||||
"crypto-common",
|
||||
"inout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.53"
|
||||
|
|
@ -608,6 +663,12 @@ dependencies = [
|
|||
"tiny-keccak",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "constant_time_eq"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation"
|
||||
version = "0.9.4"
|
||||
|
|
@ -633,6 +694,21 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crc"
|
||||
version = "3.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
|
||||
dependencies = [
|
||||
"crc-catalog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crc-catalog"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.5.0"
|
||||
|
|
@ -683,6 +759,32 @@ dependencies = [
|
|||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deflate64"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "807800ff3288b621186fe0a8f3392c4652068257302709c24efd918c3dffcdc2"
|
||||
|
||||
[[package]]
|
||||
name = "deranged"
|
||||
version = "0.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c"
|
||||
dependencies = [
|
||||
"powerfmt",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_arbitrary"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "diff-struct"
|
||||
version = "0.5.3"
|
||||
|
|
@ -719,6 +821,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
|||
dependencies = [
|
||||
"block-buffer",
|
||||
"crypto-common",
|
||||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1141,6 +1244,15 @@ version = "0.5.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "hmac"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
|
||||
dependencies = [
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "1.4.0"
|
||||
|
|
@ -1423,6 +1535,15 @@ dependencies = [
|
|||
"hashbrown 0.16.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inout"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
|
||||
dependencies = [
|
||||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.11.0"
|
||||
|
|
@ -1565,6 +1686,7 @@ dependencies = [
|
|||
"thiserror 2.0.17",
|
||||
"tracing",
|
||||
"walkdir",
|
||||
"zip",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1640,6 +1762,27 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lzma-rs"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"crc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lzma-sys"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "maplit"
|
||||
version = "1.0.2"
|
||||
|
|
@ -1834,6 +1977,12 @@ dependencies = [
|
|||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-conv"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050"
|
||||
|
||||
[[package]]
|
||||
name = "num-integer"
|
||||
version = "0.1.46"
|
||||
|
|
@ -2068,6 +2217,16 @@ dependencies = [
|
|||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pbkdf2"
|
||||
version = "0.12.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
|
||||
dependencies = [
|
||||
"digest",
|
||||
"hmac",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "percent-encoding"
|
||||
version = "2.3.2"
|
||||
|
|
@ -2300,6 +2459,12 @@ dependencies = [
|
|||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "powerfmt"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.21"
|
||||
|
|
@ -3219,6 +3384,25 @@ dependencies = [
|
|||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.47"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c"
|
||||
dependencies = [
|
||||
"deranged",
|
||||
"num-conv",
|
||||
"powerfmt",
|
||||
"serde_core",
|
||||
"time-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time-core"
|
||||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
|
||||
|
||||
[[package]]
|
||||
name = "tiny-keccak"
|
||||
version = "2.0.2"
|
||||
|
|
@ -4078,6 +4262,15 @@ dependencies = [
|
|||
"clap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xz2"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2"
|
||||
dependencies = [
|
||||
"lzma-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yoke"
|
||||
version = "0.8.1"
|
||||
|
|
@ -4147,6 +4340,20 @@ name = "zeroize"
|
|||
version = "1.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
|
||||
dependencies = [
|
||||
"zeroize_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeroize_derive"
|
||||
version = "1.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerotrie"
|
||||
|
|
@ -4181,6 +4388,48 @@ dependencies = [
|
|||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "2.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"arbitrary",
|
||||
"bzip2",
|
||||
"constant_time_eq",
|
||||
"crc32fast",
|
||||
"crossbeam-utils",
|
||||
"deflate64",
|
||||
"displaydoc",
|
||||
"flate2",
|
||||
"getrandom 0.3.4",
|
||||
"hmac",
|
||||
"indexmap",
|
||||
"lzma-rs",
|
||||
"memchr",
|
||||
"pbkdf2",
|
||||
"sha1",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"xz2",
|
||||
"zeroize",
|
||||
"zopfli",
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zopfli"
|
||||
version = "0.8.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"crc32fast",
|
||||
"log",
|
||||
"simd-adler32",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd"
|
||||
version = "0.13.3"
|
||||
|
|
|
|||
|
|
@ -44,6 +44,7 @@ rust-ini = "0.21"
|
|||
reqwest = { version = "0.12", features = ["blocking", "json", "gzip", "deflate"] }
|
||||
resolvo = "0.10"
|
||||
rayon = "1.11"
|
||||
zip = { version = "2", features = ["zstd"] }
|
||||
|
||||
[features]
|
||||
default = ["bundled-sqlite"]
|
||||
|
|
|
|||
368
libips/src/repository/archive_backend.rs
Normal file
368
libips/src/repository/archive_backend.rs
Normal file
|
|
@ -0,0 +1,368 @@
|
|||
// This Source Code Form is subject to the terms of
|
||||
// the Mozilla Public License, v. 2.0. If a copy of the
|
||||
// MPL was not distributed with this file, You can
|
||||
// obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
use crate::actions::Manifest;
|
||||
use crate::fmri::Fmri;
|
||||
use crate::repository::{
|
||||
PackageContents, PackageInfo, ReadableRepository, RepositoryError, RepositoryInfo, Result,
|
||||
};
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
use tracing::debug;
|
||||
use zip::ZipArchive;
|
||||
|
||||
use super::archive_writer::ArchiveMetadata;
|
||||
use super::file_backend::FileBackend;
|
||||
use super::PublisherInfo;
|
||||
|
||||
/// A read-only repository backed by a `.p6p` ZIP archive.
|
||||
pub struct ArchiveBackend {
|
||||
archive: Mutex<ZipArchive<fs::File>>,
|
||||
metadata: ArchiveMetadata,
|
||||
}
|
||||
|
||||
impl ArchiveBackend {
|
||||
/// Construct the manifest path inside the archive for a given publisher/stem/version.
|
||||
fn manifest_path(publisher: &str, stem: &str, version: &str) -> String {
|
||||
let encoded_stem = FileBackend::url_encode(stem);
|
||||
let encoded_version = FileBackend::url_encode(version);
|
||||
format!(
|
||||
"publisher/{}/pkg/{}/{}",
|
||||
publisher, encoded_stem, encoded_version
|
||||
)
|
||||
}
|
||||
|
||||
/// Construct the file path inside the archive for a given publisher/hash.
|
||||
fn file_path(publisher: &str, hash: &str) -> String {
|
||||
if hash.len() < 2 {
|
||||
format!("publisher/{}/file/{}", publisher, hash)
|
||||
} else {
|
||||
let first_two = &hash[0..2];
|
||||
let next_two = if hash.len() >= 4 {
|
||||
&hash[2..4]
|
||||
} else {
|
||||
""
|
||||
};
|
||||
format!(
|
||||
"publisher/{}/file/{}/{}/{}",
|
||||
publisher, first_two, next_two, hash
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ReadableRepository for ArchiveBackend {
    /// Open a `.p6p` archive at `path` and parse its `pkg6.archive.json`
    /// metadata index. All catalog queries (info, list, search) are answered
    /// from this in-memory index; only manifests and payloads are read from
    /// ZIP entries on demand.
    fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let file = fs::File::open(path.as_ref()).map_err(|e| RepositoryError::FileOpenError {
            path: path.as_ref().to_path_buf(),
            source: e,
        })?;

        let mut archive =
            ZipArchive::new(file).map_err(|e| RepositoryError::ArchiveError(e.to_string()))?;

        // Read and parse the metadata index. The inner block scopes the
        // mutable borrow of `archive` so it can be moved into the Mutex below.
        let metadata: ArchiveMetadata = {
            let mut entry = archive
                .by_name("pkg6.archive.json")
                .map_err(|e| RepositoryError::ArchiveError(format!("missing pkg6.archive.json: {}", e)))?;
            let mut buf = String::new();
            entry
                .read_to_string(&mut buf)
                .map_err(|e| RepositoryError::ArchiveError(format!("failed to read pkg6.archive.json: {}", e)))?;
            serde_json::from_str(&buf)
                .map_err(|e| RepositoryError::ArchiveError(format!("invalid pkg6.archive.json: {}", e)))?
        };

        Ok(ArchiveBackend {
            archive: Mutex::new(archive),
            metadata,
        })
    }

    /// Summarize the archive: one `PublisherInfo` per publisher recorded in
    /// the metadata index, with per-publisher package counts.
    fn get_info(&self) -> Result<RepositoryInfo> {
        let mut publishers = Vec::new();
        for pub_name in &self.metadata.publishers {
            // Count packages belonging to this publisher from the index.
            let pkg_count = self
                .metadata
                .packages
                .iter()
                .filter(|p| p.publisher == *pub_name)
                .count();
            publishers.push(PublisherInfo {
                name: pub_name.clone(),
                package_count: pkg_count,
                // An archive is always readable, so report it as online.
                status: "online".to_string(),
                // The archive's creation time doubles as the last-updated time.
                updated: self.metadata.created.clone(),
            });
        }

        Ok(RepositoryInfo {
            publishers,
            default_publisher: self.metadata.default_publisher.clone(),
        })
    }

    /// List packages from the metadata index, optionally filtered by exact
    /// publisher name and/or by a glob/substring `pattern` (see
    /// `matches_pattern`) applied to the full FMRI string.
    fn list_packages(
        &self,
        publisher: Option<&str>,
        pattern: Option<&str>,
    ) -> Result<Vec<PackageInfo>> {
        let mut results = Vec::new();

        for entry in &self.metadata.packages {
            // Filter by publisher
            if let Some(pub_filter) = publisher {
                if entry.publisher != pub_filter {
                    continue;
                }
            }

            // Filter by pattern (simple glob)
            if let Some(pat) = pattern {
                if !matches_pattern(&entry.fmri, pat) {
                    continue;
                }
            }

            let fmri = Fmri::parse(&entry.fmri)
                .map_err(|e| RepositoryError::Other(format!("invalid FMRI in archive: {}", e)))?;
            results.push(PackageInfo { fmri });
        }

        Ok(results)
    }

    /// Report the contents of matching packages by fetching and walking each
    /// package's manifest. `action_types` (e.g. "file", "dir", "link",
    /// "depend", "license") restricts which action kinds are reported;
    /// `None` reports all of them.
    fn show_contents(
        &self,
        publisher: Option<&str>,
        pattern: Option<&str>,
        action_types: Option<&[String]>,
    ) -> Result<Vec<PackageContents>> {
        let packages = self.list_packages(publisher, pattern)?;
        let mut results = Vec::new();

        for pkg_info in &packages {
            // Resolve the publisher: prefer the FMRI's own, then the
            // archive-wide default, then fall back to the empty string.
            let pub_name = pkg_info
                .fmri
                .publisher
                .as_deref()
                .or(self.metadata.default_publisher.as_deref())
                .unwrap_or("");
            let manifest = self.fetch_manifest(pub_name, &pkg_info.fmri)?;

            let mut files = Vec::new();
            let mut directories = Vec::new();
            let mut links = Vec::new();
            let mut dependencies = Vec::new();
            let mut licenses = Vec::new();

            // No filter means every action type is included.
            let show_all = action_types.is_none();
            let types: Vec<&str> = action_types
                .map(|t| t.iter().map(|s| s.as_str()).collect())
                .unwrap_or_default();

            if show_all || types.contains(&"file") {
                for f in &manifest.files {
                    files.push(f.path.clone());
                }
            }
            if show_all || types.contains(&"dir") {
                for d in &manifest.directories {
                    directories.push(d.path.clone());
                }
            }
            if show_all || types.contains(&"link") {
                for l in &manifest.links {
                    links.push(l.path.clone());
                }
            }
            if show_all || types.contains(&"depend") {
                for d in &manifest.dependencies {
                    // Dependencies without an FMRI are skipped.
                    if let Some(ref fmri) = d.fmri {
                        dependencies.push(fmri.to_string());
                    }
                }
            }
            if show_all || types.contains(&"license") {
                for l in &manifest.licenses {
                    licenses.push(l.payload.clone());
                }
            }

            // Empty categories are reported as None rather than empty vectors.
            results.push(PackageContents {
                package_id: pkg_info.fmri.to_string(),
                files: if files.is_empty() { None } else { Some(files) },
                directories: if directories.is_empty() {
                    None
                } else {
                    Some(directories)
                },
                links: if links.is_empty() { None } else { Some(links) },
                dependencies: if dependencies.is_empty() {
                    None
                } else {
                    Some(dependencies)
                },
                licenses: if licenses.is_empty() {
                    None
                } else {
                    Some(licenses)
                },
            });
        }

        Ok(results)
    }

    /// Extract the payload identified by `digest` for `publisher` from the
    /// archive and write it to `dest`, creating parent directories as needed.
    fn fetch_payload(&self, publisher: &str, digest: &str, dest: &Path) -> Result<()> {
        let archive_path = Self::file_path(publisher, digest);
        debug!("Fetching payload from archive: {}", archive_path);

        let mut archive = self
            .archive
            .lock()
            .map_err(|e| RepositoryError::Other(format!("archive lock poisoned: {}", e)))?;

        let mut entry = archive
            .by_name(&archive_path)
            .map_err(|e| RepositoryError::ArchiveError(format!("payload {} not found: {}", digest, e)))?;

        // Ensure the destination's parent directory exists; the payload is
        // then written directly to `dest` (no temp-file/rename step here).
        if let Some(parent) = dest.parent() {
            fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
                path: parent.to_path_buf(),
                source: e,
            })?;
        }

        let mut out = fs::File::create(dest).map_err(|e| RepositoryError::FileCreateError {
            path: dest.to_path_buf(),
            source: e,
        })?;

        std::io::copy(&mut entry, &mut out)
            .map_err(|e| RepositoryError::ArchiveError(format!("failed to extract payload: {}", e)))?;

        Ok(())
    }

    /// Fetch and parse the manifest for `fmri` under `publisher`.
    fn fetch_manifest(&self, publisher: &str, fmri: &Fmri) -> Result<Manifest> {
        let text = self.fetch_manifest_text(publisher, fmri)?;
        Manifest::parse_string(text).map_err(RepositoryError::from)
    }

    /// Fetch the raw manifest text for `fmri` under `publisher` from the
    /// archive entry located via `manifest_path`.
    fn fetch_manifest_text(&self, publisher: &str, fmri: &Fmri) -> Result<String> {
        let version_str = fmri.version();
        let archive_path = Self::manifest_path(publisher, &fmri.name, &version_str);
        debug!("Fetching manifest from archive: {}", archive_path);

        let mut archive = self
            .archive
            .lock()
            .map_err(|e| RepositoryError::Other(format!("archive lock poisoned: {}", e)))?;

        let mut entry = archive
            .by_name(&archive_path)
            .map_err(|e| RepositoryError::ArchiveError(format!("manifest not found for {}: {}", fmri, e)))?;

        let mut buf = String::new();
        entry
            .read_to_string(&mut buf)
            .map_err(|e| RepositoryError::ArchiveError(format!("failed to read manifest: {}", e)))?;

        Ok(buf)
    }

    /// Case-insensitive substring search of `query` over each index entry's
    /// FMRI string and (if present) its summary. `limit` caps the number of
    /// results returned.
    fn search(
        &self,
        query: &str,
        publisher: Option<&str>,
        limit: Option<usize>,
    ) -> Result<Vec<PackageInfo>> {
        // Simple substring search over the package index
        let query_lower = query.to_lowercase();
        let mut results = Vec::new();

        for entry in &self.metadata.packages {
            if let Some(pub_filter) = publisher {
                if entry.publisher != pub_filter {
                    continue;
                }
            }

            let matches = entry.fmri.to_lowercase().contains(&query_lower)
                || entry
                    .summary
                    .as_deref()
                    .map(|s| s.to_lowercase().contains(&query_lower))
                    .unwrap_or(false);

            if matches {
                let fmri = Fmri::parse(&entry.fmri)
                    .map_err(|e| RepositoryError::Other(format!("invalid FMRI: {}", e)))?;
                results.push(PackageInfo { fmri });

                // Stop early once the requested result cap is reached.
                if let Some(lim) = limit {
                    if results.len() >= lim {
                        break;
                    }
                }
            }
        }

        Ok(results)
    }
}
|
||||
|
||||
/// Simple glob-style pattern matching for package names.
///
/// Patterns containing `*` (any run of characters, possibly empty) or `?`
/// (exactly one character) are matched as globs against the whole name;
/// patterns without wildcards fall back to a plain substring test.
fn matches_pattern(name: &str, pattern: &str) -> bool {
    if pattern.contains('*') || pattern.contains('?') {
        glob_match(name, pattern)
    } else {
        name.contains(pattern)
    }
}

/// Recursive glob matcher supporting `*` and `?`.
///
/// Operates on `&str` tails via `Chars::as_str` instead of collecting the
/// remaining iterators into fresh `String`s at every `*` backtracking step,
/// which avoids the previous per-step allocations (quadratic in the worst
/// case). Behavior is unchanged: `*` matches zero or more characters, `?`
/// exactly one, and a trailing `*` matches any remainder.
fn glob_match(s: &str, pattern: &str) -> bool {
    let mut pat = pattern.chars();
    match pat.next() {
        // Empty pattern matches only the empty string.
        None => s.is_empty(),
        Some('*') => {
            let rest = pat.as_str();
            // A trailing `*` matches whatever is left of `s`.
            if rest.is_empty() {
                return true;
            }
            // Backtracking: let `*` consume 0, 1, 2, ... characters until the
            // rest of the pattern matches the remaining tail.
            let mut tail = s;
            loop {
                if glob_match(tail, rest) {
                    return true;
                }
                let mut chars = tail.chars();
                if chars.next().is_none() {
                    return false;
                }
                tail = chars.as_str();
            }
        }
        Some('?') => {
            // `?` consumes exactly one character.
            let mut chars = s.chars();
            chars.next().is_some() && glob_match(chars.as_str(), pat.as_str())
        }
        Some(pc) => {
            // Literal character must match exactly.
            let mut chars = s.chars();
            chars.next() == Some(pc) && glob_match(chars.as_str(), pat.as_str())
        }
    }
}
|
||||
333
libips/src/repository/archive_tests.rs
Normal file
333
libips/src/repository/archive_tests.rs
Normal file
|
|
@ -0,0 +1,333 @@
|
|||
// This Source Code Form is subject to the terms of
|
||||
// the Mozilla Public License, v. 2.0. If a copy of the
|
||||
// MPL was not distributed with this file, You can
|
||||
// obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::actions::{Attr, File as FileAction, Manifest};
|
||||
use crate::fmri::Fmri;
|
||||
use crate::repository::{
|
||||
ArchiveBackend, ArchiveWriter, FileBackend, ReadableRepository, RepositoryVersion,
|
||||
WritableRepository,
|
||||
};
|
||||
use tempfile::tempdir;
|
||||
|
||||
    #[test]
    // Verifies that ArchiveMetadata survives a JSON serialize/deserialize
    // round trip with all fields (including the nested package entry and its
    // optional summary) intact.
    fn test_archive_metadata_round_trip() {
        use crate::repository::archive_writer::{ArchiveMetadata, ArchivePackageEntry};

        // Build a minimal but fully-populated metadata index by hand.
        let metadata = ArchiveMetadata {
            version: 1,
            format: "pkg6p".to_string(),
            created: "2026-03-17T00:00:00Z".to_string(),
            publishers: vec!["test".to_string()],
            default_publisher: Some("test".to_string()),
            packages: vec![ArchivePackageEntry {
                fmri: "pkg://test/pkgA@1.0".to_string(),
                publisher: "test".to_string(),
                manifest_path: "publisher/test/pkg/pkgA/1.0".to_string(),
                file_hashes: vec!["abc123".to_string()],
                obsolete: false,
                renamed: false,
                summary: Some("Test package".to_string()),
            }],
            total_packages: 1,
            total_files: 1,
        };

        // Round-trip through the same serde_json path the archive uses.
        let json = serde_json::to_string_pretty(&metadata).unwrap();
        let parsed: ArchiveMetadata = serde_json::from_str(&json).unwrap();

        assert_eq!(parsed.version, 1);
        assert_eq!(parsed.format, "pkg6p");
        assert_eq!(parsed.packages.len(), 1);
        assert_eq!(parsed.packages[0].fmri, "pkg://test/pkgA@1.0");
        assert_eq!(parsed.packages[0].summary, Some("Test package".to_string()));
    }
||||
|
||||
    #[test]
    // End-to-end: publish a metadata-only package into a FileBackend repo,
    // export it to a .p6p archive with ArchiveWriter, then reopen it with
    // ArchiveBackend and check info, listing, and manifest retrieval.
    fn test_archive_create_and_read() {
        let source_dir = tempdir().unwrap();
        let archive_dir = tempdir().unwrap();
        let archive_path = archive_dir.path().join("test.p6p");

        // Create source repo with a package
        let mut source_repo =
            FileBackend::create(source_dir.path(), RepositoryVersion::V4).unwrap();
        source_repo.add_publisher("test").unwrap();

        // Manifest carries only pkg.fmri and pkg.summary attributes — no files.
        let fmri = Fmri::parse("pkg://test/pkgA@1.0").unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.push(Attr {
            key: "pkg.fmri".to_string(),
            values: vec![fmri.to_string()],
            ..Default::default()
        });
        manifest.attributes.push(Attr {
            key: "pkg.summary".to_string(),
            values: vec!["Test Package A".to_string()],
            ..Default::default()
        });

        // Publish via a transaction, then rebuild the catalog so the package
        // is visible to readers.
        let mut txn = source_repo.begin_transaction().unwrap();
        txn.set_publisher("test");
        txn.update_manifest(manifest);
        txn.commit().unwrap();
        source_repo.rebuild(Some("test"), false, false).unwrap();

        // Create archive
        let mut writer = ArchiveWriter::create(&archive_path).unwrap();
        writer
            .add_from_repository(&source_repo, "test", None)
            .unwrap();
        writer.finish().unwrap();

        // Verify the archive file was written
        assert!(archive_path.exists());

        // Open archive and verify contents
        let archive_repo = ArchiveBackend::open(&archive_path).unwrap();

        // Check info
        let info = archive_repo.get_info().unwrap();
        assert_eq!(info.publishers.len(), 1);
        assert_eq!(info.publishers[0].name, "test");

        // List packages
        let packages = archive_repo.list_packages(Some("test"), None).unwrap();
        assert_eq!(packages.len(), 1);
        assert_eq!(packages[0].fmri.name, "pkgA");

        // Fetch manifest and confirm the summary attribute round-tripped.
        let manifest = archive_repo
            .fetch_manifest("test", &Fmri::parse("pkg://test/pkgA@1.0").unwrap())
            .unwrap();
        let summary = manifest
            .attributes
            .iter()
            .find(|a| a.key == "pkg.summary")
            .and_then(|a| a.values.first().cloned());
        assert_eq!(summary, Some("Test Package A".to_string()));
    }
|
||||
|
||||
    #[test]
    // Verifies that file payloads survive the repo -> archive path: a package
    // with one file action is archived, then its payload is extracted from
    // the archive via fetch_payload.
    fn test_archive_with_payloads() {
        let source_dir = tempdir().unwrap();
        let archive_dir = tempdir().unwrap();
        let dest_dir = tempdir().unwrap();
        let archive_path = archive_dir.path().join("payload_test.p6p");

        // Create source repo with a package that has files
        let mut source_repo =
            FileBackend::create(source_dir.path(), RepositoryVersion::V4).unwrap();
        source_repo.add_publisher("test").unwrap();

        // Create a prototype file on disk to serve as the payload content.
        let proto_dir = source_dir.path().join("proto");
        std::fs::create_dir_all(&proto_dir).unwrap();
        let file_path = proto_dir.join("etc/config.txt");
        std::fs::create_dir_all(file_path.parent().unwrap()).unwrap();
        std::fs::write(&file_path, b"config-content").unwrap();

        // File action whose payload digest is computed from the prototype.
        let mut fa = FileAction::default();
        fa.path = "etc/config.txt".to_string();
        fa.payload = Some(crate::payload::Payload::compute_payload(&file_path).unwrap());

        let fmri = Fmri::parse("pkg://test/withfiles@1.0").unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.push(Attr {
            key: "pkg.fmri".to_string(),
            values: vec![fmri.to_string()],
            ..Default::default()
        });

        // Publish the file and manifest, then rebuild the catalog.
        let mut txn = source_repo.begin_transaction().unwrap();
        txn.set_publisher("test");
        txn.add_file(fa, &file_path).unwrap();
        txn.update_manifest(manifest);
        txn.commit().unwrap();
        source_repo.rebuild(Some("test"), false, false).unwrap();

        // Create archive
        let mut writer = ArchiveWriter::create(&archive_path).unwrap();
        writer
            .add_from_repository(&source_repo, "test", None)
            .unwrap();
        writer.finish().unwrap();

        // Open archive and verify the manifest lists the file action
        let archive_repo = ArchiveBackend::open(&archive_path).unwrap();
        let manifest = archive_repo
            .fetch_manifest("test", &Fmri::parse("pkg://test/withfiles@1.0").unwrap())
            .unwrap();
        assert_eq!(manifest.files.len(), 1);

        // Verify the payload can be extracted by its primary digest
        let hash = &manifest.files[0]
            .payload
            .as_ref()
            .unwrap()
            .primary_identifier
            .hash;
        let payload_dest = dest_dir.path().join("extracted_payload");
        archive_repo
            .fetch_payload("test", hash, &payload_dest)
            .unwrap();
        assert!(payload_dest.exists());
    }
|
||||
|
||||
    #[test]
    // Full round trip: FileBackend -> ArchiveWriter -> ArchiveBackend ->
    // PackageReceiver -> fresh FileBackend, confirming a .p6p archive can be
    // used as a pkg6recv source.
    fn test_archive_round_trip_via_recv() {
        let source_dir = tempdir().unwrap();
        let archive_dir = tempdir().unwrap();
        let dest_dir = tempdir().unwrap();
        let archive_path = archive_dir.path().join("roundtrip.p6p");

        // Create source repo
        let mut source_repo =
            FileBackend::create(source_dir.path(), RepositoryVersion::V4).unwrap();
        source_repo.add_publisher("test").unwrap();

        // Metadata-only package: pkg.fmri and pkg.summary attributes.
        let fmri = Fmri::parse("pkg://test/pkgB@2.0").unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.push(Attr {
            key: "pkg.fmri".to_string(),
            values: vec![fmri.to_string()],
            ..Default::default()
        });
        manifest.attributes.push(Attr {
            key: "pkg.summary".to_string(),
            values: vec!["Package B".to_string()],
            ..Default::default()
        });

        // Publish and rebuild so the package is cataloged.
        let mut txn = source_repo.begin_transaction().unwrap();
        txn.set_publisher("test");
        txn.update_manifest(manifest);
        txn.commit().unwrap();
        source_repo.rebuild(Some("test"), false, false).unwrap();

        // Create archive from source
        let mut writer = ArchiveWriter::create(&archive_path).unwrap();
        writer
            .add_from_repository(&source_repo, "test", None)
            .unwrap();
        writer.finish().unwrap();

        // Import from archive into destination via PackageReceiver
        let archive_repo = ArchiveBackend::open(&archive_path).unwrap();
        let dest_repo = FileBackend::create(dest_dir.path(), RepositoryVersion::V4).unwrap();

        let mut receiver = crate::recv::PackageReceiver::new(&archive_repo, dest_repo);
        // Receive every package the archive lists for this publisher.
        let fmris = archive_repo
            .list_packages(Some("test"), None)
            .unwrap()
            .into_iter()
            .map(|p| p.fmri)
            .collect::<Vec<_>>();

        receiver.receive(Some("test"), &fmris, false).unwrap();

        // Verify destination has the package (reopen to read fresh state).
        let dest_repo = FileBackend::open(dest_dir.path()).unwrap();
        let pkgs = dest_repo.list_packages(Some("test"), Some("pkgB")).unwrap();
        assert_eq!(pkgs.len(), 1);
        assert_eq!(pkgs[0].fmri.name, "pkgB");
    }
|
||||
|
||||
#[test]
fn test_archive_search() {
    // The archive backend must support substring search across the
    // packages it indexes.
    let source_dir = tempdir().unwrap();
    let archive_dir = tempdir().unwrap();
    let archive_path = archive_dir.path().join("search_test.p6p");

    // Source repository with two packages under the same "web/" stem.
    let mut source_repo =
        FileBackend::create(source_dir.path(), RepositoryVersion::V4).unwrap();
    source_repo.add_publisher("test").unwrap();

    let inputs = [("web/nginx", "Nginx Webserver"), ("web/apache", "Apache HTTP")];
    for (name, summary) in inputs {
        let fmri = Fmri::parse(&format!("pkg://test/{}@1.0", name)).unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.extend([
            Attr {
                key: "pkg.fmri".to_string(),
                values: vec![fmri.to_string()],
                ..Default::default()
            },
            Attr {
                key: "pkg.summary".to_string(),
                values: vec![summary.to_string()],
                ..Default::default()
            },
        ]);

        let mut txn = source_repo.begin_transaction().unwrap();
        txn.set_publisher("test");
        txn.update_manifest(manifest);
        txn.commit().unwrap();
    }
    source_repo.rebuild(Some("test"), false, false).unwrap();

    // Write both packages into an archive.
    let mut writer = ArchiveWriter::create(&archive_path).unwrap();
    writer
        .add_from_repository(&source_repo, "test", None)
        .unwrap();
    writer.finish().unwrap();

    // A specific term matches one package; the shared stem matches both.
    let archive_repo = ArchiveBackend::open(&archive_path).unwrap();
    let results = archive_repo.search("nginx", Some("test"), None).unwrap();
    assert_eq!(results.len(), 1);
    assert!(results[0].fmri.name.contains("nginx"));

    let results = archive_repo.search("web", Some("test"), None).unwrap();
    assert_eq!(results.len(), 2);
}
|
||||
|
||||
#[test]
fn test_archive_list_with_pattern() {
    // Wildcard patterns passed to list_packages must select matching
    // subsets of the archived packages.
    let source_dir = tempdir().unwrap();
    let archive_dir = tempdir().unwrap();
    let archive_path = archive_dir.path().join("pattern_test.p6p");

    let mut source_repo =
        FileBackend::create(source_dir.path(), RepositoryVersion::V4).unwrap();
    source_repo.add_publisher("test").unwrap();

    // Two library packages and one application package.
    let names = ["lib/libfoo", "lib/libbar", "app/myapp"];
    for name in names {
        let fmri = Fmri::parse(&format!("pkg://test/{}@1.0", name)).unwrap();
        let mut manifest = Manifest::new();
        manifest.attributes.extend([Attr {
            key: "pkg.fmri".to_string(),
            values: vec![fmri.to_string()],
            ..Default::default()
        }]);

        let mut txn = source_repo.begin_transaction().unwrap();
        txn.set_publisher("test");
        txn.update_manifest(manifest);
        txn.commit().unwrap();
    }
    source_repo.rebuild(Some("test"), false, false).unwrap();

    let mut writer = ArchiveWriter::create(&archive_path).unwrap();
    writer
        .add_from_repository(&source_repo, "test", None)
        .unwrap();
    writer.finish().unwrap();

    let archive_repo = ArchiveBackend::open(&archive_path).unwrap();

    // "*lib*" selects both library packages, "*myapp*" just the app.
    let results = archive_repo
        .list_packages(Some("test"), Some("*lib*"))
        .unwrap();
    assert_eq!(results.len(), 2);

    let results = archive_repo
        .list_packages(Some("test"), Some("*myapp*"))
        .unwrap();
    assert_eq!(results.len(), 1);
}
|
||||
}
|
||||
358
libips/src/repository/archive_writer.rs
Normal file
358
libips/src/repository/archive_writer.rs
Normal file
|
|
@ -0,0 +1,358 @@
|
|||
// This Source Code Form is subject to the terms of
|
||||
// the Mozilla Public License, v. 2.0. If a copy of the
|
||||
// MPL was not distributed with this file, You can
|
||||
// obtain one at https://mozilla.org/MPL/2.0/.
|
||||
|
||||
use crate::fmri::Fmri;
|
||||
use crate::repository::{ReadableRepository, RepositoryError, Result};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use tracing::{debug, info};
|
||||
use zip::write::SimpleFileOptions;
|
||||
use zip::ZipWriter;
|
||||
|
||||
use super::file_backend::FileBackend;
|
||||
|
||||
/// Metadata stored in `pkg6.archive.json` at the root of a `.p6p` archive.
///
/// Written once by [`ArchiveWriter::finish`]; field order matters for the
/// serialized JSON layout, so do not reorder fields casually.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ArchiveMetadata {
    /// Archive format version; currently always 1.
    pub version: u32,
    /// Format tag; currently always the literal string "pkg6p".
    pub format: String,
    /// Creation timestamp as an RFC 3339 string (UTC at write time).
    pub created: String,
    /// All publishers whose packages were added to the archive.
    pub publishers: Vec<String>,
    /// The first publisher added to the archive, if any.
    pub default_publisher: Option<String>,
    /// Index entry for every package contained in the archive.
    pub packages: Vec<ArchivePackageEntry>,
    /// Convenience count; equals `packages.len()`.
    pub total_packages: usize,
    /// Number of unique payload files written (deduplicated by hash).
    pub total_files: usize,
}
|
||||
|
||||
/// An entry in the archive's package index.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ArchivePackageEntry {
    /// Full FMRI of the package in string form.
    pub fmri: String,
    /// Publisher the package belongs to.
    pub publisher: String,
    /// Archive-internal path of the legacy manifest; the JSON rendering
    /// lives next to it at `"<manifest_path>.json"`.
    pub manifest_path: String,
    /// Hashes of all file payloads referenced by this package's manifest.
    pub file_hashes: Vec<String>,
    /// True when the manifest carries `pkg.obsolete=true`.
    /// `serde(default)` keeps older archives without this field readable.
    #[serde(default)]
    pub obsolete: bool,
    /// True when the manifest carries `pkg.renamed=true`.
    #[serde(default)]
    pub renamed: bool,
    /// First value of the `pkg.summary` attribute, when present.
    pub summary: Option<String>,
}
|
||||
|
||||
/// Builder for creating `.p6p` archives.
///
/// Packages are added with [`ArchiveWriter::add_from_repository`]; the
/// archive is only valid once [`ArchiveWriter::finish`] has written the
/// metadata index and closed the ZIP stream.
pub struct ArchiveWriter {
    // Underlying ZIP stream the archive is written into.
    zip: ZipWriter<fs::File>,
    // Index entries accumulated for the final metadata record.
    packages: Vec<ArchivePackageEntry>,
    // Every publisher that has contributed at least one package.
    publishers: HashSet<String>,
    // First publisher added; recorded in the metadata as the default.
    default_publisher: Option<String>,
    // Payload hashes already written, used to deduplicate shared files.
    written_files: HashSet<String>,
    // Count of unique payloads written so far.
    total_files: usize,
}
|
||||
|
||||
impl ArchiveWriter {
|
||||
/// Create a new archive at the given path.
|
||||
pub fn create<P: AsRef<Path>>(path: P) -> Result<Self> {
|
||||
let file = fs::File::create(path.as_ref()).map_err(|e| RepositoryError::FileCreateError {
|
||||
path: path.as_ref().to_path_buf(),
|
||||
source: e,
|
||||
})?;
|
||||
|
||||
let zip = ZipWriter::new(file);
|
||||
|
||||
Ok(ArchiveWriter {
|
||||
zip,
|
||||
packages: Vec::new(),
|
||||
publishers: HashSet::new(),
|
||||
default_publisher: None,
|
||||
written_files: HashSet::new(),
|
||||
total_files: 0,
|
||||
})
|
||||
}
|
||||
|
||||
/// Add all packages from a readable repository for the given publisher.
|
||||
/// If `fmri_filter` is `Some`, only packages matching those FMRIs are added.
|
||||
pub fn add_from_repository<R: ReadableRepository + Sync>(
|
||||
&mut self,
|
||||
source: &R,
|
||||
publisher: &str,
|
||||
fmri_filter: Option<&[Fmri]>,
|
||||
) -> Result<()> {
|
||||
self.publishers.insert(publisher.to_string());
|
||||
if self.default_publisher.is_none() {
|
||||
self.default_publisher = Some(publisher.to_string());
|
||||
}
|
||||
|
||||
let packages = source.list_packages(Some(publisher), None)?;
|
||||
|
||||
for pkg_info in &packages {
|
||||
let fmri = &pkg_info.fmri;
|
||||
|
||||
// Apply filter if provided
|
||||
if let Some(filter) = fmri_filter {
|
||||
let matches = filter.iter().any(|f| {
|
||||
f.name == fmri.name
|
||||
&& (f.version.is_none() || f.version() == fmri.version())
|
||||
});
|
||||
if !matches {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
self.add_package_from_repo(source, publisher, fmri)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Add a single package from a repository source.
|
||||
fn add_package_from_repo<R: ReadableRepository + Sync>(
|
||||
&mut self,
|
||||
source: &R,
|
||||
publisher: &str,
|
||||
fmri: &Fmri,
|
||||
) -> Result<()> {
|
||||
info!("Adding package {} from publisher {}", fmri, publisher);
|
||||
|
||||
// Fetch the manifest text (legacy IPS format)
|
||||
let manifest_text = source.fetch_manifest_text(publisher, fmri)?;
|
||||
let manifest = crate::actions::Manifest::parse_string(manifest_text.clone())
|
||||
.map_err(RepositoryError::from)?;
|
||||
|
||||
let version_str = fmri.version();
|
||||
let encoded_stem = FileBackend::url_encode(&fmri.name);
|
||||
let encoded_version = FileBackend::url_encode(&version_str);
|
||||
|
||||
let manifest_archive_path = format!(
|
||||
"publisher/{}/pkg/{}/{}",
|
||||
publisher, encoded_stem, encoded_version
|
||||
);
|
||||
|
||||
// Write the legacy manifest
|
||||
let options = SimpleFileOptions::default()
|
||||
.compression_method(zip::CompressionMethod::Zstd)
|
||||
.compression_level(Some(3));
|
||||
|
||||
self.zip
|
||||
.start_file(&manifest_archive_path, options)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to start manifest entry: {}", e)))?;
|
||||
self.zip
|
||||
.write_all(manifest_text.as_bytes())
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write manifest: {}", e)))?;
|
||||
|
||||
// Write the JSON manifest
|
||||
let json_manifest_path = format!("{}.json", manifest_archive_path);
|
||||
let json_bytes = serde_json::to_vec_pretty(&manifest)
|
||||
.map_err(|e| RepositoryError::JsonSerializeError(e.to_string()))?;
|
||||
|
||||
self.zip
|
||||
.start_file(&json_manifest_path, options)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to start json manifest entry: {}", e)))?;
|
||||
self.zip
|
||||
.write_all(&json_bytes)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write json manifest: {}", e)))?;
|
||||
|
||||
// Collect file hashes and write payloads
|
||||
let mut file_hashes = Vec::new();
|
||||
|
||||
for file_action in &manifest.files {
|
||||
if let Some(ref payload) = file_action.payload {
|
||||
let hash = &payload.primary_identifier.hash;
|
||||
file_hashes.push(hash.clone());
|
||||
|
||||
// Deduplicate: skip if already written
|
||||
if self.written_files.contains(hash) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_archive_path = Self::file_archive_path(publisher, hash);
|
||||
|
||||
// Fetch payload to a temporary file, then read and compress into ZIP
|
||||
let temp_dir = tempfile::tempdir()
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to create temp dir: {}", e)))?;
|
||||
let temp_path = temp_dir.path().join(hash);
|
||||
|
||||
source.fetch_payload(publisher, hash, &temp_path)?;
|
||||
|
||||
// Read the payload (which is compressed from the source repo)
|
||||
// and decompress it before writing to the archive.
|
||||
// The ZIP zstd compression will re-compress it.
|
||||
let payload_bytes = Self::read_and_decompress(&temp_path)?;
|
||||
|
||||
self.zip
|
||||
.start_file(&file_archive_path, options)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to start file entry: {}", e)))?;
|
||||
self.zip
|
||||
.write_all(&payload_bytes)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write file: {}", e)))?;
|
||||
|
||||
self.written_files.insert(hash.clone());
|
||||
self.total_files += 1;
|
||||
|
||||
debug!("Wrote payload {} ({} bytes)", hash, payload_bytes.len());
|
||||
}
|
||||
}
|
||||
|
||||
// Also handle signature payloads
|
||||
for sig in &manifest.signatures {
|
||||
let digest = if !sig.value.is_empty() {
|
||||
&sig.value
|
||||
} else if !sig.chash.is_empty() {
|
||||
&sig.chash
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if self.written_files.contains(digest) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_archive_path = Self::file_archive_path(publisher, digest);
|
||||
let temp_dir = tempfile::tempdir()
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to create temp dir: {}", e)))?;
|
||||
let temp_path = temp_dir.path().join(digest);
|
||||
|
||||
match source.fetch_payload(publisher, digest, &temp_path) {
|
||||
Ok(()) => {
|
||||
let payload_bytes = fs::read(&temp_path).map_err(|e| {
|
||||
RepositoryError::FileReadError {
|
||||
path: temp_path.clone(),
|
||||
source: e,
|
||||
}
|
||||
})?;
|
||||
|
||||
self.zip
|
||||
.start_file(&file_archive_path, options)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to start sig entry: {}", e)))?;
|
||||
self.zip
|
||||
.write_all(&payload_bytes)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write sig: {}", e)))?;
|
||||
|
||||
self.written_files.insert(digest.clone());
|
||||
self.total_files += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
debug!("Could not fetch signature payload {}: {}", digest, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract summary from manifest attributes
|
||||
let summary = manifest
|
||||
.attributes
|
||||
.iter()
|
||||
.find(|a| a.key == "pkg.summary")
|
||||
.and_then(|a| a.values.first().cloned());
|
||||
|
||||
let obsolete = manifest
|
||||
.attributes
|
||||
.iter()
|
||||
.any(|a| a.key == "pkg.obsolete" && a.values.first().map(|v| v == "true").unwrap_or(false));
|
||||
|
||||
let renamed = manifest
|
||||
.attributes
|
||||
.iter()
|
||||
.any(|a| a.key == "pkg.renamed" && a.values.first().map(|v| v == "true").unwrap_or(false));
|
||||
|
||||
self.packages.push(ArchivePackageEntry {
|
||||
fmri: fmri.to_string(),
|
||||
publisher: publisher.to_string(),
|
||||
manifest_path: manifest_archive_path,
|
||||
file_hashes,
|
||||
obsolete,
|
||||
renamed,
|
||||
summary,
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Construct the archive-internal path for a file payload.
|
||||
fn file_archive_path(publisher: &str, hash: &str) -> String {
|
||||
if hash.len() < 4 {
|
||||
format!("publisher/{}/file/{}", publisher, hash)
|
||||
} else {
|
||||
let first_two = &hash[0..2];
|
||||
let next_two = &hash[2..4];
|
||||
format!(
|
||||
"publisher/{}/file/{}/{}/{}",
|
||||
publisher, first_two, next_two, hash
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Read a file and attempt to decompress it (gzip or lz4).
|
||||
/// If decompression fails, return the raw bytes (already uncompressed).
|
||||
fn read_and_decompress(path: &Path) -> Result<Vec<u8>> {
|
||||
let raw = fs::read(path).map_err(|e| RepositoryError::FileReadError {
|
||||
path: path.to_path_buf(),
|
||||
source: e,
|
||||
})?;
|
||||
|
||||
// Try gzip decompression
|
||||
if raw.len() >= 2 && raw[0] == 0x1f && raw[1] == 0x8b {
|
||||
let mut decoder = flate2::read::GzDecoder::new(&raw[..]);
|
||||
let mut decompressed = Vec::new();
|
||||
if std::io::Read::read_to_end(&mut decoder, &mut decompressed).is_ok() {
|
||||
return Ok(decompressed);
|
||||
}
|
||||
}
|
||||
|
||||
// Try LZ4 decompression (LZ4 frame magic: 04 22 4D 18)
|
||||
if raw.len() >= 4 && raw[0] == 0x04 && raw[1] == 0x22 && raw[2] == 0x4D && raw[3] == 0x18
|
||||
{
|
||||
let decoder = lz4::Decoder::new(&raw[..]);
|
||||
if let Ok(mut decoder) = decoder {
|
||||
let mut decompressed = Vec::new();
|
||||
if std::io::Read::read_to_end(&mut decoder, &mut decompressed).is_ok() {
|
||||
return Ok(decompressed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Not compressed or unknown format — return raw bytes
|
||||
Ok(raw)
|
||||
}
|
||||
|
||||
/// Finalize the archive: write the metadata index and close the ZIP file.
|
||||
pub fn finish(mut self) -> Result<()> {
|
||||
let metadata = ArchiveMetadata {
|
||||
version: 1,
|
||||
format: "pkg6p".to_string(),
|
||||
created: chrono::Utc::now().to_rfc3339(),
|
||||
publishers: self.publishers.into_iter().collect(),
|
||||
default_publisher: self.default_publisher,
|
||||
packages: self.packages.clone(),
|
||||
total_packages: self.packages.len(),
|
||||
total_files: self.total_files,
|
||||
};
|
||||
|
||||
let options = SimpleFileOptions::default()
|
||||
.compression_method(zip::CompressionMethod::Zstd)
|
||||
.compression_level(Some(3));
|
||||
|
||||
let json = serde_json::to_vec_pretty(&metadata)
|
||||
.map_err(|e| RepositoryError::JsonSerializeError(e.to_string()))?;
|
||||
|
||||
self.zip
|
||||
.start_file("pkg6.archive.json", options)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write index: {}", e)))?;
|
||||
self.zip
|
||||
.write_all(&json)
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to write index: {}", e)))?;
|
||||
|
||||
self.zip
|
||||
.finish()
|
||||
.map_err(|e| RepositoryError::ArchiveError(format!("failed to finalize archive: {}", e)))?;
|
||||
|
||||
info!(
|
||||
"Archive complete: {} packages, {} files",
|
||||
metadata.total_packages, metadata.total_files
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@ -2625,7 +2625,7 @@ impl FileBackend {
|
|||
}
|
||||
|
||||
/// URL encode a string for use in a filename
|
||||
fn url_encode(s: &str) -> String {
|
||||
pub(crate) fn url_encode(s: &str) -> String {
|
||||
let mut result = String::new();
|
||||
for c in s.chars() {
|
||||
match c {
|
||||
|
|
|
|||
|
|
@ -215,6 +215,13 @@ pub enum RepositoryError {
|
|||
help("Check that the path is valid and within the expected directory")
|
||||
)]
|
||||
PathPrefixError(String),
|
||||
|
||||
#[error("archive error: {0}")]
|
||||
#[diagnostic(
|
||||
code(ips::repository_error::archive_error),
|
||||
help("Check that the archive is valid and accessible")
|
||||
)]
|
||||
ArchiveError(String),
|
||||
}
|
||||
|
||||
// Implement From for common error types
|
||||
|
|
@ -243,6 +250,14 @@ impl From<rusqlite::Error> for RepositoryError {
|
|||
}
|
||||
}
|
||||
|
||||
// Fold any failure from the `zip` crate into the repository-level
// archive error, keeping the underlying message.
impl From<zip::result::ZipError> for RepositoryError {
    fn from(err: zip::result::ZipError) -> Self {
        Self::ArchiveError(err.to_string())
    }
}
|
||||
|
||||
pub mod archive_backend;
|
||||
pub mod archive_writer;
|
||||
pub mod catalog;
|
||||
mod catalog_writer;
|
||||
pub(crate) mod file_backend;
|
||||
|
|
@ -252,6 +267,8 @@ mod rest_backend;
|
|||
pub mod shard_sync;
|
||||
pub mod sqlite_catalog;
|
||||
#[cfg(test)]
|
||||
mod archive_tests;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::actions::ActionError;
|
||||
|
|
@ -259,6 +276,8 @@ use crate::digest::DigestError;
|
|||
pub use catalog::{
|
||||
CatalogAttrs, CatalogError, CatalogManager, CatalogOperationType, CatalogPart, UpdateLog,
|
||||
};
|
||||
pub use archive_backend::ArchiveBackend;
|
||||
pub use archive_writer::ArchiveWriter;
|
||||
pub use file_backend::{FileBackend, IndexEntry};
|
||||
pub use obsoleted::{ObsoletedPackageManager, ObsoletedPackageMetadata};
|
||||
pub use progress::{NoopProgressReporter, ProgressInfo, ProgressReporter};
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ use clap::Parser;
|
|||
use libips::fmri::Fmri;
|
||||
use libips::recv::PackageReceiver;
|
||||
use libips::repository::{
|
||||
FileBackend, ProgressInfo, ProgressReporter, ReadableRepository, RestBackend,
|
||||
ArchiveBackend, FileBackend, ProgressInfo, ProgressReporter, ReadableRepository, RestBackend,
|
||||
};
|
||||
use miette::{IntoDiagnostic, Result};
|
||||
use std::path::PathBuf;
|
||||
|
|
@ -57,13 +57,24 @@ fn main() -> Result<()> {
|
|||
|
||||
let progress = ConsoleProgressReporter;
|
||||
|
||||
// Determine if source is a URL or a path and receive packages
|
||||
// Determine if source is a URL, archive, or path and receive packages
|
||||
if cli.source.starts_with("http://") || cli.source.starts_with("https://") {
|
||||
let source_repo = RestBackend::open(&cli.source).into_diagnostic()?;
|
||||
let dest_repo = FileBackend::open(&cli.dest).into_diagnostic()?;
|
||||
|
||||
|
||||
let fmris = resolve_packages(&source_repo, cli.publisher.as_deref(), &cli.packages)?;
|
||||
|
||||
|
||||
let mut receiver = PackageReceiver::new(&source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
receiver
|
||||
.receive(cli.publisher.as_deref(), &fmris, cli.recursive)
|
||||
.into_diagnostic()?;
|
||||
} else if cli.source.ends_with(".p6p") {
|
||||
let source_repo = ArchiveBackend::open(&cli.source).into_diagnostic()?;
|
||||
let dest_repo = FileBackend::open(&cli.dest).into_diagnostic()?;
|
||||
|
||||
let fmris = resolve_packages(&source_repo, cli.publisher.as_deref(), &cli.packages)?;
|
||||
|
||||
let mut receiver = PackageReceiver::new(&source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
receiver
|
||||
|
|
@ -72,9 +83,9 @@ fn main() -> Result<()> {
|
|||
} else {
|
||||
let source_repo = FileBackend::open(&cli.source).into_diagnostic()?;
|
||||
let dest_repo = FileBackend::open(&cli.dest).into_diagnostic()?;
|
||||
|
||||
|
||||
let fmris = resolve_packages(&source_repo, cli.publisher.as_deref(), &cli.packages)?;
|
||||
|
||||
|
||||
let mut receiver = PackageReceiver::new(&source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
receiver
|
||||
|
|
|
|||
|
|
@ -4,7 +4,10 @@ use error::{Pkg6RepoError, Result};
|
|||
use pkg5_import::Pkg5Importer;
|
||||
|
||||
use clap::{Parser, Subcommand};
|
||||
use libips::repository::{FileBackend, ReadableRepository, RepositoryVersion, WritableRepository};
|
||||
use libips::repository::{
|
||||
FileBackend, ProgressInfo, ProgressReporter, ReadableRepository, RepositoryVersion,
|
||||
WritableRepository,
|
||||
};
|
||||
use serde::Serialize;
|
||||
use std::convert::TryFrom;
|
||||
use std::path::PathBuf;
|
||||
|
|
@ -320,6 +323,40 @@ enum Commands {
|
|||
query: String,
|
||||
},
|
||||
|
||||
/// Create a .p6p archive from a repository
|
||||
Archive {
|
||||
/// Path to the source repository
|
||||
#[clap(short = 's', long)]
|
||||
source: PathBuf,
|
||||
|
||||
/// Output archive path (.p6p)
|
||||
#[clap(short = 'o', long)]
|
||||
output: PathBuf,
|
||||
|
||||
/// Publisher to archive (defaults to all publishers)
|
||||
#[clap(short = 'p', long)]
|
||||
publisher: Option<String>,
|
||||
|
||||
/// Specific packages to include (FMRIs, defaults to all)
|
||||
#[clap(long)]
|
||||
packages: Option<Vec<String>>,
|
||||
},
|
||||
|
||||
/// Import packages from a .p6p archive into a repository
|
||||
ImportArchive {
|
||||
/// Path to the .p6p archive
|
||||
#[clap(short = 's', long)]
|
||||
source: PathBuf,
|
||||
|
||||
/// Path to the destination repository
|
||||
#[clap(short = 'd', long)]
|
||||
destination: PathBuf,
|
||||
|
||||
/// Publisher to import (defaults to all publishers in the archive)
|
||||
#[clap(short = 'p', long)]
|
||||
publisher: Option<String>,
|
||||
},
|
||||
|
||||
/// Import a pkg5 repository
|
||||
ImportPkg5 {
|
||||
/// Path to the pkg5 repository (directory or p5p archive)
|
||||
|
|
@ -504,6 +541,20 @@ enum Commands {
|
|||
},
|
||||
}
|
||||
|
||||
/// Progress reporter for the archive/import commands that forwards every
/// progress event to the `tracing` log output.
struct ImportProgressReporter;

impl ProgressReporter for ImportProgressReporter {
    // Called when an operation begins; log the initial progress state.
    fn start(&self, info: &ProgressInfo) {
        info!("{}", info);
    }
    // Called on each progress tick; log the current state.
    fn update(&self, info: &ProgressInfo) {
        info!("{}", info);
    }
    // Called once on completion; log only the operation name.
    fn finish(&self, info: &ProgressInfo) {
        info!("DONE: {}", info.operation);
    }
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
// Initialize the tracing subscriber with the default log level as debug and no decorations
|
||||
// Parse the environment filter first, handling any errors with our custom error type
|
||||
|
|
@ -1254,6 +1305,89 @@ fn main() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
Commands::Archive {
|
||||
source,
|
||||
output,
|
||||
publisher,
|
||||
packages,
|
||||
} => {
|
||||
info!(
|
||||
"Creating archive from {} to {}",
|
||||
source.display(),
|
||||
output.display()
|
||||
);
|
||||
|
||||
let source_repo = FileBackend::open(&source)?;
|
||||
let mut writer = libips::repository::ArchiveWriter::create(&output)?;
|
||||
|
||||
let fmri_filter: Option<Vec<libips::fmri::Fmri>> = if let Some(pkg_strs) = packages {
|
||||
let mut fmris = Vec::new();
|
||||
for s in pkg_strs {
|
||||
let fmri = libips::fmri::Fmri::parse(s)
|
||||
.map_err(|e| Pkg6RepoError::Other(format!("invalid FMRI '{}': {}", s, e)))?;
|
||||
fmris.push(fmri);
|
||||
}
|
||||
Some(fmris)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let info = source_repo.get_info()?;
|
||||
let publishers_to_archive: Vec<String> = if let Some(pub_name) = publisher {
|
||||
vec![pub_name.clone()]
|
||||
} else {
|
||||
info.publishers.iter().map(|p| p.name.clone()).collect()
|
||||
};
|
||||
|
||||
for pub_name in &publishers_to_archive {
|
||||
writer.add_from_repository(
|
||||
&source_repo,
|
||||
pub_name,
|
||||
fmri_filter.as_deref(),
|
||||
)?;
|
||||
}
|
||||
|
||||
writer.finish()?;
|
||||
|
||||
info!("Archive created successfully at {}", output.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
Commands::ImportArchive {
|
||||
source,
|
||||
destination,
|
||||
publisher,
|
||||
} => {
|
||||
info!(
|
||||
"Importing archive from {} to {}",
|
||||
source.display(),
|
||||
destination.display()
|
||||
);
|
||||
|
||||
let source_repo = libips::repository::ArchiveBackend::open(&source)?;
|
||||
let dest_repo = FileBackend::open(&destination)?;
|
||||
|
||||
let progress = ImportProgressReporter;
|
||||
let mut receiver = libips::recv::PackageReceiver::new(&source_repo, dest_repo);
|
||||
receiver = receiver.with_progress(&progress);
|
||||
|
||||
let source_info = source_repo.get_info()?;
|
||||
let publishers_to_import: Vec<String> = if let Some(pub_name) = publisher {
|
||||
vec![pub_name.clone()]
|
||||
} else {
|
||||
source_info.publishers.iter().map(|p| p.name.clone()).collect()
|
||||
};
|
||||
|
||||
for pub_name in &publishers_to_import {
|
||||
let packages = source_repo.list_packages(Some(pub_name), None)?;
|
||||
let fmris: Vec<libips::fmri::Fmri> = packages.into_iter().map(|p| p.fmri).collect();
|
||||
receiver.receive(Some(pub_name), &fmris, false)?;
|
||||
}
|
||||
|
||||
info!("Archive imported successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
Commands::ImportPkg5 {
|
||||
source,
|
||||
destination,
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue