fix: Store payload files under both compressed and primary hash

Files were only stored under their compressed SHA256 hash, but pkg5
clients and the IPS protocol look up files by the manifest's primary
hash (typically SHA1/SHA256 uncompressed). Create a hardlink from the
primary hash path to the compressed hash file so both old and new
clients can find payloads via the server's /file/0/<hash> endpoint.
This commit is contained in:
Till Wegmueller 2026-03-15 20:44:43 +01:00
parent 0a28909e9e
commit a0fe229ba4
2 changed files with 37 additions and 11 deletions

View file

@@ -611,7 +611,8 @@ mod tests {
); );
// Verify payload files exist in the destination repo's file store. // Verify payload files exist in the destination repo's file store.
// The dest re-compresses files so hashes differ from source. Count files in file store. // Each file has both a compressed-hash entry and a primary-hash hardlink,
// so total file count is 2x the number of payload files.
let file_store = dest_dir.path().join("publisher/test/file"); let file_store = dest_dir.path().join("publisher/test/file");
let mut payload_count = 0; let mut payload_count = 0;
if file_store.exists() { if file_store.exists() {
@@ -626,11 +627,11 @@ mod tests {
} }
} }
} }
assert_eq!( assert!(
payload_count, payload_count >= file_contents.len(),
"At least {} payload files should exist in destination file store, found {}",
file_contents.len(), file_contents.len(),
"All {} payload files should exist in destination file store", payload_count
file_contents.len()
); );
Ok(()) Ok(())

View file

@@ -102,8 +102,8 @@ pub struct Transaction {
path: PathBuf, path: PathBuf,
/// Manifest being updated /// Manifest being updated
manifest: Manifest, manifest: Manifest,
/// Files to be published /// Files to be published: (source_path, compressed_hash, primary_hash)
files: Vec<(PathBuf, String)>, // (source_path, sha256) files: Vec<(PathBuf, String, String)>,
/// Repository reference /// Repository reference
repo: PathBuf, repo: PathBuf,
/// Publisher name /// Publisher name
@@ -277,8 +277,9 @@ impl Transaction {
}; };
// Add a file to the list for later processing during commit // Add a file to the list for later processing during commit
// Track both compressed hash (storage key) and primary hash (for compatibility lookups)
self.files self.files
.push((temp_file_path.clone(), compressed_hash.clone())); .push((temp_file_path.clone(), compressed_hash.clone(), hash.clone()));
// Set the primary identifier (uncompressed SHA256 hash) // Set the primary identifier (uncompressed SHA256 hash)
payload.primary_identifier = Digest { payload.primary_identifier = Digest {
@@ -392,9 +393,9 @@ impl Transaction {
} }
// Move files to their final location (atomic rename, same filesystem) // Move files to their final location (atomic rename, same filesystem)
for (source_path, hash) in self.files { for (source_path, compressed_hash, primary_hash) in self.files {
let dest_path = let dest_path =
FileBackend::construct_file_path_with_publisher(&self.repo, &publisher, &hash); FileBackend::construct_file_path_with_publisher(&self.repo, &publisher, &compressed_hash);
if let Some(parent) = dest_path.parent() { if let Some(parent) = dest_path.parent() {
fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError { fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
@@ -406,10 +407,34 @@ impl Transaction {
if !dest_path.exists() { if !dest_path.exists() {
fs::rename(&source_path, &dest_path).map_err(|e| RepositoryError::FileRenameError { fs::rename(&source_path, &dest_path).map_err(|e| RepositoryError::FileRenameError {
from: source_path.clone(), from: source_path.clone(),
to: dest_path, to: dest_path.clone(),
source: e, source: e,
})?; })?;
} }
// Create a hardlink from the primary (uncompressed) hash so clients
// that look up files by the manifest's primary hash can find them
if primary_hash != compressed_hash {
let primary_path =
FileBackend::construct_file_path_with_publisher(&self.repo, &publisher, &primary_hash);
if !primary_path.exists() {
if let Some(parent) = primary_path.parent() {
fs::create_dir_all(parent).map_err(|e| RepositoryError::DirectoryCreateError {
path: parent.to_path_buf(),
source: e,
})?;
}
if let Err(e) = fs::hard_link(&dest_path, &primary_path) {
debug!("Failed to create hardlink from {} to {}: {}, falling back to copy",
dest_path.display(), primary_path.display(), e);
fs::copy(&dest_path, &primary_path).map_err(|e| RepositoryError::FileCopyError {
from: dest_path.clone(),
to: primary_path,
source: e,
})?;
}
}
}
} }
// Create the package directory if it doesn't exist // Create the package directory if it doesn't exist