mirror of
https://codeberg.org/Toasterson/ips.git
synced 2026-04-10 13:20:42 +00:00
fix: Resolve SHA1/SHA-256 hash mismatch in manifest lookups, complete search endpoints
fetch_manifest and the receiver were parsing legacy text manifests which reference SHA1 hashes, while pkg5-imported repos store files under SHA-256. Prefer the JSON manifest (.json) everywhere so payload lookups find the correct files. Fixes archive creation, archive import, and local FileBackend-to-FileBackend receive for pkg5-imported repositories. Also completes the search REST API: v0 now returns 204 for empty results per the pkg5 spec, and publisher-less routes (/search/0/, /search/1/) are added for clients that omit the publisher prefix.
This commit is contained in:
parent
4646d2a9c4
commit
f8ab1f94c1
7 changed files with 186 additions and 20 deletions
|
|
@ -194,8 +194,9 @@ impl<'a, S: ReadableRepository + Sync> PackageReceiver<'a, S> {
|
|||
let progress = self.progress.unwrap_or(&NoopProgressReporter);
|
||||
|
||||
let manifest_text = self.source.fetch_manifest_text(publisher, fmri)?;
|
||||
let manifest =
|
||||
Manifest::parse_string(manifest_text.clone()).map_err(RepositoryError::from)?;
|
||||
// Use fetch_manifest which prefers JSON (has correct SHA-256 hashes
|
||||
// from add_file) over legacy text (which may have SHA1 hashes)
|
||||
let manifest = self.source.fetch_manifest(publisher, fmri)?;
|
||||
|
||||
// Ensure publisher exists in destination
|
||||
let dest_info = self.dest.get_info()?;
|
||||
|
|
|
|||
|
|
@ -252,6 +252,28 @@ impl ReadableRepository for ArchiveBackend {
|
|||
}
|
||||
|
||||
/// Fetch and parse the manifest for `fmri` from the archive.
///
/// Prefers the JSON manifest (`<manifest_path>.json`), which carries the
/// SHA-256 payload hashes written by `add_file`; pkg5-imported repositories
/// store payload files under SHA-256, while legacy text manifests may still
/// reference SHA1 hashes. Falls back to parsing the legacy text manifest
/// only when no JSON manifest entry exists in the archive.
///
/// # Errors
/// Returns `RepositoryError::Other` if the archive mutex is poisoned, and
/// `RepositoryError::ArchiveError` if the JSON entry cannot be read or parsed.
fn fetch_manifest(&self, publisher: &str, fmri: &Fmri) -> Result<Manifest> {
    let version_str = fmri.version();
    let base_path = Self::manifest_path(publisher, &fmri.name, &version_str);
    let json_path = format!("{}.json", base_path);

    // Prefer the JSON manifest (has correct SHA-256 hashes from add_file).
    // The extra scope releases the archive lock before the text fallback,
    // which re-acquires it inside fetch_manifest_text.
    {
        let mut archive = self
            .archive
            .lock()
            .map_err(|e| RepositoryError::Other(format!("archive lock poisoned: {}", e)))?;

        // by_name returning Err here just means "no JSON entry"; fall through.
        if let Ok(mut entry) = archive.by_name(&json_path) {
            let mut buf = String::new();
            entry
                .read_to_string(&mut buf)
                .map_err(|e| RepositoryError::ArchiveError(format!("failed to read json manifest: {}", e)))?;
            return serde_json::from_str(&buf)
                .map_err(|e| RepositoryError::ArchiveError(format!("invalid json manifest: {}", e)));
        }
    }

    // Fall back to legacy text manifest
    let text = self.fetch_manifest_text(publisher, fmri)?;
    Manifest::parse_string(text).map_err(RepositoryError::from)
}
|
||||
|
|
|
|||
|
|
@ -117,10 +117,13 @@ impl ArchiveWriter {
|
|||
) -> Result<()> {
|
||||
info!("Adding package {} from publisher {}", fmri, publisher);
|
||||
|
||||
// Fetch the manifest text (legacy IPS format)
|
||||
// Fetch the manifest text (legacy IPS format) for archival
|
||||
let manifest_text = source.fetch_manifest_text(publisher, fmri)?;
|
||||
let manifest = crate::actions::Manifest::parse_string(manifest_text.clone())
|
||||
.map_err(RepositoryError::from)?;
|
||||
|
||||
// Fetch the parsed manifest (prefers JSON which has correct SHA-256 hashes
|
||||
// from add_file, falling back to text parsing). pkg5-imported repos store
|
||||
// files under SHA-256 hashes but legacy manifests reference SHA1 hashes.
|
||||
let manifest = source.fetch_manifest(publisher, fmri)?;
|
||||
|
||||
let version_str = fmri.version();
|
||||
let encoded_stem = FileBackend::url_encode(&fmri.name);
|
||||
|
|
|
|||
|
|
@ -1334,10 +1334,28 @@ impl ReadableRepository for FileBackend {
|
|||
));
|
||||
}
|
||||
|
||||
// Helper: try JSON manifest first (has correct SHA-256 hashes from add_file),
|
||||
// then fall back to legacy text manifest.
|
||||
let try_parse = |path: &Path| -> Option<Result<crate::actions::Manifest>> {
|
||||
let json_path = PathBuf::from(format!("{}.json", path.display()));
|
||||
if json_path.exists() {
|
||||
return Some(
|
||||
crate::actions::Manifest::parse_file(&json_path)
|
||||
.map_err(RepositoryError::from),
|
||||
);
|
||||
}
|
||||
if path.exists() {
|
||||
return Some(
|
||||
crate::actions::Manifest::parse_file(path).map_err(RepositoryError::from),
|
||||
);
|
||||
}
|
||||
None
|
||||
};
|
||||
|
||||
// Preferred path: publisher-scoped manifest path
|
||||
let path = Self::construct_manifest_path(&self.path, publisher, fmri.stem(), &version);
|
||||
if path.exists() {
|
||||
return crate::actions::Manifest::parse_file(&path).map_err(RepositoryError::from);
|
||||
if let Some(result) = try_parse(&path) {
|
||||
return result;
|
||||
}
|
||||
|
||||
// Fallbacks: global pkg layout without publisher
|
||||
|
|
@ -1348,8 +1366,8 @@ impl ReadableRepository for FileBackend {
|
|||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt1.exists() {
|
||||
return crate::actions::Manifest::parse_file(&alt1).map_err(RepositoryError::from);
|
||||
if let Some(result) = try_parse(&alt1) {
|
||||
return result;
|
||||
}
|
||||
|
||||
let alt2 = self
|
||||
|
|
@ -1359,8 +1377,8 @@ impl ReadableRepository for FileBackend {
|
|||
.join("pkg")
|
||||
.join(&encoded_stem)
|
||||
.join(&encoded_version);
|
||||
if alt2.exists() {
|
||||
return crate::actions::Manifest::parse_file(&alt2).map_err(RepositoryError::from);
|
||||
if let Some(result) = try_parse(&alt2) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Err(RepositoryError::NotFound(format!(
|
||||
|
|
|
|||
|
|
@ -9,7 +9,27 @@ pub async fn get_search_v0(
|
|||
State(repo): State<Arc<DepotRepo>>,
|
||||
Path((publisher, token)): Path<(String, String)>,
|
||||
) -> Result<Response, DepotError> {
|
||||
let results = repo.search(Some(&publisher), &token, false)?;
|
||||
search_v0_impl(&repo, Some(&publisher), &token)
|
||||
}
|
||||
|
||||
/// GET `/search/0/{token}` — pkg5 search v0 without a publisher prefix,
/// for clients that omit the publisher. Passing `None` lets the repo
/// decide the publisher scope (presumably default/all publishers —
/// confirm against `DepotRepo::search`).
pub async fn get_default_search_v0(
    State(repo): State<Arc<DepotRepo>>,
    Path(token): Path<String>,
) -> Result<Response, DepotError> {
    search_v0_impl(&repo, None, &token)
}
|
||||
|
||||
fn search_v0_impl(
|
||||
repo: &DepotRepo,
|
||||
publisher: Option<&str>,
|
||||
token: &str,
|
||||
) -> Result<Response, DepotError> {
|
||||
let results = repo.search(publisher, token, false)?;
|
||||
|
||||
// No results -> 204 No Content per pkg5 spec
|
||||
if results.is_empty() {
|
||||
return Ok(axum::http::StatusCode::NO_CONTENT.into_response());
|
||||
}
|
||||
|
||||
// Format: {index_type} {fmri} {action_type} {value}
|
||||
let mut body = String::new();
|
||||
|
|
@ -27,7 +47,14 @@ pub async fn get_search_v1(
|
|||
State(repo): State<Arc<DepotRepo>>,
|
||||
Path((publisher, token)): Path<(String, String)>,
|
||||
) -> Result<Response, DepotError> {
|
||||
search_v1_impl(&repo, &publisher, &token)
|
||||
search_v1_impl(&repo, Some(&publisher), &token)
|
||||
}
|
||||
|
||||
/// GET `/search/1/{token}` — pkg5 search v1 without a publisher prefix,
/// for clients that omit the publisher. `None` defers publisher scoping
/// to `DepotRepo::search`.
pub async fn get_default_search_v1(
    State(repo): State<Arc<DepotRepo>>,
    Path(token): Path<String>,
) -> Result<Response, DepotError> {
    search_v1_impl(&repo, None, &token)
}
|
||||
|
||||
pub async fn post_search_v1(
|
||||
|
|
@ -37,12 +64,20 @@ pub async fn post_search_v1(
|
|||
) -> Result<Response, DepotError> {
|
||||
// The POST body contains the token directly
|
||||
let token = body.trim();
|
||||
search_v1_impl(&repo, &publisher, token)
|
||||
search_v1_impl(&repo, Some(&publisher), token)
|
||||
}
|
||||
|
||||
/// POST `/search/1/` — pkg5 search v1 without a publisher prefix.
/// The request body carries the v1 token directly (same format as the
/// GET path segment); surrounding whitespace is trimmed before parsing.
pub async fn post_default_search_v1(
    State(repo): State<Arc<DepotRepo>>,
    body: String,
) -> Result<Response, DepotError> {
    let token = body.trim();
    search_v1_impl(&repo, None, token)
}
|
||||
|
||||
fn search_v1_impl(
|
||||
repo: &DepotRepo,
|
||||
publisher: &str,
|
||||
publisher: Option<&str>,
|
||||
token: &str,
|
||||
) -> Result<Response, DepotError> {
|
||||
// Search v1 token format: "<case>_<rtype>_<trans>_<installroot>_<query>"
|
||||
|
|
@ -59,7 +94,7 @@ fn search_v1_impl(
|
|||
let return_type = parts.get(1).copied().unwrap_or("2");
|
||||
|
||||
// Run search
|
||||
let results = repo.search(Some(publisher), query, case_sensitive)?;
|
||||
let results = repo.search(publisher, query, case_sensitive)?;
|
||||
|
||||
// No results -> 204 No Content per v1 spec
|
||||
if results.is_empty() {
|
||||
|
|
|
|||
|
|
@ -66,6 +66,9 @@ pub fn app_router(state: Arc<DepotRepo>) -> Router {
|
|||
.route("/{publisher}/search/0/{token}", get(search::get_search_v0))
|
||||
.route("/{publisher}/search/1/{token}", get(search::get_search_v1))
|
||||
.route("/{publisher}/search/1/", post(search::post_search_v1))
|
||||
.route("/search/0/{token}", get(search::get_default_search_v0))
|
||||
.route("/search/1/{token}", get(search::get_default_search_v1))
|
||||
.route("/search/1/", post(search::post_default_search_v1))
|
||||
.route(
|
||||
"/{publisher}/index/0/{command}",
|
||||
get(index::get_index_v0),
|
||||
|
|
|
|||
|
|
@ -560,15 +560,17 @@ async fn test_search_endpoint() {
|
|||
);
|
||||
assert!(body.contains("example"), "search v1 should find 'example' package");
|
||||
|
||||
// 4. Test search v0 - no results
|
||||
// 4. Test search v0 - no results returns 204
|
||||
let resp = client
|
||||
.get(format!("{}/test/search/0/nonexistentpackage", base_url))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(resp.status().is_success());
|
||||
let body = resp.text().await.unwrap();
|
||||
assert!(body.is_empty(), "search for nonexistent should return empty body");
|
||||
assert_eq!(
|
||||
resp.status().as_u16(),
|
||||
204,
|
||||
"search v0 with no results should return 204"
|
||||
);
|
||||
|
||||
// 5. Test search v1 - no results returns 204
|
||||
let resp = client
|
||||
|
|
@ -597,4 +599,86 @@ async fn test_search_endpoint() {
|
|||
body.contains("example"),
|
||||
"search by summary keyword should find the package"
|
||||
);
|
||||
|
||||
// 7. Test publisher-less search v0 (falls back to default publisher)
|
||||
let resp = client
|
||||
.get(format!("{}/search/0/example", base_url))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(
|
||||
resp.status().is_success(),
|
||||
"publisher-less search v0 should succeed"
|
||||
);
|
||||
let body = resp.text().await.unwrap();
|
||||
assert!(
|
||||
body.contains("example"),
|
||||
"publisher-less search v0 should find 'example' package"
|
||||
);
|
||||
|
||||
// 8. Test publisher-less search v1
|
||||
let resp = client
|
||||
.get(format!(
|
||||
"{}/search/1/False_2_None_None_example",
|
||||
base_url
|
||||
))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(
|
||||
resp.status().is_success(),
|
||||
"publisher-less search v1 should succeed"
|
||||
);
|
||||
let body = resp.text().await.unwrap();
|
||||
assert!(
|
||||
body.contains("example"),
|
||||
"publisher-less search v1 should find 'example' package"
|
||||
);
|
||||
|
||||
// 9. Test POST search v1
|
||||
let resp = client
|
||||
.post(format!("{}/test/search/1/", base_url))
|
||||
.body("False_2_None_None_example")
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(
|
||||
resp.status().is_success(),
|
||||
"POST search v1 should succeed"
|
||||
);
|
||||
let body = resp.text().await.unwrap();
|
||||
assert!(
|
||||
body.contains("example"),
|
||||
"POST search v1 should find 'example' package"
|
||||
);
|
||||
|
||||
// 10. Test search v0 response format matches pkg5 spec
|
||||
let resp = client
|
||||
.get(format!("{}/test/search/0/example", base_url))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
let content_type = resp
|
||||
.headers()
|
||||
.get("content-type")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_string();
|
||||
assert!(
|
||||
content_type.contains("text/plain"),
|
||||
"search v0 content-type should be text/plain, got: {}",
|
||||
content_type
|
||||
);
|
||||
let body = resp.text().await.unwrap();
|
||||
// Each line should have format: {index_type} {fmri} {action_type} {value}
|
||||
for line in body.lines() {
|
||||
let parts: Vec<&str> = line.splitn(4, ' ').collect();
|
||||
assert_eq!(
|
||||
parts.len(),
|
||||
4,
|
||||
"search v0 line should have 4 space-separated fields, got: '{}'",
|
||||
line
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue