mirror of
https://codeberg.org/Toasterson/ips.git
synced 2026-04-10 13:20:42 +00:00
Refactor: Remove LZ4 compression utilities and clean up obsolete catalog functions
- Eliminated LZ4-based compression and related decoding utilities for manifest handling.
- Removed unused private helper and legacy methods in `catalog.rs`.
- Standardized database path handling, replacing `catalog_db_path` and `obsoleted_db_path` with `active_db_path` and `obsolete_db_path`.
- Added `#[allow(dead_code)]` annotations for unused methods in `file_backend.rs` to reduce warnings.
This commit is contained in:
parent
7b9391f36e
commit
0de84b80c8
4 changed files with 12 additions and 73 deletions
|
|
@ -1,10 +1,8 @@
|
|||
use crate::actions::Manifest;
|
||||
use crate::fmri::Fmri;
|
||||
use lz4::{Decoder as Lz4Decoder, EncoderBuilder as Lz4EncoderBuilder};
|
||||
use miette::Diagnostic;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::io::{Cursor, Read, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use thiserror::Error;
|
||||
|
||||
|
|
@ -43,52 +41,6 @@ pub enum CatalogError {
|
|||
/// Result type for catalog operations
|
||||
pub type Result<T> = std::result::Result<T, CatalogError>;
|
||||
|
||||
// Internal helpers for (de)compressing manifest JSON payloads stored in redb
/// Heuristic check: does this payload look like raw (uncompressed) JSON?
///
/// Skips any leading run of space, `\n`, `\r`, or `\t` bytes and reports
/// whether the first remaining byte opens a JSON object (`{`) or array (`[`).
/// An empty or all-whitespace payload is not considered JSON.
fn is_likely_json(bytes: &[u8]) -> bool {
    bytes
        .iter()
        // Note: deliberately NOT `is_ascii_whitespace()` — that would also
        // skip form-feed (0x0C), which the stored format never emits.
        .find(|&&b| !matches!(b, b' ' | b'\n' | b'\r' | b'\t'))
        .map_or(false, |&b| matches!(b, b'{' | b'['))
}
|
||||
|
||||
pub(crate) fn compress_json_lz4(bytes: &[u8]) -> Result<Vec<u8>> {
|
||||
let mut dst = Vec::with_capacity(bytes.len() / 2 + 32);
|
||||
let mut enc = Lz4EncoderBuilder::new()
|
||||
.level(4)
|
||||
.build(Cursor::new(&mut dst))
|
||||
.map_err(|e| CatalogError::Database(format!("Failed to create LZ4 encoder: {}", e)))?;
|
||||
enc.write_all(bytes)
|
||||
.map_err(|e| CatalogError::Database(format!("Failed to write to LZ4 encoder: {}", e)))?;
|
||||
let (_out, res) = enc.finish();
|
||||
res.map_err(|e| CatalogError::Database(format!("Failed to finish LZ4 encoding: {}", e)))?;
|
||||
Ok(dst)
|
||||
}
|
||||
|
||||
pub(crate) fn decode_manifest_bytes(bytes: &[u8]) -> Result<Manifest> {
|
||||
// Fast path: uncompressed legacy JSON
|
||||
if is_likely_json(bytes) {
|
||||
return Ok(serde_json::from_slice::<Manifest>(bytes)?);
|
||||
}
|
||||
// Try LZ4 frame decode
|
||||
let mut decoder = match Lz4Decoder::new(Cursor::new(bytes)) {
|
||||
Ok(d) => d,
|
||||
Err(_) => {
|
||||
// Fallback: attempt JSON anyway
|
||||
return Ok(serde_json::from_slice::<Manifest>(bytes)?);
|
||||
}
|
||||
};
|
||||
let mut out = Vec::new();
|
||||
if let Err(_e) = decoder.read_to_end(&mut out) {
|
||||
// On decode failure, try JSON as last resort
|
||||
return Ok(serde_json::from_slice::<Manifest>(bytes)?);
|
||||
}
|
||||
Ok(serde_json::from_slice::<Manifest>(&out)?)
|
||||
}
|
||||
|
||||
/// Check if a package manifest is marked as obsolete.
|
||||
pub(crate) fn is_package_obsolete(manifest: &Manifest) -> bool {
|
||||
manifest.attributes.iter().any(|attr| {
|
||||
|
|
@ -203,11 +155,8 @@ impl ImageCatalog {
|
|||
// Removed: create_or_update_manifest - no longer needed, use build_shards() instead
|
||||
//
|
||||
// Removed: ensure_fmri_attribute - no longer needed, use build_shards() instead
|
||||
|
||||
/// Check if a package is obsolete (deprecated - use free function is_package_obsolete instead)
///
/// Thin compatibility wrapper kept for older call sites inside this type;
/// it takes no instance state into account and simply delegates to the
/// module-level `is_package_obsolete` free function.
fn is_package_obsolete(&self, manifest: &Manifest) -> bool {
    is_package_obsolete(manifest)
}
|
||||
//
|
||||
// Removed: is_package_obsolete - use free function is_package_obsolete instead
|
||||
|
||||
/// Query the catalog for packages matching a pattern
|
||||
///
|
||||
|
|
@ -397,20 +346,6 @@ impl ImageCatalog {
|
|||
))
|
||||
}
|
||||
|
||||
/// Dump the contents of the catalog table (private helper)
|
||||
fn dump_catalog_table(&self, _tx: &()) -> Result<()> {
|
||||
Err(CatalogError::Database(
|
||||
"dump_catalog_table is not yet implemented for SQLite catalog".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Dump the contents of the obsoleted table (private helper)
|
||||
fn dump_obsoleted_table(&self, _tx: &()) -> Result<()> {
|
||||
Err(CatalogError::Database(
|
||||
"dump_obsoleted_table is not yet implemented for SQLite catalog".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Get database statistics
|
||||
///
|
||||
/// Deprecated: This method used redb. Needs reimplementation for SQLite.
|
||||
|
|
|
|||
|
|
@ -662,8 +662,8 @@ impl Image {
|
|||
// Create the catalog and build it
|
||||
let catalog = ImageCatalog::new(
|
||||
self.catalog_dir(),
|
||||
self.catalog_db_path(),
|
||||
self.obsoleted_db_path(),
|
||||
self.active_db_path(),
|
||||
self.obsolete_db_path(),
|
||||
);
|
||||
catalog
|
||||
.build_catalog(&publisher_names)
|
||||
|
|
@ -674,8 +674,8 @@ impl Image {
|
|||
pub fn query_catalog(&self, pattern: Option<&str>) -> Result<Vec<PackageInfo>> {
|
||||
let catalog = ImageCatalog::new(
|
||||
self.catalog_dir(),
|
||||
self.catalog_db_path(),
|
||||
self.obsoleted_db_path(),
|
||||
self.active_db_path(),
|
||||
self.obsolete_db_path(),
|
||||
);
|
||||
catalog
|
||||
.query_packages(pattern)
|
||||
|
|
|
|||
|
|
@ -149,6 +149,7 @@ struct SearchIndex {
|
|||
|
||||
impl SearchIndex {
|
||||
/// Create a new empty search index
|
||||
#[allow(dead_code)]
|
||||
fn new() -> Self {
|
||||
SearchIndex {
|
||||
terms: HashMap::new(),
|
||||
|
|
@ -161,6 +162,7 @@ impl SearchIndex {
|
|||
}
|
||||
|
||||
/// Add a term to the index for a package
|
||||
#[allow(dead_code)]
|
||||
fn add_term(
|
||||
&mut self,
|
||||
term: &str,
|
||||
|
|
@ -323,6 +325,7 @@ impl SearchIndex {
|
|||
}
|
||||
|
||||
/// Save the index to a file
|
||||
#[allow(dead_code)]
|
||||
fn save(&self, path: &Path) -> Result<()> {
|
||||
// Create the parent directory if it doesn't exist
|
||||
if let Some(parent) = path.parent() {
|
||||
|
|
@ -2903,6 +2906,7 @@ impl FileBackend {
|
|||
}
|
||||
|
||||
/// Build a search index for a publisher
|
||||
#[allow(dead_code)]
|
||||
fn build_search_index(&self, publisher: &str) -> Result<()> {
|
||||
info!("Building search index for publisher: {}", publisher);
|
||||
|
||||
|
|
|
|||
|
|
@ -1383,8 +1383,8 @@ fn main() -> Result<()> {
|
|||
// Create a catalog object for the catalog.redb database
|
||||
let catalog = libips::image::catalog::ImageCatalog::new(
|
||||
image.catalog_dir(),
|
||||
image.catalog_db_path(),
|
||||
image.obsoleted_db_path(),
|
||||
image.active_db_path(),
|
||||
image.obsolete_db_path(),
|
||||
);
|
||||
|
||||
// Create an installed packages object for the installed.redb database
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue