diff --git a/.idea/solstice-ci.iml b/.idea/solstice-ci.iml
index eac4bb5..596d292 100644
--- a/.idea/solstice-ci.iml
+++ b/.idea/solstice-ci.iml
@@ -12,6 +12,7 @@
+
diff --git a/crates/ciadm/Cargo.toml b/crates/ciadm/Cargo.toml
index a84edf3..dea67f3 100644
--- a/crates/ciadm/Cargo.toml
+++ b/crates/ciadm/Cargo.toml
@@ -8,4 +8,10 @@ clap = { version = "4", features = ["derive", "env"] }
miette = { version = "7", features = ["fancy"] }
tracing = "0.1"
common = { path = "../common" }
+logs-client = { path = "../logs-client" }
tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
+ratatui = "0.29"
+crossterm = { version = "0.28", features = ["event-stream"] }
+futures-util = "0.3"
+chrono = "0.4"
+uuid = { version = "1", features = ["v4"] }
diff --git a/crates/ciadm/src/main.rs b/crates/ciadm/src/main.rs
index dcc8195..ff13e2f 100644
--- a/crates/ciadm/src/main.rs
+++ b/crates/ciadm/src/main.rs
@@ -1,6 +1,21 @@
+use std::collections::BTreeMap;
+use std::io::{self, Stdout};
+use std::process::Command;
+use std::time::Duration;
+
use clap::{Parser, Subcommand};
-use miette::Result;
-use tracing::info;
+use crossterm::event::{Event, EventStream, KeyCode, KeyEvent, KeyModifiers};
+use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
+use crossterm::{execute, terminal};
+use futures_util::StreamExt;
+use logs_client::{JobGroup, LogsClient};
+use miette::{IntoDiagnostic, Result};
+use ratatui::layout::{Constraint, Direction, Layout, Rect};
+use ratatui::style::{Color, Modifier, Style};
+use ratatui::text::{Line, Span, Text};
+use ratatui::widgets::{Block, Borders, List, ListItem, Paragraph, Wrap};
+use ratatui::{Frame, Terminal};
+use tracing::{info, warn};
#[derive(Parser, Debug)]
#[command(name = "ciadm", version, about = "Solstice CI Admin CLI")]
@@ -29,6 +44,36 @@ enum Commands {
#[arg(long)]
job_id: String,
},
+ /// List recent jobs from the logs service
+ Jobs {
+ /// Logs service base URL (e.g., https://logs.prod.example.com)
+ #[arg(long, env = "LOGS_BASE_URL")]
+        logs_base_url: Option<String>,
+ /// Filter to a repository URL (defaults to git remote origin)
+ #[arg(long)]
+        repo: Option<String>,
+ },
+ /// Fetch logs for a specific job
+ Logs {
+ /// Logs service base URL (e.g., https://logs.prod.example.com)
+ #[arg(long, env = "LOGS_BASE_URL")]
+        logs_base_url: Option<String>,
+ /// Job request ID
+ #[arg(long)]
+ job_id: String,
+ /// Log category (defaults to "default" or first category)
+ #[arg(long)]
+        category: Option<String>,
+ },
+ /// Interactive TUI for browsing jobs and logs
+ Tui {
+ /// Logs service base URL (e.g., https://logs.prod.example.com)
+ #[arg(long, env = "LOGS_BASE_URL")]
+        logs_base_url: Option<String>,
+ /// Preferred repository URL (defaults to git remote origin)
+ #[arg(long)]
+        repo: Option<String>,
+ },
}
#[tokio::main(flavor = "multi_thread")]
@@ -50,6 +95,483 @@ async fn main() -> Result<()> {
// TODO: Query orchestrator for job status
println!("Job {job_id} status: PENDING (stub)");
}
+ Commands::Jobs {
+ logs_base_url,
+ repo,
+ } => {
+ let base_url = resolve_logs_base_url(logs_base_url)?;
+ let repo = resolve_repo_url(repo);
+ cmd_jobs(&base_url, repo.as_deref()).await?;
+ }
+ Commands::Logs {
+ logs_base_url,
+ job_id,
+ category,
+ } => {
+ let base_url = resolve_logs_base_url(logs_base_url)?;
+ cmd_logs(&base_url, &job_id, category.as_deref()).await?;
+ }
+ Commands::Tui { logs_base_url, repo } => {
+ let base_url = resolve_logs_base_url(logs_base_url)?;
+ let repo = resolve_repo_url(repo);
+ run_tui(&base_url, repo).await?;
+ }
}
Ok(())
}
+
+fn resolve_logs_base_url(arg: Option<String>) -> Result<String> {
+ arg.ok_or_else(|| miette::miette!("LOGS_BASE_URL is required (set via env or --logs-base-url)"))
+}
+
+fn resolve_repo_url(arg: Option<String>) -> Option<String> {
+ if arg.is_some() {
+ return arg;
+ }
+ detect_git_remote()
+}
+
+fn detect_git_remote() -> Option<String> {
+ let output = Command::new("git")
+ .args(["config", "--get", "remote.origin.url"])
+ .output()
+ .ok()?;
+ if !output.status.success() {
+ return None;
+ }
+ let s = String::from_utf8_lossy(&output.stdout).trim().to_string();
+ if s.is_empty() {
+ None
+ } else {
+ Some(s)
+ }
+}
+
+async fn cmd_jobs(base_url: &str, repo: Option<&str>) -> Result<()> {
+ let client = LogsClient::new(base_url)?;
+ let groups = client.list_jobs().await?;
+ let groups = filter_groups_by_repo(groups, repo);
+ if groups.is_empty() {
+ println!("No jobs found.");
+ return Ok(());
+ }
+ for group in groups {
+ println!(
+ "{} {} ({} jobs)",
+ short_repo(&group.repo_url),
+ short_sha(&group.commit_sha),
+ group.total_jobs
+ );
+ for job in group.jobs {
+ println!(
+ " {} {:<10} {:<10} {}",
+ job.request_id,
+ job.state,
+ job.runs_on.clone().unwrap_or_else(|| "-".to_string()),
+ job.updated_at.to_rfc3339()
+ );
+ }
+ }
+ Ok(())
+}
+
+async fn cmd_logs(base_url: &str, job_id: &str, category: Option<&str>) -> Result<()> {
+ let client = LogsClient::new(base_url)?;
+ let request_id = uuid::Uuid::parse_str(job_id).into_diagnostic()?;
+ let text = if let Some(cat) = category {
+ client.get_logs_by_category(request_id, cat).await?
+ } else {
+ client.get_default_logs(request_id).await?.0
+ };
+ print!("{text}");
+ Ok(())
+}
+
+async fn run_tui(base_url: &str, repo_hint: Option<String>) -> Result<()> {
+ let mut terminal = setup_terminal()?;
+ let res = run_tui_loop(base_url, repo_hint, &mut terminal).await;
+ restore_terminal(terminal)?;
+ res
+}
+
+async fn run_tui_loop(
+ base_url: &str,
+    repo_hint: Option<String>,
+    terminal: &mut Terminal<ratatui::backend::CrosstermBackend<Stdout>>,
+) -> Result<()> {
+ let mut app = TuiApp::new(base_url, repo_hint)?;
+ app.refresh_jobs().await?;
+ app.refresh_logs().await?;
+
+ let mut events = EventStream::new();
+ let mut tick = tokio::time::interval(Duration::from_millis(200));
+
+ loop {
+ terminal.draw(|frame| app.draw(frame))?;
+ tokio::select! {
+ _ = tick.tick() => {}
+ maybe_event = events.next() => {
+ match maybe_event {
+ Some(Ok(Event::Key(key))) => {
+ if app.handle_key(key).await? {
+ break;
+ }
+ }
+ Some(Ok(Event::Resize(_, _))) => {}
+ Some(Err(e)) => {
+ warn!(error = %e, "failed to read terminal event");
+ }
+ None => break,
+ }
+ }
+ }
+ }
+ Ok(())
+}
+
+fn setup_terminal() -> Result<Terminal<ratatui::backend::CrosstermBackend<Stdout>>> {
+ terminal::enable_raw_mode().into_diagnostic()?;
+ let mut stdout = io::stdout();
+ execute!(stdout, EnterAlternateScreen).into_diagnostic()?;
+ let backend = ratatui::backend::CrosstermBackend::new(stdout);
+ Terminal::new(backend).into_diagnostic()
+}
+
+fn restore_terminal(mut terminal: Terminal<ratatui::backend::CrosstermBackend<Stdout>>) -> Result<()> {
+ terminal::disable_raw_mode().into_diagnostic()?;
+ execute!(terminal.backend_mut(), LeaveAlternateScreen).into_diagnostic()?;
+ terminal.show_cursor().into_diagnostic()?;
+ Ok(())
+}
+
+fn filter_groups_by_repo(groups: Vec<JobGroup>, repo: Option<&str>) -> Vec<JobGroup> {
+ if let Some(repo) = repo {
+ groups
+ .into_iter()
+ .filter(|g| g.repo_url == repo)
+ .collect()
+ } else {
+ groups
+ }
+}
+
+fn short_sha(sha: &str) -> String {
+ if sha.len() > 7 {
+ sha[..7].to_string()
+ } else {
+ sha.to_string()
+ }
+}
+
+fn short_repo(repo_url: &str) -> String {
+ let trimmed = repo_url.trim_end_matches(".git");
+ if let Some(rest) = trimmed.strip_prefix("https://") {
+ let parts: Vec<&str> = rest.split('/').collect();
+ if parts.len() >= 3 {
+ return format!("{}/{}", parts[1], parts[2]);
+ }
+ }
+ if let Some(rest) = trimmed.strip_prefix("http://") {
+ let parts: Vec<&str> = rest.split('/').collect();
+ if parts.len() >= 3 {
+ return format!("{}/{}", parts[1], parts[2]);
+ }
+ }
+ if let Some(rest) = trimmed.strip_prefix("ssh://") {
+ let after_host = rest.splitn(2, '/').nth(1).unwrap_or("");
+ let parts: Vec<&str> = after_host.split('/').collect();
+ if parts.len() >= 2 {
+ return format!("{}/{}", parts[0], parts[1]);
+ }
+ }
+ if let Some(idx) = trimmed.find(':') {
+ let after = &trimmed[idx + 1..];
+ let parts: Vec<&str> = after.split('/').collect();
+ if parts.len() >= 2 {
+ return format!("{}/{}", parts[0], parts[1]);
+ }
+ }
+ trimmed.to_string()
+}
+
+struct TuiApp {
+ client: LogsClient,
+    repo_hint: Option<String>,
+    repos: Vec<String>,
+    selected_repo: usize,
+    jobs: Vec<JobEntry>,
+    selected_job: usize,
+    logs_text: String,
+    logs_category: Option<String>,
+ logs_lines: usize,
+ logs_scroll: u16,
+ status: String,
+}
+
+#[derive(Clone)]
+struct JobEntry {
+ request_id: uuid::Uuid,
+ commit_sha: String,
+ state: String,
+    runs_on: Option<String>,
+    updated_at: chrono::DateTime<chrono::Utc>,
+}
+
+impl TuiApp {
+    fn new(base_url: &str, repo_hint: Option<String>) -> Result<Self> {
+ Ok(Self {
+ client: LogsClient::new(base_url)?,
+ repo_hint,
+ repos: Vec::new(),
+ selected_repo: 0,
+ jobs: Vec::new(),
+ selected_job: 0,
+ logs_text: String::new(),
+ logs_category: None,
+ logs_lines: 0,
+ logs_scroll: 0,
+ status: String::new(),
+ })
+ }
+
+ async fn refresh_jobs(&mut self) -> Result<()> {
+ let groups = self.client.list_jobs().await?;
+        let mut repos_map: BTreeMap<String, Vec<JobEntry>> = BTreeMap::new();
+ for group in groups {
+ let entries = group
+ .jobs
+ .into_iter()
+ .map(|job| JobEntry {
+ request_id: job.request_id,
+ commit_sha: group.commit_sha.clone(),
+ state: job.state,
+ runs_on: job.runs_on,
+ updated_at: job.updated_at,
+ });
+ repos_map
+ .entry(group.repo_url)
+ .or_default()
+ .extend(entries);
+ }
+        let mut repos: Vec<String> = repos_map.keys().cloned().collect();
+ repos.sort();
+ self.repos = repos;
+ if let Some(hint) = self.repo_hint.as_deref() {
+ if let Some(idx) = self.repos.iter().position(|r| r == hint) {
+ self.selected_repo = idx;
+ }
+ self.repo_hint = None;
+ }
+ if self.selected_repo >= self.repos.len() {
+ self.selected_repo = 0;
+ }
+ self.jobs = self
+ .current_repo()
+ .and_then(|repo| repos_map.get(repo))
+ .cloned()
+ .unwrap_or_default();
+ self.jobs.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
+ if self.selected_job >= self.jobs.len() {
+ self.selected_job = 0;
+ }
+ if self.repos.is_empty() {
+ self.status = "No repositories found in logs service.".to_string();
+ }
+ Ok(())
+ }
+
+ async fn refresh_logs(&mut self) -> Result<()> {
+ let Some(job) = self.jobs.get(self.selected_job) else {
+ self.logs_text = "No job selected.".to_string();
+ self.logs_category = None;
+ self.logs_lines = self.logs_text.lines().count();
+ self.logs_scroll = 0;
+ return Ok(());
+ };
+ match self.client.get_default_logs(job.request_id).await {
+ Ok((text, category)) => {
+ self.logs_text = text;
+ self.logs_category = category;
+ self.logs_lines = self.logs_text.lines().count();
+ self.logs_scroll = 0;
+ self.status.clear();
+ }
+ Err(err) => {
+ let report = miette::Report::new(err);
+ self.logs_text = format!("{report}");
+ self.logs_category = None;
+ self.logs_lines = self.logs_text.lines().count();
+ self.logs_scroll = 0;
+ self.status = "Failed to load logs.".to_string();
+ }
+ }
+ Ok(())
+ }
+
+ fn current_repo(&self) -> Option<&String> {
+ self.repos.get(self.selected_repo)
+ }
+
+    async fn handle_key(&mut self, key: KeyEvent) -> Result<bool> {
+ match (key.code, key.modifiers) {
+ (KeyCode::Char('q'), _) => return Ok(true),
+ (KeyCode::Char('r'), _) => {
+ if let Err(err) = self.refresh_jobs().await {
+ let report = miette::Report::new(err);
+ self.status = format!("{report}");
+ } else {
+ self.status = "Refreshed jobs.".to_string();
+ }
+ self.refresh_logs().await?;
+ }
+ (KeyCode::Up, _) => {
+ if !self.jobs.is_empty() {
+ self.selected_job = self.selected_job.saturating_sub(1);
+ self.refresh_logs().await?;
+ }
+ }
+ (KeyCode::Down, _) => {
+ if self.selected_job + 1 < self.jobs.len() {
+ self.selected_job += 1;
+ self.refresh_logs().await?;
+ }
+ }
+ (KeyCode::Left, _) => {
+ if self.selected_repo > 0 {
+ self.selected_repo -= 1;
+ self.selected_job = 0;
+ self.refresh_jobs().await?;
+ self.refresh_logs().await?;
+ }
+ }
+ (KeyCode::Right, _) => {
+ if self.selected_repo + 1 < self.repos.len() {
+ self.selected_repo += 1;
+ self.selected_job = 0;
+ self.refresh_jobs().await?;
+ self.refresh_logs().await?;
+ }
+ }
+ (KeyCode::PageUp, _) => {
+ self.logs_scroll = self.logs_scroll.saturating_sub(5);
+ }
+ (KeyCode::PageDown, _) => {
+ let max_scroll = self.logs_lines.saturating_sub(1) as u16;
+ self.logs_scroll = (self.logs_scroll + 5).min(max_scroll);
+ }
+ (KeyCode::Char('c'), KeyModifiers::CONTROL) => return Ok(true),
+ _ => {}
+ }
+ Ok(false)
+ }
+
+ fn draw(&self, frame: &mut Frame) {
+ let size = frame.area();
+ let layout = Layout::default()
+ .direction(Direction::Vertical)
+ .constraints([Constraint::Length(3), Constraint::Min(5), Constraint::Length(2)])
+ .split(size);
+ self.draw_header(frame, layout[0]);
+ self.draw_body(frame, layout[1]);
+ self.draw_footer(frame, layout[2]);
+ }
+
+ fn draw_header(&self, frame: &mut Frame, area: Rect) {
+ let repo_name = self
+ .current_repo()
+ .map(|r| short_repo(r))
+ .unwrap_or_else(|| "-".to_string());
+ let job_info = self.jobs.get(self.selected_job).map(|job| {
+ format!(
+ "{} {} {}",
+ short_sha(&job.commit_sha),
+ job.state,
+ job.runs_on.clone().unwrap_or_else(|| "-".to_string())
+ )
+ });
+ let mut spans = vec![
+ Span::styled("Repo: ", Style::default().add_modifier(Modifier::BOLD)),
+ Span::raw(repo_name),
+ ];
+ if let Some(info) = job_info {
+ spans.push(Span::raw(" "));
+ spans.push(Span::styled(
+ "Job: ",
+ Style::default().add_modifier(Modifier::BOLD),
+ ));
+ spans.push(Span::raw(info));
+ }
+ let title = Paragraph::new(Line::from(spans)).block(Block::default().borders(Borders::ALL));
+ frame.render_widget(title, area);
+ }
+
+ fn draw_body(&self, frame: &mut Frame, area: Rect) {
+ let columns = Layout::default()
+ .direction(Direction::Horizontal)
+ .constraints([Constraint::Percentage(35), Constraint::Percentage(65)])
+ .split(area);
+ self.draw_jobs(frame, columns[0]);
+ self.draw_logs(frame, columns[1]);
+ }
+
+ fn draw_jobs(&self, frame: &mut Frame, area: Rect) {
+        let items: Vec<ListItem> = self
+ .jobs
+ .iter()
+ .map(|job| {
+ let runs_on = job.runs_on.clone().unwrap_or_else(|| "-".to_string());
+ let line = format!(
+ "{} {:<10} {:<8} {}",
+ short_sha(&job.commit_sha),
+ job.state,
+ runs_on,
+ job.updated_at.format("%H:%M:%S")
+ );
+ ListItem::new(line)
+ })
+ .collect();
+ let mut state = ratatui::widgets::ListState::default();
+ state.select(Some(self.selected_job));
+ let list = List::new(items)
+ .block(Block::default().borders(Borders::ALL).title("Jobs"))
+ .highlight_style(
+ Style::default()
+ .bg(Color::Blue)
+ .fg(Color::White)
+ .add_modifier(Modifier::BOLD),
+ )
+ .highlight_symbol(">> ");
+ frame.render_stateful_widget(list, area, &mut state);
+ }
+
+ fn draw_logs(&self, frame: &mut Frame, area: Rect) {
+ let title = match &self.logs_category {
+ Some(cat) => format!("Logs ({cat})"),
+ None => "Logs".to_string(),
+ };
+ let text = if self.logs_text.is_empty() {
+ Text::from("No logs available.")
+ } else {
+ Text::from(self.logs_text.as_str())
+ };
+ let paragraph = Paragraph::new(text)
+ .block(Block::default().borders(Borders::ALL).title(title))
+ .wrap(Wrap { trim: false })
+ .scroll((self.logs_scroll, 0));
+ frame.render_widget(paragraph, area);
+ }
+
+ fn draw_footer(&self, frame: &mut Frame, area: Rect) {
+ let help = "Up/Down: jobs Left/Right: repo PgUp/PgDn: scroll r: refresh q: quit";
+ let mut line = vec![Span::raw(help)];
+ if !self.status.is_empty() {
+ line.push(Span::raw(" "));
+ line.push(Span::styled(
+ &self.status,
+ Style::default().fg(Color::Yellow),
+ ));
+ }
+ let footer = Paragraph::new(Line::from(line)).block(Block::default().borders(Borders::ALL));
+ frame.render_widget(footer, area);
+ }
+}
diff --git a/crates/logs-client/Cargo.toml b/crates/logs-client/Cargo.toml
new file mode 100644
index 0000000..6cdd21f
--- /dev/null
+++ b/crates/logs-client/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "logs-client"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
+chrono = { version = "0.4", features = ["serde"] }
+miette = { version = "7", features = ["fancy"] }
+reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls-native-roots"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+thiserror = "2"
+url = "2"
+uuid = { version = "1", features = ["serde", "v4"] }
\ No newline at end of file
diff --git a/crates/logs-client/src/lib.rs b/crates/logs-client/src/lib.rs
new file mode 100644
index 0000000..2852578
--- /dev/null
+++ b/crates/logs-client/src/lib.rs
@@ -0,0 +1,200 @@
+use chrono::{DateTime, Utc};
+use miette::Diagnostic;
+use reqwest::StatusCode;
+use serde::{Deserialize, Serialize, de::DeserializeOwned};
+use thiserror::Error;
+use url::Url;
+use uuid::Uuid;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct JobLinks {
+ pub logs: String,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct JobSummary {
+ pub request_id: Uuid,
+    pub runs_on: Option<String>,
+ pub state: String,
+    pub updated_at: DateTime<Utc>,
+ pub links: JobLinks,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct JobGroup {
+ pub repo_url: String,
+ pub commit_sha: String,
+    pub last_updated: DateTime<Utc>,
+    pub total_jobs: usize,
+    pub jobs: Vec<JobSummary>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LogCategorySummary {
+ pub category: String,
+ pub count: i64,
+ pub has_errors: bool,
+    pub first_ts: DateTime<Utc>,
+    pub last_ts: DateTime<Utc>,
+}
+
+#[derive(Debug, Error, Diagnostic)]
+pub enum LogsClientError {
+ #[error("invalid logs base URL")]
+ #[diagnostic(
+ code(logs_client.invalid_base_url),
+ help("Set LOGS_BASE_URL to the logs-service base URL, e.g. https://logs.prod.example.com")
+ )]
+ InvalidBaseUrl(#[source] url::ParseError),
+ #[error("request failed for {url}")]
+ #[diagnostic(
+ code(logs_client.request_failed),
+ help("Check network connectivity and TLS configuration for the logs service.")
+ )]
+ Request {
+ url: String,
+ #[source]
+ source: reqwest::Error,
+ },
+ #[error("logs service returned {status} for {url}")]
+ #[diagnostic(
+ code(logs_client.http_status),
+ help("Inspect the response body for details and verify the logs-service is reachable.")
+ )]
+ HttpStatus {
+ status: StatusCode,
+ url: String,
+ body: String,
+ },
+ #[error("failed to parse JSON from {url}")]
+ #[diagnostic(
+ code(logs_client.json_decode),
+ help("Confirm the logs-service response matches the expected schema.")
+ )]
+ Json {
+ url: String,
+ #[source]
+ source: serde_json::Error,
+ },
+}
+
+pub type Result<T> = std::result::Result<T, LogsClientError>;
+
+#[derive(Clone)]
+pub struct LogsClient {
+ base_url: Url,
+ client: reqwest::Client,
+}
+
+impl LogsClient {
+    pub fn new(base_url: &str) -> Result<Self> {
+ let base_url = Url::parse(base_url).map_err(LogsClientError::InvalidBaseUrl)?;
+ Ok(Self {
+ base_url,
+ client: reqwest::Client::new(),
+ })
+ }
+
+    pub async fn list_jobs(&self) -> Result<Vec<JobGroup>> {
+ let url = self.base_url.join("jobs").map_err(LogsClientError::InvalidBaseUrl)?;
+ self.get_json(url).await
+ }
+
+    pub async fn list_log_categories(&self, request_id: Uuid) -> Result<Vec<LogCategorySummary>> {
+ let url = self.job_logs_url(request_id, None)?;
+ self.get_json(url).await
+ }
+
+ pub async fn get_logs_by_category(
+ &self,
+ request_id: Uuid,
+ category: &str,
+    ) -> Result<String> {
+ let url = self.job_logs_url(request_id, Some(category))?;
+ self.get_text(url).await
+ }
+
+    pub async fn get_default_logs(&self, request_id: Uuid) -> Result<(String, Option<String>)> {
+ let categories = self.list_log_categories(request_id).await?;
+ let category = categories
+ .iter()
+ .find(|c| c.category == "default")
+ .map(|c| c.category.clone())
+ .or_else(|| categories.first().map(|c| c.category.clone()));
+ let Some(category) = category else {
+ return Ok(("".to_string(), None));
+ };
+ let text = self.get_logs_by_category(request_id, &category).await?;
+ Ok((text, Some(category)))
+ }
+
+    async fn get_json<T: DeserializeOwned>(&self, url: Url) -> Result<T> {
+ let resp = self
+ .client
+ .get(url.clone())
+ .send()
+ .await
+ .map_err(|e| LogsClientError::Request {
+ url: url.to_string(),
+ source: e,
+ })?;
+ let status = resp.status();
+ let bytes = resp.bytes().await.map_err(|e| LogsClientError::Request {
+ url: url.to_string(),
+ source: e,
+ })?;
+ if !status.is_success() {
+ let body = String::from_utf8_lossy(&bytes).to_string();
+ return Err(LogsClientError::HttpStatus {
+ status,
+ url: url.to_string(),
+ body,
+ });
+ }
+ serde_json::from_slice(&bytes).map_err(|e| LogsClientError::Json {
+ url: url.to_string(),
+ source: e,
+ })
+ }
+
+    async fn get_text(&self, url: Url) -> Result<String> {
+ let resp = self
+ .client
+ .get(url.clone())
+ .send()
+ .await
+ .map_err(|e| LogsClientError::Request {
+ url: url.to_string(),
+ source: e,
+ })?;
+ let status = resp.status();
+ let bytes = resp.bytes().await.map_err(|e| LogsClientError::Request {
+ url: url.to_string(),
+ source: e,
+ })?;
+ if !status.is_success() {
+ let body = String::from_utf8_lossy(&bytes).to_string();
+ return Err(LogsClientError::HttpStatus {
+ status,
+ url: url.to_string(),
+ body,
+ });
+ }
+ Ok(String::from_utf8_lossy(&bytes).to_string())
+ }
+
+    fn job_logs_url(&self, request_id: Uuid, category: Option<&str>) -> Result<Url> {
+ let mut url = self.base_url.clone();
+ {
+ let mut segments = url
+ .path_segments_mut()
+ .map_err(|_| LogsClientError::InvalidBaseUrl(url::ParseError::RelativeUrlWithoutBase))?;
+ segments.clear();
+ segments.extend(&["jobs", &request_id.to_string(), "logs"]);
+ if let Some(cat) = category {
+ segments.push(cat);
+ }
+ }
+ Ok(url)
+ }
+}