mirror of
https://codeberg.org/Toasterson/solstice-ci.git
synced 2026-04-10 13:20:41 +00:00
Add TUI support and logs-client crate for enhanced job and log management
- Introduce a Terminal User Interface (TUI) to enable interactive browsing of jobs and logs.
- Add a new `logs-client` crate to handle communication with the logs service, including job listing and log retrieval.
- Extend `ciadm` with new commands — `jobs`, `logs`, and `tui` — for interacting with the logs service.
- Enhance the CLI to support repository filtering, job status retrieval, and detailed log viewing.
- Refactor dependencies and organize logs-related functionality for modularity and reusability.

Signed-off-by: Till Wegmueller <toasterson@gmail.com>
This commit is contained in:
parent
4c5a8567a4
commit
9306de0acf
5 changed files with 745 additions and 2 deletions
1
.idea/solstice-ci.iml
generated
1
.idea/solstice-ci.iml
generated
|
|
@ -12,6 +12,7 @@
|
||||||
<sourceFolder url="file://$MODULE_DIR$/crates/workflow-runner/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/crates/workflow-runner/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/crates/migration/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/crates/migration/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/crates/logs-service/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/crates/logs-service/src" isTestSource="false" />
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/crates/logs-client/src" isTestSource="false" />
|
||||||
<sourceFolder url="file://$MODULE_DIR$/crates/webhook/src" isTestSource="false" />
|
<sourceFolder url="file://$MODULE_DIR$/crates/webhook/src" isTestSource="false" />
|
||||||
<excludeFolder url="file://$MODULE_DIR$/target" />
|
<excludeFolder url="file://$MODULE_DIR$/target" />
|
||||||
</content>
|
</content>
|
||||||
|
|
|
||||||
|
|
@ -8,4 +8,10 @@ clap = { version = "4", features = ["derive", "env"] }
|
||||||
miette = { version = "7", features = ["fancy"] }
|
miette = { version = "7", features = ["fancy"] }
|
||||||
tracing = "0.1"
|
tracing = "0.1"
|
||||||
common = { path = "../common" }
|
common = { path = "../common" }
|
||||||
|
logs-client = { path = "../logs-client" }
|
||||||
tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
|
tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
|
||||||
|
ratatui = "0.29"
|
||||||
|
crossterm = { version = "0.28", features = ["event-stream"] }
|
||||||
|
futures-util = "0.3"
|
||||||
|
chrono = "0.4"
|
||||||
|
uuid = { version = "1", features = ["v4"] }
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,21 @@
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::io::{self, Stdout};
|
||||||
|
use std::process::Command;
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
use miette::Result;
|
use crossterm::event::{Event, EventStream, KeyCode, KeyEvent, KeyModifiers};
|
||||||
use tracing::info;
|
use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
|
||||||
|
use crossterm::{execute, terminal};
|
||||||
|
use futures_util::StreamExt;
|
||||||
|
use logs_client::{JobGroup, LogsClient};
|
||||||
|
use miette::{IntoDiagnostic, Result};
|
||||||
|
use ratatui::layout::{Constraint, Direction, Layout, Rect};
|
||||||
|
use ratatui::style::{Color, Modifier, Style};
|
||||||
|
use ratatui::text::{Line, Span, Text};
|
||||||
|
use ratatui::widgets::{Block, Borders, List, ListItem, Paragraph, Wrap};
|
||||||
|
use ratatui::{Frame, Terminal};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
#[derive(Parser, Debug)]
|
#[derive(Parser, Debug)]
|
||||||
#[command(name = "ciadm", version, about = "Solstice CI Admin CLI")]
|
#[command(name = "ciadm", version, about = "Solstice CI Admin CLI")]
|
||||||
|
|
@ -29,6 +44,36 @@ enum Commands {
|
||||||
#[arg(long)]
|
#[arg(long)]
|
||||||
job_id: String,
|
job_id: String,
|
||||||
},
|
},
|
||||||
|
/// List recent jobs from the logs service
|
||||||
|
Jobs {
|
||||||
|
/// Logs service base URL (e.g., https://logs.prod.example.com)
|
||||||
|
#[arg(long, env = "LOGS_BASE_URL")]
|
||||||
|
logs_base_url: Option<String>,
|
||||||
|
/// Filter to a repository URL (defaults to git remote origin)
|
||||||
|
#[arg(long)]
|
||||||
|
repo: Option<String>,
|
||||||
|
},
|
||||||
|
/// Fetch logs for a specific job
|
||||||
|
Logs {
|
||||||
|
/// Logs service base URL (e.g., https://logs.prod.example.com)
|
||||||
|
#[arg(long, env = "LOGS_BASE_URL")]
|
||||||
|
logs_base_url: Option<String>,
|
||||||
|
/// Job request ID
|
||||||
|
#[arg(long)]
|
||||||
|
job_id: String,
|
||||||
|
/// Log category (defaults to "default" or first category)
|
||||||
|
#[arg(long)]
|
||||||
|
category: Option<String>,
|
||||||
|
},
|
||||||
|
/// Interactive TUI for browsing jobs and logs
|
||||||
|
Tui {
|
||||||
|
/// Logs service base URL (e.g., https://logs.prod.example.com)
|
||||||
|
#[arg(long, env = "LOGS_BASE_URL")]
|
||||||
|
logs_base_url: Option<String>,
|
||||||
|
/// Preferred repository URL (defaults to git remote origin)
|
||||||
|
#[arg(long)]
|
||||||
|
repo: Option<String>,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::main(flavor = "multi_thread")]
|
#[tokio::main(flavor = "multi_thread")]
|
||||||
|
|
@ -50,6 +95,483 @@ async fn main() -> Result<()> {
|
||||||
// TODO: Query orchestrator for job status
|
// TODO: Query orchestrator for job status
|
||||||
println!("Job {job_id} status: PENDING (stub)");
|
println!("Job {job_id} status: PENDING (stub)");
|
||||||
}
|
}
|
||||||
|
Commands::Jobs {
|
||||||
|
logs_base_url,
|
||||||
|
repo,
|
||||||
|
} => {
|
||||||
|
let base_url = resolve_logs_base_url(logs_base_url)?;
|
||||||
|
let repo = resolve_repo_url(repo);
|
||||||
|
cmd_jobs(&base_url, repo.as_deref()).await?;
|
||||||
|
}
|
||||||
|
Commands::Logs {
|
||||||
|
logs_base_url,
|
||||||
|
job_id,
|
||||||
|
category,
|
||||||
|
} => {
|
||||||
|
let base_url = resolve_logs_base_url(logs_base_url)?;
|
||||||
|
cmd_logs(&base_url, &job_id, category.as_deref()).await?;
|
||||||
|
}
|
||||||
|
Commands::Tui { logs_base_url, repo } => {
|
||||||
|
let base_url = resolve_logs_base_url(logs_base_url)?;
|
||||||
|
let repo = resolve_repo_url(repo);
|
||||||
|
run_tui(&base_url, repo).await?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn resolve_logs_base_url(arg: Option<String>) -> Result<String> {
|
||||||
|
arg.ok_or_else(|| miette::miette!("LOGS_BASE_URL is required (set via env or --logs-base-url)"))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_repo_url(arg: Option<String>) -> Option<String> {
|
||||||
|
if arg.is_some() {
|
||||||
|
return arg;
|
||||||
|
}
|
||||||
|
detect_git_remote()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Best-effort lookup of the current checkout's `origin` remote URL.
///
/// Shells out to `git config --get remote.origin.url`; any failure
/// (git missing, not a repository, no origin remote, empty value)
/// yields `None` rather than an error.
fn detect_git_remote() -> Option<String> {
    let output = Command::new("git")
        .args(["config", "--get", "remote.origin.url"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let url = String::from_utf8_lossy(&output.stdout).trim().to_string();
    // An empty remote URL is treated the same as no remote at all.
    (!url.is_empty()).then_some(url)
}
|
||||||
|
|
||||||
|
/// Print a plain-text listing of recent jobs, grouped by repo + commit.
///
/// Fetches all job groups from the logs service, optionally narrows them
/// to a single repository URL, and prints one header line per group
/// followed by one indented line per job.
async fn cmd_jobs(base_url: &str, repo: Option<&str>) -> Result<()> {
    let client = LogsClient::new(base_url)?;
    let groups = client.list_jobs().await?;
    // The repo filter is applied client-side after fetching everything;
    // no server-side filter parameter is used here.
    let groups = filter_groups_by_repo(groups, repo);
    if groups.is_empty() {
        println!("No jobs found.");
        return Ok(());
    }
    for group in groups {
        // Group header: shortened repo, abbreviated SHA, job count.
        println!(
            "{} {} ({} jobs)",
            short_repo(&group.repo_url),
            short_sha(&group.commit_sha),
            group.total_jobs
        );
        for job in group.jobs {
            // One row per job: id, state, runner label ("-" when unknown),
            // last-update timestamp.
            println!(
                "  {} {:<10} {:<10} {}",
                job.request_id,
                job.state,
                job.runs_on.clone().unwrap_or_else(|| "-".to_string()),
                job.updated_at.to_rfc3339()
            );
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
async fn cmd_logs(base_url: &str, job_id: &str, category: Option<&str>) -> Result<()> {
|
||||||
|
let client = LogsClient::new(base_url)?;
|
||||||
|
let request_id = uuid::Uuid::parse_str(job_id).into_diagnostic()?;
|
||||||
|
let text = if let Some(cat) = category {
|
||||||
|
client.get_logs_by_category(request_id, cat).await?
|
||||||
|
} else {
|
||||||
|
client.get_default_logs(request_id).await?.0
|
||||||
|
};
|
||||||
|
print!("{text}");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn run_tui(base_url: &str, repo_hint: Option<String>) -> Result<()> {
|
||||||
|
let mut terminal = setup_terminal()?;
|
||||||
|
let res = run_tui_loop(base_url, repo_hint, &mut terminal).await;
|
||||||
|
restore_terminal(terminal)?;
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Core TUI event loop: draw a frame, then wait for either a periodic
/// tick or a terminal input event.
async fn run_tui_loop(
    base_url: &str,
    repo_hint: Option<String>,
    terminal: &mut Terminal<ratatui::backend::CrosstermBackend<Stdout>>,
) -> Result<()> {
    let mut app = TuiApp::new(base_url, repo_hint)?;
    // Populate the job list and the log pane before the first frame.
    app.refresh_jobs().await?;
    app.refresh_logs().await?;

    let mut events = EventStream::new();
    // The tick forces a redraw (~5 fps) even with no input.
    let mut tick = tokio::time::interval(Duration::from_millis(200));

    loop {
        terminal.draw(|frame| app.draw(frame))?;
        // Race the tick against the next terminal event.
        tokio::select! {
            _ = tick.tick() => {}
            maybe_event = events.next() => {
                match maybe_event {
                    Some(Ok(Event::Key(key))) => {
                        // handle_key returns true when the user asked to quit.
                        if app.handle_key(key).await? {
                            break;
                        }
                    }
                    // Resize needs no state change — the next draw re-lays out.
                    Some(Ok(Event::Resize(_, _))) => {}
                    Some(Err(e)) => {
                        warn!(error = %e, "failed to read terminal event");
                    }
                    // Event stream closed: nothing more to react to.
                    None => break,
                }
            }
        }
    }
    Ok(())
}
|
||||||
|
|
||||||
|
/// Switch the terminal into raw mode + the alternate screen and build a
/// ratatui terminal over stdout. Paired with `restore_terminal`.
fn setup_terminal() -> Result<Terminal<ratatui::backend::CrosstermBackend<Stdout>>> {
    // Raw mode first so key presses are delivered unbuffered.
    terminal::enable_raw_mode().into_diagnostic()?;
    let mut stdout = io::stdout();
    // Alternate screen keeps the user's scrollback intact.
    execute!(stdout, EnterAlternateScreen).into_diagnostic()?;
    let backend = ratatui::backend::CrosstermBackend::new(stdout);
    Terminal::new(backend).into_diagnostic()
}
|
||||||
|
|
||||||
|
/// Undo `setup_terminal`: leave raw mode and the alternate screen and
/// re-show the cursor, returning the user's shell to a usable state.
fn restore_terminal(mut terminal: Terminal<ratatui::backend::CrosstermBackend<Stdout>>) -> Result<()> {
    terminal::disable_raw_mode().into_diagnostic()?;
    execute!(terminal.backend_mut(), LeaveAlternateScreen).into_diagnostic()?;
    // The TUI hides the cursor while drawing; make it visible again.
    terminal.show_cursor().into_diagnostic()?;
    Ok(())
}
|
||||||
|
|
||||||
|
/// Keep only the job groups whose `repo_url` exactly matches `repo`;
/// with no filter, the input is returned untouched.
fn filter_groups_by_repo(groups: Vec<JobGroup>, repo: Option<&str>) -> Vec<JobGroup> {
    match repo {
        // Exact string comparison — no URL normalization is attempted.
        Some(wanted) => groups
            .into_iter()
            .filter(|group| group.repo_url == wanted)
            .collect(),
        None => groups,
    }
}
|
||||||
|
|
||||||
|
/// Abbreviate a commit SHA to at most 7 characters for display.
///
/// Truncates on a character boundary rather than a byte index: the old
/// `sha[..7]` byte slice would panic if byte 7 fell inside a multi-byte
/// codepoint (impossible for hex SHAs, but this function takes any &str).
fn short_sha(sha: &str) -> String {
    sha.chars().take(7).collect()
}
|
||||||
|
|
||||||
|
/// Shorten a repository URL to an `owner/repo` display form.
///
/// Handles `https://` / `http://` (`scheme://host/owner/repo`),
/// `ssh://` (`ssh://git@host/owner/repo`) and scp-like
/// (`git@host:owner/repo`) URLs; a trailing `.git` suffix is dropped
/// first. Anything unrecognized falls back to the trimmed URL.
///
/// Fixes two defects in the original: the http/https branches were
/// duplicated, and a scheme URL with too few path segments fell through
/// to the scp-like `:` heuristic, where the `:` of the scheme matched
/// and produced garbage (e.g. `"https://example.com"` → `"/"`).
fn short_repo(repo_url: &str) -> String {
    let trimmed = repo_url.trim_end_matches(".git");

    // https://host/owner/repo and http://host/owner/repo share one path.
    for scheme in ["https://", "http://"] {
        if let Some(rest) = trimmed.strip_prefix(scheme) {
            let parts: Vec<&str> = rest.split('/').collect();
            if parts.len() >= 3 {
                return format!("{}/{}", parts[1], parts[2]);
            }
            // Too few segments: do NOT fall through to the `:` heuristic
            // below — the scheme's own colon would match.
            return trimmed.to_string();
        }
    }

    // ssh://git@host/owner/repo — skip the user@host segment.
    if let Some(rest) = trimmed.strip_prefix("ssh://") {
        let after_host = rest.splitn(2, '/').nth(1).unwrap_or("");
        let parts: Vec<&str> = after_host.split('/').collect();
        if parts.len() >= 2 {
            return format!("{}/{}", parts[0], parts[1]);
        }
        return trimmed.to_string();
    }

    // scp-like git@host:owner/repo.
    if let Some(idx) = trimmed.find(':') {
        let after = &trimmed[idx + 1..];
        let parts: Vec<&str> = after.split('/').collect();
        if parts.len() >= 2 {
            return format!("{}/{}", parts[0], parts[1]);
        }
    }

    trimmed.to_string()
}
|
||||||
|
|
||||||
|
/// All mutable state for the interactive TUI.
struct TuiApp {
    // Client used for every logs-service request.
    client: LogsClient,
    // One-shot preferred repo (from --repo / git origin); consumed and
    // cleared by the first `refresh_jobs`.
    repo_hint: Option<String>,
    // Sorted list of repo URLs known to the logs service.
    repos: Vec<String>,
    // Index into `repos` of the repo currently shown.
    selected_repo: usize,
    // Jobs of the selected repo, sorted newest first.
    jobs: Vec<JobEntry>,
    // Index into `jobs` of the highlighted job.
    selected_job: usize,
    // Raw log text of the selected job (or an error/placeholder message).
    logs_text: String,
    // Category the log text came from, if any.
    logs_category: Option<String>,
    // Cached line count of `logs_text`; used to clamp PageDown scrolling.
    logs_lines: usize,
    // Vertical scroll offset into the logs pane.
    logs_scroll: u16,
    // Transient status message shown in the footer; empty = none.
    status: String,
}
|
||||||
|
|
||||||
|
/// Flattened job row for the TUI job list: one per `JobSummary`, with
/// the owning group's commit SHA copied in.
#[derive(Clone)]
struct JobEntry {
    request_id: uuid::Uuid,
    // Commit SHA of the group this job belongs to.
    commit_sha: String,
    state: String,
    // Runner label, when the service reported one.
    runs_on: Option<String>,
    updated_at: chrono::DateTime<chrono::Utc>,
}
|
||||||
|
|
||||||
|
impl TuiApp {
|
||||||
|
fn new(base_url: &str, repo_hint: Option<String>) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
client: LogsClient::new(base_url)?,
|
||||||
|
repo_hint,
|
||||||
|
repos: Vec::new(),
|
||||||
|
selected_repo: 0,
|
||||||
|
jobs: Vec::new(),
|
||||||
|
selected_job: 0,
|
||||||
|
logs_text: String::new(),
|
||||||
|
logs_category: None,
|
||||||
|
logs_lines: 0,
|
||||||
|
logs_scroll: 0,
|
||||||
|
status: String::new(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
    /// Re-query the logs service and rebuild both the repo list and the
    /// job list for the currently selected repo.
    async fn refresh_jobs(&mut self) -> Result<()> {
        let groups = self.client.list_jobs().await?;
        // Bucket jobs by repo URL; BTreeMap gives a stable key order.
        let mut repos_map: BTreeMap<String, Vec<JobEntry>> = BTreeMap::new();
        for group in groups {
            // Flatten each group's jobs into JobEntry rows, copying the
            // group's commit SHA onto every row.
            let entries = group
                .jobs
                .into_iter()
                .map(|job| JobEntry {
                    request_id: job.request_id,
                    commit_sha: group.commit_sha.clone(),
                    state: job.state,
                    runs_on: job.runs_on,
                    updated_at: job.updated_at,
                });
            repos_map
                .entry(group.repo_url)
                .or_default()
                .extend(entries);
        }
        let mut repos: Vec<String> = repos_map.keys().cloned().collect();
        repos.sort();
        self.repos = repos;
        // Apply the one-shot repo hint (from --repo / git origin) exactly
        // once, then drop it so user navigation wins afterwards.
        if let Some(hint) = self.repo_hint.as_deref() {
            if let Some(idx) = self.repos.iter().position(|r| r == hint) {
                self.selected_repo = idx;
            }
            self.repo_hint = None;
        }
        // Clamp selections in case the lists shrank since last refresh.
        if self.selected_repo >= self.repos.len() {
            self.selected_repo = 0;
        }
        self.jobs = self
            .current_repo()
            .and_then(|repo| repos_map.get(repo))
            .cloned()
            .unwrap_or_default();
        // Newest activity first.
        self.jobs.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
        if self.selected_job >= self.jobs.len() {
            self.selected_job = 0;
        }
        if self.repos.is_empty() {
            self.status = "No repositories found in logs service.".to_string();
        }
        Ok(())
    }
|
||||||
|
|
||||||
|
    /// Load the logs of the currently selected job into the log pane.
    ///
    /// Service errors are rendered into the pane itself (plus a footer
    /// status) instead of being propagated, so one bad job cannot kill
    /// the TUI.
    async fn refresh_logs(&mut self) -> Result<()> {
        let Some(job) = self.jobs.get(self.selected_job) else {
            // Nothing selected (e.g. empty repo): show a placeholder.
            self.logs_text = "No job selected.".to_string();
            self.logs_category = None;
            self.logs_lines = self.logs_text.lines().count();
            self.logs_scroll = 0;
            return Ok(());
        };
        match self.client.get_default_logs(job.request_id).await {
            Ok((text, category)) => {
                self.logs_text = text;
                self.logs_category = category;
                self.logs_lines = self.logs_text.lines().count();
                // Jump back to the top whenever new logs are loaded.
                self.logs_scroll = 0;
                self.status.clear();
            }
            Err(err) => {
                // Render the miette report as the pane content so the
                // user sees what went wrong in place.
                let report = miette::Report::new(err);
                self.logs_text = format!("{report}");
                self.logs_category = None;
                self.logs_lines = self.logs_text.lines().count();
                self.logs_scroll = 0;
                self.status = "Failed to load logs.".to_string();
            }
        }
        Ok(())
    }
|
||||||
|
|
||||||
|
    /// The repo URL currently selected, or `None` when no repos exist.
    fn current_repo(&self) -> Option<&String> {
        self.repos.get(self.selected_repo)
    }
|
||||||
|
|
||||||
|
    /// Handle one key press; returns `Ok(true)` when the TUI should exit.
    async fn handle_key(&mut self, key: KeyEvent) -> Result<bool> {
        match (key.code, key.modifiers) {
            // 'q' quits regardless of modifiers.
            (KeyCode::Char('q'), _) => return Ok(true),
            // 'r' re-fetches jobs; a failure is shown in the footer
            // rather than exiting the TUI.
            (KeyCode::Char('r'), _) => {
                if let Err(err) = self.refresh_jobs().await {
                    let report = miette::Report::new(err);
                    self.status = format!("{report}");
                } else {
                    self.status = "Refreshed jobs.".to_string();
                }
                self.refresh_logs().await?;
            }
            // Up/Down move the job selection (clamped at the ends) and
            // reload the log pane for the newly selected job.
            (KeyCode::Up, _) => {
                if !self.jobs.is_empty() {
                    self.selected_job = self.selected_job.saturating_sub(1);
                    self.refresh_logs().await?;
                }
            }
            (KeyCode::Down, _) => {
                if self.selected_job + 1 < self.jobs.len() {
                    self.selected_job += 1;
                    self.refresh_logs().await?;
                }
            }
            // Left/Right switch repos, resetting the job selection and
            // re-fetching both jobs and logs.
            (KeyCode::Left, _) => {
                if self.selected_repo > 0 {
                    self.selected_repo -= 1;
                    self.selected_job = 0;
                    self.refresh_jobs().await?;
                    self.refresh_logs().await?;
                }
            }
            (KeyCode::Right, _) => {
                if self.selected_repo + 1 < self.repos.len() {
                    self.selected_repo += 1;
                    self.selected_job = 0;
                    self.refresh_jobs().await?;
                    self.refresh_logs().await?;
                }
            }
            // PageUp/PageDown scroll the log pane five lines at a time,
            // clamped to [0, logs_lines - 1].
            (KeyCode::PageUp, _) => {
                self.logs_scroll = self.logs_scroll.saturating_sub(5);
            }
            (KeyCode::PageDown, _) => {
                let max_scroll = self.logs_lines.saturating_sub(1) as u16;
                self.logs_scroll = (self.logs_scroll + 5).min(max_scroll);
            }
            // Ctrl-C also quits.
            (KeyCode::Char('c'), KeyModifiers::CONTROL) => return Ok(true),
            _ => {}
        }
        Ok(false)
    }
|
||||||
|
|
||||||
|
    /// Render the whole UI: header (3 rows), body (rest), footer (2 rows).
    fn draw(&self, frame: &mut Frame) {
        let size = frame.area();
        let layout = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Length(3), Constraint::Min(5), Constraint::Length(2)])
            .split(size);
        self.draw_header(frame, layout[0]);
        self.draw_body(frame, layout[1]);
        self.draw_footer(frame, layout[2]);
    }
|
||||||
|
|
||||||
|
    /// Render the top bar: selected repo plus a short summary of the
    /// selected job (sha, state, runner label) when one is selected.
    fn draw_header(&self, frame: &mut Frame, area: Rect) {
        let repo_name = self
            .current_repo()
            .map(|r| short_repo(r))
            .unwrap_or_else(|| "-".to_string());
        // Only present when a job is actually selected.
        let job_info = self.jobs.get(self.selected_job).map(|job| {
            format!(
                "{} {} {}",
                short_sha(&job.commit_sha),
                job.state,
                job.runs_on.clone().unwrap_or_else(|| "-".to_string())
            )
        });
        let mut spans = vec![
            Span::styled("Repo: ", Style::default().add_modifier(Modifier::BOLD)),
            Span::raw(repo_name),
        ];
        if let Some(info) = job_info {
            spans.push(Span::raw(" "));
            spans.push(Span::styled(
                "Job: ",
                Style::default().add_modifier(Modifier::BOLD),
            ));
            spans.push(Span::raw(info));
        }
        let title = Paragraph::new(Line::from(spans)).block(Block::default().borders(Borders::ALL));
        frame.render_widget(title, area);
    }
|
||||||
|
|
||||||
|
    /// Split the body into the job list (35%, left) and log pane (65%, right).
    fn draw_body(&self, frame: &mut Frame, area: Rect) {
        let columns = Layout::default()
            .direction(Direction::Horizontal)
            .constraints([Constraint::Percentage(35), Constraint::Percentage(65)])
            .split(area);
        self.draw_jobs(frame, columns[0]);
        self.draw_logs(frame, columns[1]);
    }
|
||||||
|
|
||||||
|
    /// Render the left-hand job list with the selected row highlighted.
    fn draw_jobs(&self, frame: &mut Frame, area: Rect) {
        let items: Vec<ListItem> = self
            .jobs
            .iter()
            .map(|job| {
                let runs_on = job.runs_on.clone().unwrap_or_else(|| "-".to_string());
                // One row: abbreviated sha, state, runner, HH:MM:SS.
                let line = format!(
                    "{} {:<10} {:<8} {}",
                    short_sha(&job.commit_sha),
                    job.state,
                    runs_on,
                    job.updated_at.format("%H:%M:%S")
                );
                ListItem::new(line)
            })
            .collect();
        // ListState is rebuilt every frame; the selection lives in `self`.
        let mut state = ratatui::widgets::ListState::default();
        state.select(Some(self.selected_job));
        let list = List::new(items)
            .block(Block::default().borders(Borders::ALL).title("Jobs"))
            .highlight_style(
                Style::default()
                    .bg(Color::Blue)
                    .fg(Color::White)
                    .add_modifier(Modifier::BOLD),
            )
            .highlight_symbol(">> ");
        frame.render_stateful_widget(list, area, &mut state);
    }
|
||||||
|
|
||||||
|
    /// Render the right-hand log pane: wrapped text, vertically scrolled
    /// by `logs_scroll`, titled with the active category when known.
    fn draw_logs(&self, frame: &mut Frame, area: Rect) {
        let title = match &self.logs_category {
            Some(cat) => format!("Logs ({cat})"),
            None => "Logs".to_string(),
        };
        let text = if self.logs_text.is_empty() {
            Text::from("No logs available.")
        } else {
            Text::from(self.logs_text.as_str())
        };
        let paragraph = Paragraph::new(text)
            .block(Block::default().borders(Borders::ALL).title(title))
            // trim: false keeps leading whitespace in wrapped log lines.
            .wrap(Wrap { trim: false })
            .scroll((self.logs_scroll, 0));
        frame.render_widget(paragraph, area);
    }
|
||||||
|
|
||||||
|
    /// Render the bottom bar: static key help plus the transient status
    /// message (in yellow) when one is set.
    fn draw_footer(&self, frame: &mut Frame, area: Rect) {
        let help = "Up/Down: jobs Left/Right: repo PgUp/PgDn: scroll r: refresh q: quit";
        let mut line = vec![Span::raw(help)];
        if !self.status.is_empty() {
            line.push(Span::raw(" "));
            line.push(Span::styled(
                &self.status,
                Style::default().fg(Color::Yellow),
            ));
        }
        let footer = Paragraph::new(Line::from(line)).block(Block::default().borders(Borders::ALL));
        frame.render_widget(footer, area);
    }
|
||||||
|
}
|
||||||
|
|
|
||||||
14
crates/logs-client/Cargo.toml
Normal file
14
crates/logs-client/Cargo.toml
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
[package]
|
||||||
|
name = "logs-client"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2024"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
miette = { version = "7", features = ["fancy"] }
|
||||||
|
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls-native-roots"] }
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json = "1"
|
||||||
|
thiserror = "2"
|
||||||
|
url = "2"
|
||||||
|
uuid = { version = "1", features = ["serde", "v4"] }
|
||||||
200
crates/logs-client/src/lib.rs
Normal file
200
crates/logs-client/src/lib.rs
Normal file
|
|
@ -0,0 +1,200 @@
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use miette::Diagnostic;
|
||||||
|
use reqwest::StatusCode;
|
||||||
|
use serde::{Deserialize, Serialize, de::DeserializeOwned};
|
||||||
|
use thiserror::Error;
|
||||||
|
use url::Url;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// Hypermedia links attached to a job summary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JobLinks {
    // Location of this job's logs — presumably a URL or path on the logs
    // service; this client never dereferences it. TODO confirm format.
    pub logs: String,
}
|
||||||
|
|
||||||
|
/// One job as reported by the logs service's job listing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JobSummary {
    pub request_id: Uuid,
    // Runner label the job targets, when known.
    pub runs_on: Option<String>,
    // Job state as a plain string; the vocabulary is defined by the
    // logs service, not modeled as an enum here.
    pub state: String,
    pub updated_at: DateTime<Utc>,
    pub links: JobLinks,
}
|
||||||
|
|
||||||
|
/// A group of jobs belonging to one (repo URL, commit SHA) pair, as
/// returned by the logs service's `/jobs` listing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JobGroup {
    pub repo_url: String,
    pub commit_sha: String,
    // Supplied by the service — presumably the newest `updated_at`
    // across `jobs`; not recomputed client-side. TODO confirm.
    pub last_updated: DateTime<Utc>,
    pub total_jobs: usize,
    pub jobs: Vec<JobSummary>,
}
|
||||||
|
|
||||||
|
/// Summary of one log category of a job: entry count, error flag and
/// the time range covered.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogCategorySummary {
    pub category: String,
    // Entry count per the service schema.
    pub count: i64,
    pub has_errors: bool,
    pub first_ts: DateTime<Utc>,
    pub last_ts: DateTime<Utc>,
}
|
||||||
|
|
||||||
|
/// Errors produced by `LogsClient`, with miette diagnostics attached.
#[derive(Debug, Error, Diagnostic)]
pub enum LogsClientError {
    /// The configured base URL could not be parsed (or cannot serve as
    /// a base for building endpoint URLs).
    #[error("invalid logs base URL")]
    #[diagnostic(
        code(logs_client.invalid_base_url),
        help("Set LOGS_BASE_URL to the logs-service base URL, e.g. https://logs.prod.example.com")
    )]
    InvalidBaseUrl(#[source] url::ParseError),
    /// The HTTP request itself failed (connect, TLS, body read, ...).
    #[error("request failed for {url}")]
    #[diagnostic(
        code(logs_client.request_failed),
        help("Check network connectivity and TLS configuration for the logs service.")
    )]
    Request {
        url: String,
        #[source]
        source: reqwest::Error,
    },
    /// The service answered with a non-success HTTP status; the response
    /// body is kept for diagnostics.
    #[error("logs service returned {status} for {url}")]
    #[diagnostic(
        code(logs_client.http_status),
        help("Inspect the response body for details and verify the logs-service is reachable.")
    )]
    HttpStatus {
        status: StatusCode,
        url: String,
        body: String,
    },
    /// A success response carried a body that did not decode as the
    /// expected JSON shape.
    #[error("failed to parse JSON from {url}")]
    #[diagnostic(
        code(logs_client.json_decode),
        help("Confirm the logs-service response matches the expected schema.")
    )]
    Json {
        url: String,
        #[source]
        source: serde_json::Error,
    },
}
|
||||||
|
|
||||||
|
pub type Result<T> = std::result::Result<T, LogsClientError>;
|
||||||
|
|
||||||
|
/// Thin async HTTP client for the logs service.
///
/// Cheap to clone: `reqwest::Client` is documented as an internally
/// reference-counted handle.
#[derive(Clone)]
pub struct LogsClient {
    // Base URL all endpoint URLs are derived from.
    base_url: Url,
    client: reqwest::Client,
}
|
||||||
|
|
||||||
|
impl LogsClient {
|
||||||
|
pub fn new(base_url: &str) -> Result<Self> {
|
||||||
|
let base_url = Url::parse(base_url).map_err(LogsClientError::InvalidBaseUrl)?;
|
||||||
|
Ok(Self {
|
||||||
|
base_url,
|
||||||
|
client: reqwest::Client::new(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn list_jobs(&self) -> Result<Vec<JobGroup>> {
|
||||||
|
let url = self.base_url.join("jobs").map_err(LogsClientError::InvalidBaseUrl)?;
|
||||||
|
self.get_json(url).await
|
||||||
|
}
|
||||||
|
|
||||||
|
    /// Fetch the log category summaries for one job
    /// (`/jobs/{request_id}/logs`).
    pub async fn list_log_categories(&self, request_id: Uuid) -> Result<Vec<LogCategorySummary>> {
        let url = self.job_logs_url(request_id, None)?;
        self.get_json(url).await
    }
|
||||||
|
|
||||||
|
    /// Fetch the raw log text of one category of a job
    /// (`/jobs/{request_id}/logs/{category}`).
    pub async fn get_logs_by_category(
        &self,
        request_id: Uuid,
        category: &str,
    ) -> Result<String> {
        let url = self.job_logs_url(request_id, Some(category))?;
        self.get_text(url).await
    }
|
||||||
|
|
||||||
|
pub async fn get_default_logs(&self, request_id: Uuid) -> Result<(String, Option<String>)> {
|
||||||
|
let categories = self.list_log_categories(request_id).await?;
|
||||||
|
let category = categories
|
||||||
|
.iter()
|
||||||
|
.find(|c| c.category == "default")
|
||||||
|
.map(|c| c.category.clone())
|
||||||
|
.or_else(|| categories.first().map(|c| c.category.clone()));
|
||||||
|
let Some(category) = category else {
|
||||||
|
return Ok(("".to_string(), None));
|
||||||
|
};
|
||||||
|
let text = self.get_logs_by_category(request_id, &category).await?;
|
||||||
|
Ok((text, Some(category)))
|
||||||
|
}
|
||||||
|
|
||||||
|
    /// GET `url` and decode the JSON body into `T`.
    ///
    /// Reads the full body before checking the status so that a non-2xx
    /// response can be reported with its body text included.
    async fn get_json<T: DeserializeOwned>(&self, url: Url) -> Result<T> {
        let resp = self
            .client
            .get(url.clone())
            .send()
            .await
            .map_err(|e| LogsClientError::Request {
                url: url.to_string(),
                source: e,
            })?;
        let status = resp.status();
        let bytes = resp.bytes().await.map_err(|e| LogsClientError::Request {
            url: url.to_string(),
            source: e,
        })?;
        if !status.is_success() {
            // Lossy decode: an error body is diagnostic text, so invalid
            // UTF-8 is tolerated rather than failed on.
            let body = String::from_utf8_lossy(&bytes).to_string();
            return Err(LogsClientError::HttpStatus {
                status,
                url: url.to_string(),
                body,
            });
        }
        serde_json::from_slice(&bytes).map_err(|e| LogsClientError::Json {
            url: url.to_string(),
            source: e,
        })
    }
|
||||||
|
|
||||||
|
    /// GET `url` and return the body as text (lossy UTF-8 decode).
    ///
    /// Mirrors `get_json`'s error handling: the status is checked after
    /// reading the body so failures carry the response text.
    async fn get_text(&self, url: Url) -> Result<String> {
        let resp = self
            .client
            .get(url.clone())
            .send()
            .await
            .map_err(|e| LogsClientError::Request {
                url: url.to_string(),
                source: e,
            })?;
        let status = resp.status();
        let bytes = resp.bytes().await.map_err(|e| LogsClientError::Request {
            url: url.to_string(),
            source: e,
        })?;
        if !status.is_success() {
            let body = String::from_utf8_lossy(&bytes).to_string();
            return Err(LogsClientError::HttpStatus {
                status,
                url: url.to_string(),
                body,
            });
        }
        // Lossy decode: log payloads may contain arbitrary bytes.
        Ok(String::from_utf8_lossy(&bytes).to_string())
    }
|
||||||
|
|
||||||
|
    /// Build the URL for a job's logs:
    /// `/jobs/{request_id}/logs[/{category}]`.
    ///
    /// NOTE(review): `segments.clear()` discards any path present on the
    /// base URL, so a service mounted under a prefix (e.g.
    /// `https://host/logs-api`) would be addressed at the host root —
    /// confirm whether base paths must be preserved.
    fn job_logs_url(&self, request_id: Uuid, category: Option<&str>) -> Result<Url> {
        let mut url = self.base_url.clone();
        {
            // `path_segments_mut` fails only for cannot-be-a-base URLs
            // (e.g. `mailto:`); map that onto the invalid-base-URL error.
            let mut segments = url
                .path_segments_mut()
                .map_err(|_| LogsClientError::InvalidBaseUrl(url::ParseError::RelativeUrlWithoutBase))?;
            segments.clear();
            segments.extend(&["jobs", &request_id.to_string(), "logs"]);
            if let Some(cat) = category {
                segments.push(cat);
            }
        }
        Ok(url)
    }
|
||||||
|
}
|
||||||
Loading…
Add table
Reference in a new issue