Introduce GNU tar (gtar) support and workflow setup enhancements; bump version to 0.1.16

- Add detection and usage of GNU `tar` for platforms where BSD `tar` is incompatible with required options.
- Refactor `job.sh` to delegate all environment setup to newly introduced per-OS setup scripts.
- Add initial support for workflow setups via `workflow.kdl`, running pre-defined setup scripts before executing workflow steps.
- Integrate step-wise execution and logging for workflows, with structured NDJSON output for detailed traceability.
- Increment orchestrator version to 0.1.16.

Signed-off-by: Till Wegmueller <toasterson@gmail.com>
This commit is contained in:
Till Wegmueller 2025-11-18 15:17:03 +01:00
parent 8f909c0105
commit 08eb82d7f7
No known key found for this signature in database
5 changed files with 385 additions and 102 deletions

View file

@@ -1,100 +1,15 @@
#!/usr/bin/env bash #!/usr/bin/env bash
set -euo pipefail set -euo pipefail
# Solstice CI VM job script: build this repository inside the guest. # Solstice CI legacy job script.
# The runner clones the repo at the requested commit and executes this script. # NOTE: All environment and package setup is handled by per-OS setup scripts
# It attempts to ensure required tools (git, curl, protobuf compiler, Rust) exist. # referenced in .solstice/workflow.kdl and executed by the workflow runner.
# This script intentionally contains no setup logic.
# Ensure a sane HOME even under non-login shells with set -u
export HOME=${HOME:-/root}
# Quieter noninteractive installs where supported
export DEBIAN_FRONTEND=${DEBIAN_FRONTEND:-noninteractive}
log() { printf "[job] %s\n" "$*" >&2; } log() { printf "[job] %s\n" "$*" >&2; }
detect_pm() {
if command -v apt-get >/dev/null 2>&1; then echo apt; return; fi
if command -v dnf >/dev/null 2>&1; then echo dnf; return; fi
if command -v yum >/dev/null 2>&1; then echo yum; return; fi
if command -v zypper >/dev/null 2>&1; then echo zypper; return; fi
if command -v apk >/dev/null 2>&1; then echo apk; return; fi
if command -v pacman >/dev/null 2>&1; then echo pacman; return; fi
if command -v pkg >/dev/null 2>&1; then echo pkg; return; fi
if command -v pkgin >/dev/null 2>&1; then echo pkgin; return; fi
echo none
}
install_linux() {
PM=$(detect_pm)
case "$PM" in
apt)
sudo -n true 2>/dev/null || true
sudo apt-get update -y || apt-get update -y || true
sudo apt-get install -y --no-install-recommends curl ca-certificates git build-essential pkg-config libssl-dev protobuf-compiler cmake clang libclang-dev || true
;;
dnf)
sudo dnf install -y curl ca-certificates git gcc gcc-c++ make pkgconf-pkg-config openssl-devel protobuf-compiler clang clang-libs || true
;;
yum)
sudo yum install -y curl ca-certificates git gcc gcc-c++ make pkgconfig openssl-devel protobuf-compiler clang clang-libs || true
;;
zypper)
sudo zypper --non-interactive install curl ca-certificates git gcc gcc-c++ make pkg-config libopenssl-devel protobuf clang || true
;;
apk)
sudo apk add --no-cache curl ca-certificates git build-base pkgconfig openssl-dev protoc clang clang-libs || true
;;
pacman)
sudo pacman -Sy --noconfirm curl ca-certificates git base-devel pkgconf openssl protobuf clang || true
;;
*)
log "unknown package manager ($PM); skipping linux deps install"
;;
esac
}
install_illumos() {
if command -v pkg >/dev/null 2>&1; then
# OpenIndiana IPS packages (best-effort)
sudo pkg refresh || true
sudo pkg install -v developer/build/gnu-make developer/gcc-13 git developer/protobuf developer/rustc developer/clang || true
elif command -v pkgin >/dev/null 2>&1; then
sudo pkgin -y install git gcc gmake protobuf clang || true
else
log "no known package manager found on illumos"
fi
}
ensure_rust() {
if command -v cargo >/dev/null 2>&1; then return 0; fi
OS=$(uname -s 2>/dev/null || echo unknown)
if [ "$OS" = "SunOS" ] && command -v pkg >/dev/null 2>&1; then
log "installing Rust toolchain via IPS package manager (developer/rustc)"
sudo pkg refresh || true
sudo pkg install -v developer/rustc || true
if command -v cargo >/dev/null 2>&1; then return 0; fi
fi
log "installing Rust toolchain with rustup"
curl -fsSL https://sh.rustup.rs | sh -s -- -y
# shellcheck disable=SC1091
if [ -f "$HOME/.cargo/env" ]; then
. "$HOME/.cargo/env"
else
export PATH="$HOME/.cargo/bin:$PATH"
fi
}
main() { main() {
OS=$(uname -s 2>/dev/null || echo unknown) # Keep a minimal representative build as a legacy hook. The workflow steps
case "$OS" in # already perform fmt/clippy/build/test; this is safe to remove later.
Linux) install_linux ;;
SunOS) install_illumos ;;
esac
ensure_rust
# Ensure protoc available in PATH
if ! command -v protoc >/dev/null 2>&1; then
log "WARNING: protoc not found; prost/tonic build may fail"
fi
# Build a representative subset to avoid known sea-orm-cli issues in full workspace builds
log "building workflow-runner" log "building workflow-runner"
cargo build -p workflow-runner --release || cargo build -p workflow-runner cargo build -p workflow-runner --release || cargo build -p workflow-runner
log "done" log "done"

View file

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -euo pipefail
# Solstice CI per-OS environment prepare (illumos / SunOS)
# Installs baseline tools (curl, git, gtar, compilers, rust) where possible.
# Every install is best-effort (`|| true`): the workflow runner re-checks for
# required tools afterwards, so a missing package must not abort the job.
log() { printf "[setup-illumos] %s\n" "$*" >&2; }
# Install baseline packages using whichever package manager is present:
# IPS `pkg` (OpenIndiana) or `pkgin` (SmartOS / pkgsrc). Logs and no-ops
# when neither exists.
install_packages() {
    if command -v pkg >/dev/null 2>&1; then
        # OpenIndiana / IPS
        sudo pkg refresh || true
        # Prefer GNU tar (gtar) to match runner expectations.
        # git is included because the runner's preflight/clone path needs it
        # (the legacy job.sh installed it; this script previously did not).
        sudo pkg install -v \
            web/curl \
            git \
            developer/build/gnu-make \
            developer/gcc-13 \
            developer/protobuf \
            developer/clang \
            archiver/gnu-tar \
            developer/rustc || true
        # CA certs where package exists
        sudo pkg install -v web/ca-certificates || true
        # mozilla-rootcerts when available
        if command -v mozilla-rootcerts >/dev/null 2>&1; then
            sudo mozilla-rootcerts install || true
        fi
    elif command -v pkgin >/dev/null 2>&1; then
        # SmartOS/NetBSD pkgin
        sudo pkgin -y update || true
        sudo pkgin -y install curl git gmake gcc protobuf clang gtar rust || true
        sudo pkgin -y install mozilla-rootcerts || true
        if command -v mozilla-rootcerts >/dev/null 2>&1; then
            sudo mozilla-rootcerts install || true
        fi
    else
        log "no known package manager found (pkg/pkgin); skipping installs"
    fi
}
main() {
    install_packages
    # Expose GNU tar as `tar` only when no tar exists at all. Create
    # $HOME/bin first: the previous version symlinked into it without
    # ensuring it exists, so the link silently failed on fresh guests.
    if command -v gtar >/dev/null 2>&1 && ! command -v tar >/dev/null 2>&1; then
        mkdir -p "$HOME/bin" 2>/dev/null || true
        ln -sf "$(command -v gtar)" "$HOME/bin/tar" 2>/dev/null || true
    fi
    # NOTE(review): $HOME/bin must be on PATH for the symlink to take
    # effect — confirm the runner's shell profile includes it.
}
main "$@"

67
.solstice/setup-linux.sh Normal file
View file

@@ -0,0 +1,67 @@
#!/usr/bin/env bash
set -euo pipefail
# Solstice CI per-OS environment prepare (Linux)
# Installs baseline tools needed by the workflow runner and builds.
log() { printf "[setup-linux] %s\n" "$*" >&2; }
export DEBIAN_FRONTEND=${DEBIAN_FRONTEND:-noninteractive}
# Print the id of the first known package manager found on PATH
# (apt/dnf/yum/zypper/apk/pacman), or "none" when nothing matches.
detect_pm() {
    local probe
    for probe in apt-get:apt dnf:dnf yum:yum zypper:zypper apk:apk pacman:pacman; do
        if command -v "${probe%%:*}" >/dev/null 2>&1; then
            echo "${probe##*:}"
            return
        fi
    done
    echo none
}
# Best-effort install of the build toolchain for the detected distro family.
# Every command is allowed to fail so setup never aborts the whole job.
install_packages() {
    local manager
    manager=$(detect_pm)
    case "$manager" in
    apt)
        sudo -n true 2>/dev/null || true
        sudo apt-get update -y || apt-get update -y || true
        sudo apt-get install -y --no-install-recommends \
            curl ca-certificates git build-essential pkg-config libssl-dev \
            protobuf-compiler cmake clang libclang-dev || true
        ;;
    dnf)
        sudo dnf install -y curl ca-certificates git gcc gcc-c++ make pkgconf-pkg-config openssl-devel protobuf-compiler clang clang-libs || true
        ;;
    yum)
        sudo yum install -y curl ca-certificates git gcc gcc-c++ make pkgconfig openssl-devel protobuf-compiler clang clang-libs || true
        ;;
    zypper)
        sudo zypper --non-interactive install curl ca-certificates git gcc gcc-c++ make pkg-config libopenssl-devel protobuf clang || true
        ;;
    apk)
        sudo apk add --no-cache curl ca-certificates git build-base pkgconfig openssl-dev protoc clang clang-libs || true
        ;;
    pacman)
        sudo pacman -Sy --noconfirm curl ca-certificates git base-devel pkgconf openssl protobuf clang || true
        ;;
    *)
        log "unknown package manager ($manager); skipping package install"
        ;;
    esac
}
# Install Rust via rustup unless cargo is already present, then put cargo on
# PATH for the remainder of this script.
ensure_rust() {
    command -v cargo >/dev/null 2>&1 && return 0
    log "installing Rust toolchain with rustup"
    curl -fsSL https://sh.rustup.rs | sh -s -- -y
    # shellcheck disable=SC1091
    if [ -f "$HOME/.cargo/env" ]; then
        . "$HOME/.cargo/env"
    else
        export PATH="$HOME/.cargo/bin:$PATH"
    fi
}
main() {
    install_packages
    ensure_rust
    # protoc is required by prost/tonic build scripts; warn early if missing.
    if ! command -v protoc >/dev/null 2>&1; then
        log "WARNING: protoc not found; prost/tonic builds may fail"
    fi
}
main "$@"

View file

@ -1,17 +1,23 @@
workflow name="Solstice CI for solstice-ci" { workflow name="Solstice CI for solstice-ci" {
// Linux build and test on Ubuntu 22.04 runner // Linux build and test on Ubuntu 22.04 runner
job id="linux-build" runs_on="ubuntu-22.04" { job id="linux-build" runs_on="ubuntu-22.04" {
setup path=".solstice/setup-linux.sh"
step name="Show toolchain" run="rustc -Vv && cargo -V" step name="Show toolchain" run="rustc -Vv && cargo -V"
step name="Format" run="cargo fmt --check" step name="Format" run="cargo fmt --check"
step name="Clippy" run="cargo clippy --workspace --all-targets --all-features -- -D warnings" step name="Clippy" run="cargo clippy --workspace --all-targets --all-features -- -D warnings"
step name="Build" run="cargo build --workspace" step name="Build" run="cargo build --workspace"
step name="Test" run="cargo test --workspace --all-targets" step name="Test" run="cargo test --workspace --all-targets"
// Legacy script hook (runs after all other tests)
step name="Legacy job.sh" run=".solstice/job.sh"
} }
// Illumos build (bhyve zone). Keep steps minimal; clippy/format may vary per toolchain. // Illumos build (bhyve zone). Keep steps minimal; clippy/format may vary per toolchain.
job id="illumos-build" runs_on="illumos-latest" { job id="illumos-build" runs_on="illumos-latest" {
setup path=".solstice/setup-illumos.sh"
step name="Show toolchain" run="rustc -Vv && cargo -V" step name="Show toolchain" run="rustc -Vv && cargo -V"
step name="Build" run="cargo build --workspace" step name="Build" run="cargo build --workspace"
step name="Test" run="cargo test --workspace --all-targets" step name="Test" run="cargo test --workspace --all-targets"
// Legacy script hook (runs after all other tests)
step name="Legacy job.sh" run=".solstice/job.sh"
} }
} }

View file

@ -147,7 +147,8 @@ async fn preflight(repo: &str, workdir: &str) -> Result<()> {
let has_curl = has_cmd("curl").await; let has_curl = has_cmd("curl").await;
let has_wget = has_cmd("wget").await; let has_wget = has_cmd("wget").await;
let has_tar = has_cmd("tar").await; let has_tar = has_cmd("tar").await;
for (tool, ok) in [("git", has_git), ("curl", has_curl), ("wget", has_wget), ("tar", has_tar)] { let has_gtar = has_cmd("gtar").await;
for (tool, ok) in [("git", has_git), ("curl", has_curl), ("wget", has_wget), ("tar", has_tar), ("gtar", has_gtar)] {
let lvl = if ok { "info" } else { "warn" }; let lvl = if ok { "info" } else { "warn" };
let msg = if ok { let msg = if ok {
format!("tool {tool}: available") format!("tool {tool}: available")
@ -156,7 +157,7 @@ async fn preflight(repo: &str, workdir: &str) -> Result<()> {
}; };
println!("{}", ndjson_line("tool_check", lvl, &msg, Some(serde_json::json!({"available": ok, "tool": tool})))); println!("{}", ndjson_line("tool_check", lvl, &msg, Some(serde_json::json!({"available": ok, "tool": tool}))));
} }
let can_clone = has_git || (has_tar && (has_curl || has_wget)); let can_clone = has_git || ((has_tar || has_gtar) && (has_curl || has_wget));
let lvl = if can_clone { "info" } else { "error" }; let lvl = if can_clone { "info" } else { "error" };
println!( println!(
"{}", "{}",
@ -211,19 +212,22 @@ async fn fetch_repo_via_archive(repo_https: &str, sha: &str, workdir: &str) -> R
let base = repo_https.trim_end_matches('.').trim_end_matches(".git"); let base = repo_https.trim_end_matches('.').trim_end_matches(".git");
let url = format!("{}/archive/{}.tar.gz", base, sha); let url = format!("{}/archive/{}.tar.gz", base, sha);
// Prefer GNU tar (gtar) when available (illumos' tar is not compatible with -z/--strip-components)
let tar_bin = if has_cmd("gtar").await { "gtar" } else { "tar" };
// Check if we should allow insecure TLS (last resort) // Check if we should allow insecure TLS (last resort)
let insecure = std::env::var("SOLSTICE_ALLOW_INSECURE").ok().map(|v| v == "1" || v.eq_ignore_ascii_case("true")).unwrap_or(false); let insecure = std::env::var("SOLSTICE_ALLOW_INSECURE").ok().map(|v| v == "1" || v.eq_ignore_ascii_case("true")).unwrap_or(false);
let curl_flags = if insecure { "-fSLk" } else { "-fSL" }; let curl_flags = if insecure { "-fSLk" } else { "-fSL" };
// Try curl | tar, then wget | tar // Try curl | tar, then wget | tar
let cmd_curl = format!( let cmd_curl = format!(
"mkdir -p {workdir} && curl {curl_flags} {url} | tar -xz -C {workdir} --strip-components=1" "mkdir -p {workdir} && curl {curl_flags} {url} | {tar_bin} -xz -C {workdir} --strip-components=1"
); );
if run_shell(&cmd_curl).await.is_ok() { if run_shell(&cmd_curl).await.is_ok() {
return Ok(()); return Ok(());
} }
let cmd_wget = format!( let cmd_wget = format!(
"mkdir -p {workdir} && wget -qO- {url} | tar -xz -C {workdir} --strip-components=1" "mkdir -p {workdir} && wget -qO- {url} | {tar_bin} -xz -C {workdir} --strip-components=1"
); );
if run_shell(&cmd_wget).await.is_ok() { if run_shell(&cmd_wget).await.is_ok() {
return Ok(()); return Ok(());
@ -261,7 +265,7 @@ async fn fetch_repo_via_archive(repo_https: &str, sha: &str, workdir: &str) -> R
// As a last resort with explicit opt-in, try curl --insecure // As a last resort with explicit opt-in, try curl --insecure
if insecure { if insecure {
let cmd_curl_insecure = format!( let cmd_curl_insecure = format!(
"mkdir -p {workdir} && curl -fSLk {url} | tar -xz -C {workdir} --strip-components=1" "mkdir -p {workdir} && curl -fSLk {url} | {tar_bin} -xz -C {workdir} --strip-components=1"
); );
if run_shell(&cmd_curl_insecure).await.is_ok() { if run_shell(&cmd_curl_insecure).await.is_ok() {
warn!("used curl --insecure to fetch repo archive on SunOS"); warn!("used curl --insecure to fetch repo archive on SunOS");
@ -429,6 +433,159 @@ async fn run_job_script(workdir: &str, script_override: Option<&str>) -> Result<
Ok(code) Ok(code)
} }
/// A single workflow step: its display name and the shell command to run.
#[derive(Debug)]
struct WorkflowStep { name: String, run: String }
/// One job parsed from `.solstice/workflow.kdl`: an optional setup-script
/// path plus the ordered list of steps to execute.
#[derive(Debug)]
struct WorkflowJob { setup: Option<String>, steps: Vec<WorkflowStep> }
/// Extract the value of a `key="value"` or `key='value'` attribute from one
/// line of workflow KDL.
///
/// Only occurrences where `key` starts the line or is preceded by whitespace
/// are accepted, so looking up `name` can no longer falsely match the tail of
/// a longer attribute such as `step_name="..."` (a defect in the previous
/// substring-based search). Returns the first such quoted value, or `None`
/// when the key is absent or its closing quote is missing.
fn capture_attr(line: &str, key: &str) -> Option<String> {
    // Double quotes first, then single quotes — same preference order as
    // the original implementation.
    for quote in ['"', '\''] {
        let pattern = format!("{key}={quote}");
        let mut from = 0;
        while let Some(rel) = line[from..].find(&pattern) {
            let start = from + rel;
            // Require a word boundary before the key: either start of line
            // or a whitespace character.
            let bounded = line[..start]
                .chars()
                .next_back()
                .map_or(true, |c| c.is_whitespace());
            if bounded {
                let rest = &line[start + pattern.len()..];
                if let Some(end) = rest.find(quote) {
                    return Some(rest[..end].to_string());
                }
                // Unterminated quote: give up on this quote style and try
                // the other one (matches the original fallback behavior).
                break;
            }
            // Boundary check failed; keep scanning past this occurrence.
            from = start + pattern.len();
        }
    }
    None
}
// Hand-rolled, line-oriented scan of workflow.kdl (deliberately not a full
// KDL parser). Finds the job whose id equals `wanted_job` — or the first job
// encountered when `wanted_job` is None — and collects its optional
// `setup path=...` plus every `step name=... run=...` line, in order.
// Returns None when no matching job is found.
fn parse_workflow_for_job(kdl: &str, wanted_job: Option<&str>) -> Option<WorkflowJob> {
let mut lines = kdl.lines().peekable();
while let Some(line) = lines.next() {
let l = line.trim();
// A job header looks like: job id="linux-build" runs_on="..." {
if l.starts_with("job ") && l.contains("id=") {
let id = capture_attr(l, "id");
// Brace depth relative to this job: 1 when the opening `{` is on the
// header line itself, 0 when it is expected on a following line.
let mut depth = if l.ends_with('{') { 1 } else { 0 };
let mut steps: Vec<WorkflowStep> = Vec::new();
let mut setup: Option<String> = None;
// If this job is the one we want (or no preference and it's the first job), collect its setup and steps
let take_this = match (wanted_job, id.as_deref()) { (Some(w), Some(i)) => w == i, (None, Some(_)) => true, _ => false };
// Walk the job body by peeking, tracking `{`/`}` nesting so nested
// blocks inside the job do not end the scan early.
// NOTE(review): a line that both starts with `}` and ends with `{`
// (e.g. `} else {`) nets to zero here but could break out at depth 1 —
// assumed not to occur in well-formed workflow.kdl; confirm.
while let Some(peek) = lines.peek() {
let t = peek.trim();
if t.ends_with('{') { depth += 1; }
if t.starts_with('}') {
if depth == 0 { break; }
depth -= 1;
// depth back to 0 means this `}` closes the job: consume it and stop.
if depth == 0 { lines.next(); break; }
}
if take_this {
// First `setup path=...` wins; later ones are ignored.
if setup.is_none() && t.starts_with("setup ") && t.contains("path=") {
if let Some(p) = capture_attr(t, "path") { setup = Some(p); }
}
// Steps without a usable run= attribute are silently skipped.
if t.starts_with("step ") && t.contains("run=") {
let name = capture_attr(t, "name").unwrap_or_else(|| "unnamed".into());
if let Some(run) = capture_attr(t, "run") {
steps.push(WorkflowStep { name, run });
}
}
}
lines.next();
}
// Non-matching jobs are scanned only to advance past their body.
if take_this { return Some(WorkflowJob { setup, steps }); }
}
}
None
}
// Run one workflow step through `/bin/sh -lc` inside `workdir`, streaming its
// stdout/stderr as NDJSON `step_run` records tagged with the step name/index,
// and emitting `step` records for start/success/failure. Returns the step's
// exit code (1 when the process was killed by a signal and has no code).
// NOTE(review): `workdir` and `step.run` are interpolated unquoted into the
// shell command line — assumed to come from the repo's own workflow.kdl
// (trusted input); confirm no other caller passes untrusted text.
async fn run_step(workdir: &str, step: &WorkflowStep, idx: usize, total: usize) -> Result<i32> {
// Announce step start
println!("{}", ndjson_line(
"step",
"info",
&format!("starting step: {}", step.name),
Some(serde_json::json!({"step_name": step.name, "step_index": idx, "total_steps": total}))
));
// Build command and spawn. `-l` gives a login shell so PATH additions from
// setup (e.g. ~/.cargo/bin) are visible; `set -e` aborts on first failure.
let mut cmd = Command::new("/bin/sh");
cmd.arg("-lc")
.arg(format!("set -e; cd {workdir}; {}", step.run))
.stdout(Stdio::piped())
.stderr(Stdio::piped());
let mut child = cmd.spawn().into_diagnostic()?;
// Stream output with step fields attached to every line.
let extra = serde_json::json!({"step_name": step.name, "step_index": idx, "total_steps": total});
// stdout reader task: one NDJSON info record per line. `read_until` (not
// `lines()`) tolerates non-UTF-8 bytes via from_utf8_lossy.
if let Some(stdout) = child.stdout.take() {
let mut reader = BufReader::new(stdout);
let extra_out = extra.clone();
tokio::spawn(async move {
loop {
let mut buf = Vec::with_capacity(256);
match reader.read_until(b'\n', &mut buf).await {
Ok(0) => break,
Ok(_) => {
let line = String::from_utf8_lossy(&buf).trim_end_matches(['\n', '\r']).to_string();
println!("{}", ndjson_line("step_run", "info", &line, Some(extra_out.clone())));
}
Err(e) => {
eprintln!("{}", ndjson_line("step_run", "error", &format!("error reading stdout: {}", e), Some(extra_out.clone())));
break;
}
}
}
});
}
// stderr reader task: same shape, but lines are emitted at error level.
if let Some(stderr) = child.stderr.take() {
let mut reader = BufReader::new(stderr);
let extra_err = extra.clone();
tokio::spawn(async move {
loop {
let mut buf = Vec::with_capacity(256);
match reader.read_until(b'\n', &mut buf).await {
Ok(0) => break,
Ok(_) => {
let line = String::from_utf8_lossy(&buf).trim_end_matches(['\n', '\r']).to_string();
eprintln!("{}", ndjson_line("step_run", "error", &line, Some(extra_err.clone())));
}
Err(e) => {
eprintln!("{}", ndjson_line("step_run", "error", &format!("error reading stderr: {}", e), Some(extra_err.clone())));
break;
}
}
}
});
}
// Wait for the child; the detached reader tasks finish on their own when
// the pipes reach EOF (their final output may race this function's return).
let status = child.wait().await.into_diagnostic()?;
let code = status.code().unwrap_or(1);
if code != 0 {
eprintln!("{}", ndjson_line("step", "error", &format!("step failed: {} (exit {})", step.name, code), Some(extra)));
} else {
println!("{}", ndjson_line("step", "info", &format!("completed step: {}", step.name), Some(serde_json::json!({"step_name": step.name, "step_index": idx, "total_steps": total, "exit_code": code}))));
}
Ok(code)
}
// Look for `.solstice/workflow.kdl` inside the checked-out workdir and, when
// it describes a usable job, run its setup script and steps in order.
//
// Returns:
//   Ok(None)       – no workflow file, no matching job, or job has no steps
//                    (callers fall back to the legacy job script);
//   Ok(Some(code)) – the workflow ran; `code` is 0 on success, otherwise the
//                    exit code of the first failing setup script or step.
async fn run_workflow_if_present(workdir: &str) -> Result<Option<i32>> {
    let path = format!("{}/.solstice/workflow.kdl", workdir);
    if !fs::try_exists(&path).await.into_diagnostic()? {
        return Ok(None);
    }
    let kdl = fs::read_to_string(&path).await.into_diagnostic()?;

    // The selected job id (if any) comes from job.yaml; when absent, the
    // parser falls back to the first job in the file.
    let selected = read_job_file().await.ok().and_then(|j| j.workflow_job_id);
    let job = match parse_workflow_for_job(&kdl, selected.as_deref()) {
        None => return Ok(None),
        Some(found) => found,
    };

    // The setup script must succeed before any step is attempted.
    if let Some(script) = job.setup.as_deref() {
        let setup_code = run_setup_script(workdir, script).await?;
        if setup_code != 0 {
            return Ok(Some(setup_code));
        }
    }

    if job.steps.is_empty() {
        return Ok(None);
    }

    // Execute steps sequentially, stopping at the first failure.
    let total = job.steps.len();
    for (i, step) in job.steps.iter().enumerate() {
        let exit = run_step(workdir, step, i + 1, total).await?;
        if exit != 0 {
            return Ok(Some(exit));
        }
    }
    Ok(Some(0))
}
#[tokio::main(flavor = "multi_thread")] #[tokio::main(flavor = "multi_thread")]
async fn main() -> Result<()> { async fn main() -> Result<()> {
let _t = common::init_tracing("solstice-workflow-runner")?; let _t = common::init_tracing("solstice-workflow-runner")?;
@ -469,11 +626,15 @@ async fn main() -> Result<()> {
let code = match ensure_repo(&repo, &sha, &workdir).await { let code = match ensure_repo(&repo, &sha, &workdir).await {
Ok(_) => { Ok(_) => {
// Read job.yaml to get optional script override // Prefer workflow.kdl when present; otherwise run legacy script
let jf = read_job_file().await.ok(); match run_workflow_if_present(&workdir).await? {
let script_override = jf.as_ref().and_then(|j| j.script_path.as_deref()); Some(code) => code,
// proceed to run job script None => {
run_job_script(&workdir, script_override).await? let jf = read_job_file().await.ok();
let script_override = jf.as_ref().and_then(|j| j.script_path.as_deref());
run_job_script(&workdir, script_override).await?
}
}
} }
Err(e) => { Err(e) => {
eprintln!("{}", ndjson_line("env_setup", "error", &format!("failed to prepare repo: {}", e), None)); eprintln!("{}", ndjson_line("env_setup", "error", &format!("failed to prepare repo: {}", e), None));
@ -482,10 +643,96 @@ async fn main() -> Result<()> {
}; };
if code != 0 { if code != 0 {
error!(exit_code = code, "job script failed"); error!(exit_code = code, "workflow failed");
std::process::exit(code); std::process::exit(code);
} }
info!("job complete"); info!("job complete");
Ok(()) Ok(())
} }
// Execute a setup script before workflow steps. Similar to run_job_script but with different categories.
// Resolves `setup_rel_or_abs` (absolute paths used as-is; relative paths
// joined under `workdir` after stripping a leading "./"), makes it
// executable, runs it via `/bin/sh -lc`, and streams its stdout/stderr as
// NDJSON `setup_run` records. Returns the script's exit code; a missing
// script is reported as an error record and returned as exit code 1 rather
// than an Err.
// NOTE(review): `workdir` and the script path are interpolated unquoted into
// shell command lines — paths containing spaces or shell metacharacters will
// break; assumed safe because the path comes from the repo's workflow.kdl.
async fn run_setup_script(workdir: &str, setup_rel_or_abs: &str) -> Result<i32> {
// Resolve path
let script = if setup_rel_or_abs.starts_with('/') {
setup_rel_or_abs.to_string()
} else {
format!("{}/{}", workdir, setup_rel_or_abs.trim_start_matches("./"))
};
// Announce
println!("{}", ndjson_line(
"setup",
"info",
&format!("executing setup script: {}", setup_rel_or_abs),
Some(serde_json::json!({"path": setup_rel_or_abs}))
));
if !fs::try_exists(&script).await.into_diagnostic()? {
eprintln!("{}", ndjson_line(
"setup",
"error",
&format!("setup script not found at {}", script),
Some(serde_json::json!({"path": setup_rel_or_abs}))
));
return Ok(1);
}
// Best-effort chmod: the `|| true` masks chmod failures, but the `?` still
// propagates errors from run_shell itself (e.g. failure to spawn a shell).
let _ = run_shell(&format!("chmod +x {} || true", script)).await?;
// `-l` gives a login shell so PATH changes from earlier setup are visible.
let mut cmd = Command::new("/bin/sh");
cmd.arg("-lc")
.arg(format!("set -e; cd {workdir}; {}", script))
.stdout(Stdio::piped())
.stderr(Stdio::piped());
let mut child = cmd.spawn().into_diagnostic()?;
// Stream output as setup_run. `read_until` + from_utf8_lossy tolerates
// non-UTF-8 bytes in the script's output.
if let Some(stdout) = child.stdout.take() {
let mut reader = BufReader::new(stdout);
tokio::spawn(async move {
loop {
let mut buf = Vec::with_capacity(256);
match reader.read_until(b'\n', &mut buf).await {
Ok(0) => break,
Ok(_) => {
let line = String::from_utf8_lossy(&buf).trim_end_matches(['\n', '\r']).to_string();
println!("{}", ndjson_line("setup_run", "info", &line, None));
}
Err(e) => {
eprintln!("{}", ndjson_line("setup_run", "error", &format!("error reading stdout: {}", e), None));
break;
}
}
}
});
}
// stderr reader task: same shape, emitted at error level.
if let Some(stderr) = child.stderr.take() {
let mut reader = BufReader::new(stderr);
tokio::spawn(async move {
loop {
let mut buf = Vec::with_capacity(256);
match reader.read_until(b'\n', &mut buf).await {
Ok(0) => break,
Ok(_) => {
let line = String::from_utf8_lossy(&buf).trim_end_matches(['\n', '\r']).to_string();
eprintln!("{}", ndjson_line("setup_run", "error", &line, None));
}
Err(e) => {
eprintln!("{}", ndjson_line("setup_run", "error", &format!("error reading stderr: {}", e), None));
break;
}
}
}
});
}
// 1 is substituted when the process died without an exit code (signal).
let status = child.wait().await.into_diagnostic()?;
let code = status.code().unwrap_or(1);
if code != 0 {
eprintln!("{}", ndjson_line("setup", "error", &format!("setup script exited with code {}", code), Some(serde_json::json!({"path": setup_rel_or_abs, "exit_code": code}))));
} else {
println!("{}", ndjson_line("setup", "info", &format!("completed setup: {}", setup_rel_or_abs), Some(serde_json::json!({"exit_code": code}))));
}
Ok(code)
}