15
ci/Cargo.toml
Normal file
15
ci/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
name = "ci"
version = "0.1.0"
edition = "2024"

[[bin]]
name = "ci"
path = "src/main.rs"

[dependencies]
# Dagger SDK: drives the containerized CI pipelines (build, test, publish).
dagger-sdk = "0.20"
# Ergonomic application-level error reporting for the binary.
eyre = "0.6"
# Async runtime required by dagger-sdk; "full" enables macros + I/O drivers.
tokio = { version = "1", features = ["full"] }
# CLI argument parsing via #[derive(Parser)].
clap = { version = "4", features = ["derive"] }
# Used to generate timestamp tags for published images.
chrono = "0.4"
||||
325
ci/src/main.rs
Normal file
325
ci/src/main.rs
Normal file
@@ -0,0 +1,325 @@
|
||||
use std::path::{Path, PathBuf};

use clap::Parser;
|
||||
|
||||
/// Name of the release binary built by the workspace and shipped in the image.
const BIN_NAME: &str = "forage-server";
/// Pinned mold linker release installed into the build container.
const MOLD_VERSION: &str = "2.40.4";
|
||||
|
||||
/// CI entry point: selects which pipeline to run.
#[derive(Parser)]
#[command(name = "ci")]
enum Cli {
    /// Run PR validation pipeline (check + test + build)
    Pr,
    /// Run main branch pipeline (check + test + build + publish)
    Main,
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> eyre::Result<()> {
|
||||
let cli = Cli::parse();
|
||||
|
||||
dagger_sdk::connect(|client| async move {
|
||||
match cli {
|
||||
Cli::Pr => run_pr(&client).await?,
|
||||
Cli::Main => run_main(&client).await?,
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_pr(client: &dagger_sdk::Query) -> eyre::Result<()> {
|
||||
eprintln!("==> PR pipeline: check + test + build");
|
||||
|
||||
let base = build_base(client).await?;
|
||||
|
||||
eprintln!("--- cargo check --workspace");
|
||||
base.clone()
|
||||
.with_exec(vec!["cargo", "check", "--workspace"])
|
||||
.sync()
|
||||
.await?;
|
||||
|
||||
eprintln!("--- cargo clippy");
|
||||
base.clone()
|
||||
.with_exec(vec![
|
||||
"cargo",
|
||||
"clippy",
|
||||
"--workspace",
|
||||
"--",
|
||||
"-D",
|
||||
"warnings",
|
||||
])
|
||||
.sync()
|
||||
.await?;
|
||||
|
||||
eprintln!("--- cargo fmt --check");
|
||||
base.clone()
|
||||
.with_exec(vec!["cargo", "fmt", "--", "--check"])
|
||||
.sync()
|
||||
.await?;
|
||||
|
||||
eprintln!("--- running tests");
|
||||
run_tests(&base).await?;
|
||||
|
||||
eprintln!("--- building release image");
|
||||
let _image = build_release_image(client, &base).await?;
|
||||
|
||||
eprintln!("==> PR pipeline complete");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_main(client: &dagger_sdk::Query) -> eyre::Result<()> {
|
||||
eprintln!("==> Main pipeline: check + test + build + publish");
|
||||
|
||||
let base = build_base(client).await?;
|
||||
|
||||
eprintln!("--- cargo check --workspace");
|
||||
base.clone()
|
||||
.with_exec(vec!["cargo", "check", "--workspace"])
|
||||
.sync()
|
||||
.await?;
|
||||
|
||||
eprintln!("--- running tests");
|
||||
run_tests(&base).await?;
|
||||
|
||||
eprintln!("--- building release image");
|
||||
let image = build_release_image(client, &base).await?;
|
||||
|
||||
eprintln!("--- publishing image");
|
||||
publish_image(client, &image).await?;
|
||||
|
||||
eprintln!("==> Main pipeline complete");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load only Rust-relevant source files from host.
|
||||
/// Using include patterns prevents cache busting from unrelated file changes.
|
||||
fn load_source(client: &dagger_sdk::Query) -> eyre::Result<dagger_sdk::Directory> {
|
||||
let src = client.host().directory_opts(
|
||||
".",
|
||||
dagger_sdk::HostDirectoryOptsBuilder::default()
|
||||
.include(vec![
|
||||
"**/*.rs",
|
||||
"**/Cargo.toml",
|
||||
"Cargo.lock",
|
||||
".sqlx/**",
|
||||
"**/*.sql",
|
||||
"**/*.toml",
|
||||
"templates/**",
|
||||
"static/**",
|
||||
])
|
||||
.build()?,
|
||||
);
|
||||
Ok(src)
|
||||
}
|
||||
|
||||
/// Load dependency-only source (Cargo.toml + Cargo.lock + .sqlx, no .rs or tests).
|
||||
fn load_dep_source(client: &dagger_sdk::Query) -> eyre::Result<dagger_sdk::Directory> {
|
||||
let src = client.host().directory_opts(
|
||||
".",
|
||||
dagger_sdk::HostDirectoryOptsBuilder::default()
|
||||
.include(vec!["**/Cargo.toml", "Cargo.lock", ".sqlx/**"])
|
||||
.build()?,
|
||||
);
|
||||
Ok(src)
|
||||
}
|
||||
|
||||
/// Create skeleton source files so cargo can resolve deps without real source.
|
||||
fn create_skeleton_files(client: &dagger_sdk::Query) -> eyre::Result<dagger_sdk::Directory> {
|
||||
let main_content = r#"fn main() { panic!("skeleton"); }"#;
|
||||
let lib_content = r#"pub fn _skeleton() {}"#;
|
||||
|
||||
let crate_paths = discover_crates()?;
|
||||
let mut dir = client.directory();
|
||||
|
||||
for crate_path in &crate_paths {
|
||||
let src_dir = crate_path.join("src");
|
||||
dir = dir.with_new_file(
|
||||
src_dir.join("main.rs").to_string_lossy().to_string(),
|
||||
main_content,
|
||||
);
|
||||
dir = dir.with_new_file(
|
||||
src_dir.join("lib.rs").to_string_lossy().to_string(),
|
||||
lib_content,
|
||||
);
|
||||
}
|
||||
|
||||
// Also add skeleton for ci/ crate itself.
|
||||
dir = dir.with_new_file("ci/src/main.rs".to_string(), main_content);
|
||||
|
||||
Ok(dir)
|
||||
}
|
||||
|
||||
/// Discover workspace crate directories by finding Cargo.toml files.
|
||||
fn discover_crates() -> eyre::Result<Vec<PathBuf>> {
|
||||
let mut crate_paths = Vec::new();
|
||||
let root = PathBuf::from("crates");
|
||||
if root.is_dir() {
|
||||
find_crates_recursive(&root, &mut crate_paths)?;
|
||||
}
|
||||
Ok(crate_paths)
|
||||
}
|
||||
|
||||
fn find_crates_recursive(dir: &PathBuf, out: &mut Vec<PathBuf>) -> eyre::Result<()> {
|
||||
if dir.join("Cargo.toml").exists() {
|
||||
out.push(dir.clone());
|
||||
}
|
||||
for entry in std::fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
if entry.file_type()?.is_dir() {
|
||||
let name = entry.file_name();
|
||||
if name == "target" || name == "node_modules" {
|
||||
continue;
|
||||
}
|
||||
find_crates_recursive(&entry.path(), out)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Build the base Rust container with all deps cached.
|
||||
async fn build_base(client: &dagger_sdk::Query) -> eyre::Result<dagger_sdk::Container> {
|
||||
let src = load_source(client)?;
|
||||
let dep_src = load_dep_source(client)?;
|
||||
let skeleton = create_skeleton_files(client)?;
|
||||
|
||||
let dep_src_with_skeleton = dep_src.with_directory(".", skeleton);
|
||||
|
||||
// Base rust image with build tools.
|
||||
let rust_base = client
|
||||
.container()
|
||||
.from("rust:1.85-bookworm")
|
||||
.with_exec(vec!["apt", "update"])
|
||||
.with_exec(vec!["apt", "install", "-y", "clang", "wget"])
|
||||
// Install mold linker.
|
||||
.with_exec(vec![
|
||||
"wget",
|
||||
"-q",
|
||||
&format!(
|
||||
"https://github.com/rui314/mold/releases/download/v{MOLD_VERSION}/mold-{MOLD_VERSION}-x86_64-linux.tar.gz"
|
||||
),
|
||||
])
|
||||
.with_exec(vec![
|
||||
"tar",
|
||||
"-xf",
|
||||
&format!("mold-{MOLD_VERSION}-x86_64-linux.tar.gz"),
|
||||
])
|
||||
.with_exec(vec![
|
||||
"mv",
|
||||
&format!("mold-{MOLD_VERSION}-x86_64-linux/bin/mold"),
|
||||
"/usr/bin/mold",
|
||||
]);
|
||||
|
||||
// Step 1: build deps with skeleton source (cacheable layer).
|
||||
let prebuild = rust_base
|
||||
.clone()
|
||||
.with_workdir("/mnt/src")
|
||||
.with_env_variable("SQLX_OFFLINE", "true")
|
||||
.with_directory("/mnt/src", dep_src_with_skeleton)
|
||||
.with_exec(vec!["cargo", "build", "--release", "--bin", BIN_NAME]);
|
||||
|
||||
// Step 2: copy cargo registry from prebuild (avoids re-downloading deps).
|
||||
let build_container = rust_base
|
||||
.with_workdir("/mnt/src")
|
||||
.with_env_variable("SQLX_OFFLINE", "true")
|
||||
.with_directory("/usr/local/cargo", prebuild.directory("/usr/local/cargo"))
|
||||
.with_directory("/mnt/src/", src);
|
||||
|
||||
Ok(build_container)
|
||||
}
|
||||
|
||||
/// Run tests.
|
||||
async fn run_tests(base: &dagger_sdk::Container) -> eyre::Result<()> {
|
||||
base.clone()
|
||||
.with_exec(vec!["cargo", "test", "--workspace"])
|
||||
.sync()
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Build release binary and package into a slim image.
|
||||
async fn build_release_image(
|
||||
client: &dagger_sdk::Query,
|
||||
base: &dagger_sdk::Container,
|
||||
) -> eyre::Result<dagger_sdk::Container> {
|
||||
let built = base
|
||||
.clone()
|
||||
.with_exec(vec!["cargo", "build", "--release", "--bin", BIN_NAME]);
|
||||
|
||||
let binary = built.file(format!("/mnt/src/target/release/{BIN_NAME}"));
|
||||
|
||||
// Load templates and static assets from host for the runtime image.
|
||||
let templates = client.host().directory("templates");
|
||||
let static_assets = client.host().directory("static");
|
||||
|
||||
// Distroless cc-debian12 matches the build image's glibc
|
||||
// and includes libgcc + ca-certificates with no shell or package manager.
|
||||
let final_image = client
|
||||
.container()
|
||||
.from("gcr.io/distroless/cc-debian12")
|
||||
.with_file(format!("/usr/local/bin/{BIN_NAME}"), binary)
|
||||
.with_directory("/templates", templates)
|
||||
.with_directory("/static", static_assets)
|
||||
.with_env_variable("FORAGE_TEMPLATES_PATH", "/templates");
|
||||
|
||||
final_image.sync().await?;
|
||||
|
||||
// Set the final entrypoint for the published image.
|
||||
let final_image = final_image.with_entrypoint(vec![BIN_NAME]);
|
||||
|
||||
eprintln!("--- release image built successfully");
|
||||
Ok(final_image)
|
||||
}
|
||||
|
||||
/// Publish image to container registry with latest, commit, and timestamp tags.
|
||||
async fn publish_image(
|
||||
client: &dagger_sdk::Query,
|
||||
image: &dagger_sdk::Container,
|
||||
) -> eyre::Result<()> {
|
||||
let registry = std::env::var("CI_REGISTRY").unwrap_or_else(|_| "registry.forage.sh".into());
|
||||
let user = std::env::var("CI_REGISTRY_USER").unwrap_or_else(|_| "forage".into());
|
||||
let image_name = std::env::var("CI_IMAGE_NAME")
|
||||
.unwrap_or_else(|_| format!("{registry}/{user}/forage-server"));
|
||||
|
||||
let password = std::env::var("CI_REGISTRY_PASSWORD")
|
||||
.map_err(|_| eyre::eyre!("CI_REGISTRY_PASSWORD must be set for publishing"))?;
|
||||
|
||||
let commit = git_short_hash()?;
|
||||
let timestamp = chrono::Utc::now().format("%Y%m%d%H%M%S").to_string();
|
||||
|
||||
let tags = vec!["latest".to_string(), commit, timestamp];
|
||||
|
||||
let authed = image.clone().with_registry_auth(
|
||||
®istry,
|
||||
&user,
|
||||
client.set_secret("registry-password", &password),
|
||||
);
|
||||
|
||||
for tag in &tags {
|
||||
let image_ref = format!("{image_name}:{tag}");
|
||||
authed
|
||||
.publish_opts(
|
||||
&image_ref,
|
||||
dagger_sdk::ContainerPublishOptsBuilder::default().build()?,
|
||||
)
|
||||
.await?;
|
||||
eprintln!("--- published {image_ref}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the short git commit hash from the host.
|
||||
fn git_short_hash() -> eyre::Result<String> {
|
||||
let output = std::process::Command::new("git")
|
||||
.args(["rev-parse", "--short", "HEAD"])
|
||||
.output()?;
|
||||
let hash = String::from_utf8(output.stdout)?.trim().to_string();
|
||||
if hash.is_empty() {
|
||||
return Err(eyre::eyre!("could not determine git commit hash"));
|
||||
}
|
||||
Ok(hash)
|
||||
}
|
||||
Reference in New Issue
Block a user