diff --git a/.playwright-mcp/console-2026-03-07T23-04-16-205Z.log b/.playwright-mcp/console-2026-03-07T23-04-16-205Z.log new file mode 100644 index 0000000..a3b9632 --- /dev/null +++ b/.playwright-mcp/console-2026-03-07T23-04-16-205Z.log @@ -0,0 +1 @@ +[ 72ms] [ERROR] Failed to load resource: the server responded with a status of 404 (Not Found) @ http://localhost:3000/favicon.ico:0 diff --git a/.playwright-mcp/console-2026-03-07T23-06-48-917Z.log b/.playwright-mcp/console-2026-03-07T23-06-48-917Z.log new file mode 100644 index 0000000..99da8b8 --- /dev/null +++ b/.playwright-mcp/console-2026-03-07T23-06-48-917Z.log @@ -0,0 +1 @@ +[ 9ms] [ERROR] Failed to load resource: the server responded with a status of 404 (Not Found) @ http://localhost:3000/users/kjuulh:0 diff --git a/.playwright-mcp/console-2026-03-07T23-08-47-266Z.log b/.playwright-mcp/console-2026-03-07T23-08-47-266Z.log new file mode 100644 index 0000000..b5186e4 --- /dev/null +++ b/.playwright-mcp/console-2026-03-07T23-08-47-266Z.log @@ -0,0 +1 @@ +[ 7745ms] [ERROR] Failed to load resource: the server responded with a status of 404 (Not Found) @ http://localhost:3000/users/kjuulh:0 diff --git a/.playwright-mcp/console-2026-03-07T23-28-21-144Z.log b/.playwright-mcp/console-2026-03-07T23-28-21-144Z.log new file mode 100644 index 0000000..7d44f01 --- /dev/null +++ b/.playwright-mcp/console-2026-03-07T23-28-21-144Z.log @@ -0,0 +1 @@ +[ 243429ms] [ERROR] Failed to load resource: the server responded with a status of 404 (Not Found) @ http://localhost:3000/users/kjuulh:0 diff --git a/.playwright-mcp/console-2026-03-08T13-09-32-749Z.log b/.playwright-mcp/console-2026-03-08T13-09-32-749Z.log new file mode 100644 index 0000000..b96515c --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T13-09-32-749Z.log @@ -0,0 +1 @@ +[ 11ms] [ERROR] Failed to load resource: the server responded with a status of 403 (Forbidden) @ http://localhost:3000/orgs/testorg/projects/my-api/policies:0 diff --git 
a/.playwright-mcp/console-2026-03-08T14-22-14-670Z.log b/.playwright-mcp/console-2026-03-08T14-22-14-670Z.log new file mode 100644 index 0000000..0d80f56 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T14-22-14-670Z.log @@ -0,0 +1 @@ +[ 83695ms] [ERROR] Failed to load resource: the server responded with a status of 500 (Internal Server Error) @ http://localhost:3000/orgs/rawpotion/projects/other-example/pipelines:0 diff --git a/.playwright-mcp/console-2026-03-08T14-24-37-198Z.log b/.playwright-mcp/console-2026-03-08T14-24-37-198Z.log new file mode 100644 index 0000000..a0f0225 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T14-24-37-198Z.log @@ -0,0 +1 @@ +[ 27797ms] [ERROR] Failed to load resource: the server responded with a status of 500 (Internal Server Error) @ http://localhost:3000/orgs/rawpotion/projects/other-example/pipelines:0 diff --git a/.playwright-mcp/console-2026-03-08T15-10-54-843Z.log b/.playwright-mcp/console-2026-03-08T15-10-54-843Z.log new file mode 100644 index 0000000..ffbe8ce --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T15-10-54-843Z.log @@ -0,0 +1,10 @@ +[ 183938ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 185942ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 189946ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 197960ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 213961ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 243962ms] [ERROR] Failed to load resource: 
net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 273963ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 303968ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 333973ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 +[ 363977ms] [ERROR] Failed to load resource: the server responded with a status of 502 (Bad Gateway) @ http://localhost:3000/orgs/rawpotion/projects/other-example/events:0 diff --git a/.playwright-mcp/console-2026-03-08T18-58-10-322Z.log b/.playwright-mcp/console-2026-03-08T18-58-10-322Z.log new file mode 100644 index 0000000..41f899f --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T18-58-10-322Z.log @@ -0,0 +1 @@ +[ 69ms] [ERROR] Failed to load resource: the server responded with a status of 404 (Not Found) @ http://localhost:3000/favicon.ico:0 diff --git a/.playwright-mcp/console-2026-03-08T19-03-18-217Z.log b/.playwright-mcp/console-2026-03-08T19-03-18-217Z.log new file mode 100644 index 0000000..1722d64 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T19-03-18-217Z.log @@ -0,0 +1,4 @@ +[ 42748ms] [ERROR] Failed to load resource: net::ERR_INCOMPLETE_CHUNKED_ENCODING @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 43749ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 49108ms] [ERROR] Failed to load resource: net::ERR_INCOMPLETE_CHUNKED_ENCODING @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 50109ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ 
http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 diff --git a/.playwright-mcp/console-2026-03-08T19-16-00-412Z.log b/.playwright-mcp/console-2026-03-08T19-16-00-412Z.log new file mode 100644 index 0000000..edaca28 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T19-16-00-412Z.log @@ -0,0 +1 @@ +[ 281704ms] [ERROR] Failed to load resource: the server responded with a status of 500 (Internal Server Error) @ http://localhost:3000/orgs/rawpotion/projects/service-example:0 diff --git a/.playwright-mcp/console-2026-03-08T20-27-09-753Z.log b/.playwright-mcp/console-2026-03-08T20-27-09-753Z.log new file mode 100644 index 0000000..f8d1c17 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T20-27-09-753Z.log @@ -0,0 +1,10 @@ +[ 136576ms] [ERROR] Failed to load resource: net::ERR_INCOMPLETE_CHUNKED_ENCODING @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 137577ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 139578ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 152714ms] [ERROR] Failed to load resource: net::ERR_INCOMPLETE_CHUNKED_ENCODING @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 153715ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 601126ms] [ERROR] Failed to load resource: net::ERR_INCOMPLETE_CHUNKED_ENCODING @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 602127ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 604128ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 608129ms] [ERROR] Failed to load 
resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 +[ 616130ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/orgs/rawpotion/projects/service-example/events:0 diff --git a/.playwright-mcp/console-2026-03-08T21-01-30-414Z.log b/.playwright-mcp/console-2026-03-08T21-01-30-414Z.log new file mode 100644 index 0000000..23ff7b5 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T21-01-30-414Z.log @@ -0,0 +1,5 @@ +[ 80067ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 +[ 90065ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 +[ 100065ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 +[ 110065ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 +[ 120065ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 diff --git a/.playwright-mcp/console-2026-03-08T21-05-06-279Z.log b/.playwright-mcp/console-2026-03-08T21-05-06-279Z.log new file mode 100644 index 0000000..7964b38 --- /dev/null +++ b/.playwright-mcp/console-2026-03-08T21-05-06-279Z.log @@ -0,0 +1 @@ +[ 1030036ms] [ERROR] Failed to load resource: net::ERR_CONNECTION_REFUSED @ http://localhost:3000/notifications?_partial=1:0 diff --git a/.playwright-mcp/element-2026-03-08T21-50-01-232Z.png b/.playwright-mcp/element-2026-03-08T21-50-01-232Z.png new file mode 100644 index 0000000..c9be955 Binary files /dev/null and b/.playwright-mcp/element-2026-03-08T21-50-01-232Z.png differ diff --git a/.playwright-mcp/element-2026-03-08T21-50-44-447Z.png b/.playwright-mcp/element-2026-03-08T21-50-44-447Z.png new file mode 100644 index 0000000..f1f4d54 Binary files /dev/null and 
b/.playwright-mcp/element-2026-03-08T21-50-44-447Z.png differ diff --git a/.playwright-mcp/element-2026-03-08T21-53-14-160Z.png b/.playwright-mcp/element-2026-03-08T21-53-14-160Z.png new file mode 100644 index 0000000..c39c6dd Binary files /dev/null and b/.playwright-mcp/element-2026-03-08T21-53-14-160Z.png differ diff --git a/.playwright-mcp/element-2026-03-08T21-54-05-889Z.png b/.playwright-mcp/element-2026-03-08T21-54-05-889Z.png new file mode 100644 index 0000000..ae2755b Binary files /dev/null and b/.playwright-mcp/element-2026-03-08T21-54-05-889Z.png differ diff --git a/.playwright-mcp/element-2026-03-08T21-55-10-800Z.png b/.playwright-mcp/element-2026-03-08T21-55-10-800Z.png new file mode 100644 index 0000000..b1229f9 Binary files /dev/null and b/.playwright-mcp/element-2026-03-08T21-55-10-800Z.png differ diff --git a/.playwright-mcp/page-2026-03-08T21-56-48-888Z.png b/.playwright-mcp/page-2026-03-08T21-56-48-888Z.png new file mode 100644 index 0000000..62d636a Binary files /dev/null and b/.playwright-mcp/page-2026-03-08T21-56-48-888Z.png differ diff --git a/Cargo.lock b/Cargo.lock index 3e1400a..7ad9fb5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,6 +195,7 @@ dependencies = [ "axum-core", "bytes", "cookie", + "form_urlencoded", "futures-util", "http 1.4.0", "http-body 1.0.1", @@ -203,6 +204,8 @@ dependencies = [ "pin-project-lite", "rustversion", "serde_core", + "serde_html_form", + "serde_path_to_error", "tower-layer", "tower-service", "tracing", @@ -510,7 +513,7 @@ dependencies = [ "hex", "hex-literal", "platform-info", - "reqwest", + "reqwest 0.11.27", "serde", "serde_graphql_input", "serde_json", @@ -832,17 +835,24 @@ dependencies = [ "forage-core", "forage-db", "forage-grpc", + "futures-util", "minijinja", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry_sdk", "serde", "serde_json", "sqlx", "time", "tokio", + "tokio-stream", "tonic", "tower", "tower-http", "tracing", + "tracing-opentelemetry", "tracing-subscriber", + "urlencoding", 
"uuid", ] @@ -1026,7 +1036,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cdf7b487d864c2939b23902291a5041bc4a84418268f25fda1c8d4e15ad8fa" dependencies = [ "graphql_query_derive", - "reqwest", + "reqwest 0.11.27", "serde", "serde_json", ] @@ -1327,13 +1337,16 @@ version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", "futures-util", "http 1.4.0", "http-body 1.0.1", "hyper 1.8.1", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2 0.6.3", "tokio", @@ -1503,6 +1516,16 @@ version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -1768,6 +1791,82 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" +[[package]] +name = "opentelemetry" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "opentelemetry-http" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" +dependencies = [ + "async-trait", + "bytes", + "http 1.4.0", + "opentelemetry", + "reqwest 0.12.28", +] + 
+[[package]] +name = "opentelemetry-otlp" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf" +dependencies = [ + "http 1.4.0", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "reqwest 0.12.28", + "thiserror 2.0.18", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", + "tonic-prost", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd" +dependencies = [ + "futures-channel", + "futures-executor", + "futures-util", + "opentelemetry", + "percent-encoding", + "rand 0.9.2", + "thiserror 2.0.18", + "tokio", + "tokio-stream", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -2137,6 +2236,40 @@ dependencies = [ "winreg", ] +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.8.1", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "ring" version = "0.17.14" @@ -2332,6 +2465,19 @@ dependencies = [ "tracing", ] 
+[[package]] +name = "serde_html_form" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2f2d7ff8a2140333718bb329f5c40fc5f0865b84c426183ce14c97d2ab8154f" +dependencies = [ + "form_urlencoded", + "indexmap", + "itoa", + "ryu", + "serde_core", +] + [[package]] name = "serde_json" version = "1.0.149" @@ -2751,6 +2897,9 @@ name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] [[package]] name = "synstructure" @@ -3050,12 +3199,14 @@ dependencies = [ "http-body-util", "http-range-header", "httpdate", + "iri-string", "mime", "mime_guess", "percent-encoding", "pin-project-lite", "tokio", "tokio-util", + "tower", "tower-layer", "tower-service", "tracing", @@ -3117,6 +3268,22 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac28f2d093c6c477eaa76b23525478f38de514fa9aeb1285738d4b97a9552fc" +dependencies = [ + "js-sys", + "opentelemetry", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + [[package]] name = "tracing-subscriber" version = "0.3.22" @@ -3204,6 +3371,12 @@ dependencies = [ "serde", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + [[package]] name = "utf8_iter" version = "1.0.4" @@ -3401,6 +3574,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webpki-roots" 
version = "0.25.4" diff --git a/Cargo.toml b/Cargo.toml index 97f0ff6..1b02e01 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ tokio = { version = "1", features = ["full"] } tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } axum = { version = "0.8", features = ["macros"] } -axum-extra = { version = "0.10", features = ["cookie"] } +axum-extra = { version = "0.10", features = ["cookie", "form"] } minijinja = { version = "2", features = ["loader"] } sqlx = { version = "0.8", features = ["runtime-tokio", "tls-rustls", "postgres", "migrate", "uuid", "chrono"] } uuid = { version = "1", features = ["v4", "serde"] } @@ -31,3 +31,7 @@ tonic-prost = "0.14" async-trait = "0.1" rand = "0.9" time = "0.3" +opentelemetry = "0.31" +opentelemetry_sdk = { version = "0.31", features = ["rt-tokio"] } +opentelemetry-otlp = { version = "0.31", features = ["grpc-tonic"] } +tracing-opentelemetry = "0.32" diff --git a/crates/forage-core/src/auth/mod.rs b/crates/forage-core/src/auth/mod.rs index 2b53f90..8b92383 100644 --- a/crates/forage-core/src/auth/mod.rs +++ b/crates/forage-core/src/auth/mod.rs @@ -20,6 +20,14 @@ pub struct User { pub emails: Vec, } +/// Public user profile (no emails). 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UserProfile { + pub user_id: String, + pub username: String, + pub created_at: Option, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct UserEmail { pub email: String, @@ -91,6 +99,12 @@ pub trait ForestAuth: Send + Sync { async fn get_user(&self, access_token: &str) -> Result; + async fn get_user_by_username( + &self, + access_token: &str, + username: &str, + ) -> Result; + async fn list_tokens( &self, access_token: &str, diff --git a/crates/forage-core/src/platform/mod.rs b/crates/forage-core/src/platform/mod.rs index 8fd33a8..3422942 100644 --- a/crates/forage-core/src/platform/mod.rs +++ b/crates/forage-core/src/platform/mod.rs @@ -69,6 +69,8 @@ pub struct ArtifactDestination { pub type_name: Option, #[serde(default)] pub type_version: Option, + #[serde(default)] + pub status: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -79,6 +81,16 @@ pub struct OrgMember { pub joined_at: Option, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Environment { + pub id: String, + pub organisation: String, + pub name: String, + pub description: Option, + pub sort_order: i32, + pub created_at: String, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Destination { pub name: String, @@ -97,6 +109,201 @@ pub struct DestinationType { pub version: u64, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DestinationState { + pub destination_id: String, + pub destination_name: String, + pub environment: String, + pub release_id: Option, + pub artifact_id: Option, + pub status: Option, + pub error_message: Option, + pub queued_at: Option, + pub completed_at: Option, + pub queue_position: Option, + #[serde(default)] + pub started_at: Option, +} + +/// Runtime status of a single pipeline stage. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PipelineRunStageState { + pub stage_id: String, + pub depends_on: Vec, + pub stage_type: String, // "deploy" or "wait" + pub status: String, // "PENDING", "RUNNING", "SUCCEEDED", "FAILED", "CANCELLED" + pub environment: Option, + pub duration_seconds: Option, + pub queued_at: Option, + pub started_at: Option, + pub completed_at: Option, + pub error_message: Option, + pub wait_until: Option, + #[serde(default)] + pub release_ids: Vec, +} + +/// Combined response from get_destination_states: destinations only. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct DeploymentStates { + pub destinations: Vec, +} + +/// Full state of a release intent: pipeline stages + individual release steps. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReleaseIntentState { + pub release_intent_id: String, + pub artifact_id: String, + pub project: String, + pub created_at: String, + pub stages: Vec, + pub steps: Vec, +} + +/// Status of an individual release step (deploy work item). 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReleaseStepState { + pub release_id: String, + pub stage_id: Option, + pub destination_name: String, + pub environment: String, + pub status: String, + pub queued_at: Option, + pub assigned_at: Option, + pub started_at: Option, + pub completed_at: Option, + pub error_message: Option, +} + +// ── Triggers (auto-release triggers) ──────────────────────────────── + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Trigger { + pub id: String, + pub name: String, + pub enabled: bool, + pub branch_pattern: Option, + pub title_pattern: Option, + pub author_pattern: Option, + pub commit_message_pattern: Option, + pub source_type_pattern: Option, + pub target_environments: Vec, + pub target_destinations: Vec, + pub force_release: bool, + pub use_pipeline: bool, + pub created_at: String, + pub updated_at: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreateTriggerInput { + pub name: String, + pub branch_pattern: Option, + pub title_pattern: Option, + pub author_pattern: Option, + pub commit_message_pattern: Option, + pub source_type_pattern: Option, + pub target_environments: Vec, + pub target_destinations: Vec, + pub force_release: bool, + pub use_pipeline: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UpdateTriggerInput { + pub enabled: Option, + pub branch_pattern: Option, + pub title_pattern: Option, + pub author_pattern: Option, + pub commit_message_pattern: Option, + pub source_type_pattern: Option, + pub target_environments: Vec, + pub target_destinations: Vec, + pub force_release: Option, + pub use_pipeline: Option, +} + +// ── Policies (deployment gating) ──────────────────────────────────── + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Policy { + pub id: String, + pub name: String, + pub enabled: bool, + pub policy_type: String, + pub config: PolicyConfig, + pub created_at: String, + pub updated_at: String, +} + 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum PolicyConfig { + SoakTime { + source_environment: String, + target_environment: String, + duration_seconds: i64, + }, + BranchRestriction { + target_environment: String, + branch_pattern: String, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreatePolicyInput { + pub name: String, + pub config: PolicyConfig, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UpdatePolicyInput { + pub enabled: Option, + pub config: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PolicyEvaluation { + pub policy_name: String, + pub policy_type: String, + pub passed: bool, + pub reason: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PipelineStage { + pub id: String, + pub depends_on: Vec, + pub config: PipelineStageConfig, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum PipelineStageConfig { + Deploy { environment: String }, + Wait { duration_seconds: i64 }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ReleasePipeline { + pub id: String, + pub name: String, + pub enabled: bool, + pub stages: Vec, + pub created_at: String, + pub updated_at: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreateReleasePipelineInput { + pub name: String, + pub stages: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UpdateReleasePipelineInput { + pub enabled: Option, + pub stages: Option>, +} + #[derive(Debug, Clone, thiserror::Error)] pub enum PlatformError { #[error("not authenticated")] @@ -175,11 +382,170 @@ pub trait ForestPlatform: Send + Sync { slug: &str, ) -> Result; + async fn list_environments( + &self, + access_token: &str, + organisation: &str, + ) -> Result, PlatformError>; + async fn list_destinations( &self, access_token: &str, organisation: &str, ) -> Result, PlatformError>; + + async fn create_environment( + &self, + access_token: &str, + organisation: 
&str, + name: &str, + description: Option<&str>, + sort_order: i32, + ) -> Result; + + async fn create_destination( + &self, + access_token: &str, + organisation: &str, + name: &str, + environment: &str, + metadata: &std::collections::HashMap, + dest_type: Option<&DestinationType>, + ) -> Result<(), PlatformError>; + + async fn update_destination( + &self, + access_token: &str, + name: &str, + metadata: &std::collections::HashMap, + ) -> Result<(), PlatformError>; + + async fn get_destination_states( + &self, + access_token: &str, + organisation: &str, + project: Option<&str>, + ) -> Result; + + async fn get_release_intent_states( + &self, + access_token: &str, + organisation: &str, + project: Option<&str>, + include_completed: bool, + ) -> Result, PlatformError>; + + async fn release_artifact( + &self, + access_token: &str, + artifact_id: &str, + destinations: &[String], + environments: &[String], + use_pipeline: bool, + ) -> Result<(), PlatformError>; + + async fn list_triggers( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError>; + + async fn create_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreateTriggerInput, + ) -> Result; + + async fn update_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdateTriggerInput, + ) -> Result; + + async fn delete_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError>; + + async fn list_policies( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError>; + + async fn create_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreatePolicyInput, + ) -> Result; + + async fn update_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdatePolicyInput, + ) -> Result; + + async fn 
delete_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError>; + + async fn list_release_pipelines( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError>; + + async fn create_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreateReleasePipelineInput, + ) -> Result; + + async fn update_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdateReleasePipelineInput, + ) -> Result; + + async fn delete_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError>; + + /// Get the spec (forest.cue) content for an artifact. Returns empty string if no spec was uploaded. + async fn get_artifact_spec( + &self, + access_token: &str, + artifact_id: &str, + ) -> Result; } #[cfg(test)] diff --git a/crates/forage-grpc/src/grpc/forest/v1/forest.v1.rs b/crates/forage-grpc/src/grpc/forest/v1/forest.v1.rs index 0b528fa..8158643 100644 --- a/crates/forage-grpc/src/grpc/forest/v1/forest.v1.rs +++ b/crates/forage-grpc/src/grpc/forest/v1/forest.v1.rs @@ -1,5 +1,717 @@ // @generated // This file is @generated by prost-build. 
+// ─── Core types ────────────────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct App { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation_id: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub description: ::prost::alloc::string::String, + #[prost(string, repeated, tag="5")] + pub permissions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, tag="6")] + pub suspended: bool, + #[prost(message, optional, tag="7")] + pub created_at: ::core::option::Option<::prost_types::Timestamp>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct AppToken { + #[prost(string, tag="1")] + pub token_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, tag="3")] + pub expires_at: ::core::option::Option<::prost_types::Timestamp>, + #[prost(message, optional, tag="4")] + pub last_used: ::core::option::Option<::prost_types::Timestamp>, + #[prost(bool, tag="5")] + pub revoked: bool, + #[prost(message, optional, tag="6")] + pub created_at: ::core::option::Option<::prost_types::Timestamp>, +} +// ─── App lifecycle ─────────────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateAppRequest { + #[prost(string, tag="1")] + pub organisation_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub description: ::prost::alloc::string::String, + #[prost(string, repeated, tag="4")] + pub permissions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateAppResponse { + #[prost(message, optional, tag="1")] + pub app: 
::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetAppRequest { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetAppResponse { + #[prost(message, optional, tag="1")] + pub app: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListAppsRequest { + #[prost(string, tag="1")] + pub organisation_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListAppsResponse { + #[prost(message, repeated, tag="1")] + pub apps: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteAppRequest { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteAppResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SuspendAppRequest { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, + #[prost(bool, tag="2")] + pub suspended: bool, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SuspendAppResponse { +} +// ─── App tokens ────────────────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateAppTokenRequest { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + /// 0 = no expiry + #[prost(int64, tag="3")] + pub expires_in_seconds: i64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateAppTokenResponse { + #[prost(message, optional, tag="1")] + pub token: ::core::option::Option, + /// only returned on creation + #[prost(string, tag="2")] + pub raw_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub 
struct ListAppTokensRequest { + #[prost(string, tag="1")] + pub app_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListAppTokensResponse { + #[prost(message, repeated, tag="1")] + pub tokens: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct RevokeAppTokenRequest { + #[prost(string, tag="1")] + pub token_id: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct RevokeAppTokenResponse { +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct BeginUploadArtifactRequest { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct BeginUploadArtifactResponse { + #[prost(string, tag="1")] + pub upload_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UploadArtifactRequest { + #[prost(string, tag="1")] + pub upload_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub env: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub destination: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub file_name: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub file_content: ::prost::alloc::string::String, + /// Category of the file: "deployment" (default), "spec", or "attachment" + #[prost(string, tag="6")] + pub category: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UploadArtifactResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CommitArtifactRequest { + #[prost(string, tag="1")] + pub upload_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CommitArtifactResponse { + #[prost(string, tag="1")] + pub artifact_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct 
GetArtifactFilesRequest { + #[prost(string, tag="1")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(string, optional, tag="2")] + pub category: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetArtifactFilesResponse { + #[prost(message, repeated, tag="1")] + pub files: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ArtifactFile { + #[prost(string, tag="1")] + pub file_name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub category: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub env: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub destination: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub content: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetArtifactSpecRequest { + #[prost(string, tag="1")] + pub artifact_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetArtifactSpecResponse { + #[prost(string, tag="1")] + pub content: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateDestinationRequest { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub environment: ::prost::alloc::string::String, + #[prost(map="string, string", tag="3")] + pub metadata: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(message, optional, tag="4")] + pub r#type: ::core::option::Option, + #[prost(string, tag="5")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateDestinationResponse { +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UpdateDestinationRequest { + #[prost(string, tag="1")] + pub name: 
::prost::alloc::string::String, + #[prost(map="string, string", tag="2")] + pub metadata: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateDestinationResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteDestinationRequest { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteDestinationResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetDestinationsRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetDestinationsResponse { + #[prost(message, repeated, tag="1")] + pub destinations: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListDestinationTypesRequest { +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListDestinationTypesResponse { + #[prost(message, repeated, tag="1")] + pub types: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Destination { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub environment: ::prost::alloc::string::String, + #[prost(map="string, string", tag="3")] + pub metadata: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(message, optional, tag="4")] + pub r#type: ::core::option::Option, + #[prost(string, tag="5")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DestinationType { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(uint64, tag="3")] 
+ pub version: u64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Environment { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub name: ::prost::alloc::string::String, + #[prost(string, optional, tag="4")] + pub description: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, tag="5")] + pub sort_order: i32, + #[prost(string, tag="6")] + pub created_at: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateEnvironmentRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(string, optional, tag="3")] + pub description: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, tag="4")] + pub sort_order: i32, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateEnvironmentResponse { + #[prost(message, optional, tag="1")] + pub environment: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetEnvironmentRequest { + #[prost(oneof="get_environment_request::Identifier", tags="1, 2")] + pub identifier: ::core::option::Option, +} +/// Nested message and enum types in `GetEnvironmentRequest`. 
+pub mod get_environment_request { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Identifier { + #[prost(string, tag="1")] + Id(::prost::alloc::string::String), + #[prost(message, tag="2")] + Lookup(super::EnvironmentLookup), + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct EnvironmentLookup { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetEnvironmentResponse { + #[prost(message, optional, tag="1")] + pub environment: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListEnvironmentsRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListEnvironmentsResponse { + #[prost(message, repeated, tag="1")] + pub environments: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateEnvironmentRequest { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, optional, tag="2")] + pub description: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, optional, tag="3")] + pub sort_order: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateEnvironmentResponse { + #[prost(message, optional, tag="1")] + pub environment: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteEnvironmentRequest { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteEnvironmentResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SubscribeEventsRequest { + #[prost(string, tag="1")] + pub organisation: 
::prost::alloc::string::String, + /// optional — empty means all projects in org + #[prost(string, tag="2")] + pub project: ::prost::alloc::string::String, + /// optional filter: "release", "destination", etc. + #[prost(string, repeated, tag="3")] + pub resource_types: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// optional filter: "created", "updated", etc. + #[prost(string, repeated, tag="4")] + pub actions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// 0 = latest only, >0 = replay from that sequence + #[prost(int64, tag="5")] + pub since_sequence: i64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SubscribeDurableRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + /// the registered subscription name + #[prost(string, tag="2")] + pub subscription_name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct AcknowledgeEventsRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub subscription_name: ::prost::alloc::string::String, + /// advance cursor to this sequence + #[prost(int64, tag="3")] + pub sequence: i64, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct AcknowledgeEventsResponse { + /// the new cursor value + #[prost(int64, tag="1")] + pub cursor: i64, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OrgEvent { + /// monotonic cursor — client stores this for reconnect + #[prost(int64, tag="1")] + pub sequence: i64, + /// UUID, dedup key + #[prost(string, tag="2")] + pub event_id: ::prost::alloc::string::String, + /// RFC 3339 + #[prost(string, tag="3")] + pub timestamp: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub organisation: ::prost::alloc::string::String, + /// empty for org-level events + #[prost(string, tag="5")] + pub project: ::prost::alloc::string::String, + /// 
"release", "destination", "environment", "pipeline", "artifact", "policy", "app", "organisation" + #[prost(string, tag="6")] + pub resource_type: ::prost::alloc::string::String, + /// "created", "updated", "deleted", "status_changed" + #[prost(string, tag="7")] + pub action: ::prost::alloc::string::String, + /// ID of the changed resource + #[prost(string, tag="8")] + pub resource_id: ::prost::alloc::string::String, + /// lightweight context (e.g. "status" → "SUCCEEDED") + #[prost(map="string, string", tag="9")] + pub metadata: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct EventSubscription { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub name: ::prost::alloc::string::String, + #[prost(string, repeated, tag="4")] + pub resource_types: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="5")] + pub actions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="6")] + pub projects: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// "active", "paused" + #[prost(string, tag="7")] + pub status: ::prost::alloc::string::String, + /// last acknowledged sequence + #[prost(int64, tag="8")] + pub cursor: i64, + #[prost(string, tag="9")] + pub created_at: ::prost::alloc::string::String, + #[prost(string, tag="10")] + pub updated_at: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateEventSubscriptionRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + /// empty = all + #[prost(string, repeated, tag="3")] + pub resource_types: 
::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// empty = all + #[prost(string, repeated, tag="4")] + pub actions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// empty = all projects in org + #[prost(string, repeated, tag="5")] + pub projects: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateEventSubscriptionResponse { + #[prost(message, optional, tag="1")] + pub subscription: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateEventSubscriptionRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + /// "active" or "paused" + #[prost(string, optional, tag="3")] + pub status: ::core::option::Option<::prost::alloc::string::String>, + /// To update filters, set update_filters = true and provide new values. + /// Empty arrays mean "all" (no filter). 
+ #[prost(bool, tag="4")] + pub update_filters: bool, + #[prost(string, repeated, tag="5")] + pub resource_types: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="6")] + pub actions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="7")] + pub projects: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateEventSubscriptionResponse { + #[prost(message, optional, tag="1")] + pub subscription: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteEventSubscriptionRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteEventSubscriptionResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListEventSubscriptionsRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListEventSubscriptionsResponse { + #[prost(message, repeated, tag="1")] + pub subscriptions: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetStatusRequest { +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetStatusResponse { +} +/// Rich context about the release that triggered the notification. +/// Integrations decide which fields to use. 
+#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ReleaseContext { + #[prost(string, tag="1")] + pub slug: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub project: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub release_intent_id: ::prost::alloc::string::String, + #[prost(string, tag="6")] + pub destination: ::prost::alloc::string::String, + #[prost(string, tag="7")] + pub environment: ::prost::alloc::string::String, + /// Source info + #[prost(string, tag="8")] + pub source_username: ::prost::alloc::string::String, + #[prost(string, tag="9")] + pub source_email: ::prost::alloc::string::String, + /// Git ref + #[prost(string, tag="10")] + pub commit_sha: ::prost::alloc::string::String, + #[prost(string, tag="11")] + pub commit_branch: ::prost::alloc::string::String, + /// Artifact context + #[prost(string, tag="12")] + pub context_title: ::prost::alloc::string::String, + #[prost(string, tag="13")] + pub context_description: ::prost::alloc::string::String, + #[prost(string, tag="14")] + pub context_web: ::prost::alloc::string::String, + /// Error info (populated on failure) + #[prost(string, tag="15")] + pub error_message: ::prost::alloc::string::String, + /// Number of destinations involved + #[prost(int32, tag="16")] + pub destination_count: i32, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Notification { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(enumeration="NotificationType", tag="2")] + pub notification_type: i32, + #[prost(string, tag="3")] + pub title: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub body: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="6")] + pub project: 
::prost::alloc::string::String, + #[prost(message, optional, tag="7")] + pub release_context: ::core::option::Option, + #[prost(string, tag="8")] + pub created_at: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct NotificationPreference { + #[prost(enumeration="NotificationType", tag="1")] + pub notification_type: i32, + #[prost(enumeration="NotificationChannel", tag="2")] + pub channel: i32, + #[prost(bool, tag="3")] + pub enabled: bool, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetNotificationPreferencesRequest { +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetNotificationPreferencesResponse { + #[prost(message, repeated, tag="1")] + pub preferences: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SetNotificationPreferenceRequest { + #[prost(enumeration="NotificationType", tag="1")] + pub notification_type: i32, + #[prost(enumeration="NotificationChannel", tag="2")] + pub channel: i32, + #[prost(bool, tag="3")] + pub enabled: bool, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SetNotificationPreferenceResponse { + #[prost(message, optional, tag="1")] + pub preference: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListenNotificationsRequest { + #[prost(string, optional, tag="1")] + pub organisation: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="2")] + pub project: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListNotificationsRequest { + #[prost(int32, tag="1")] + pub page_size: i32, + #[prost(string, tag="2")] + pub page_token: ::prost::alloc::string::String, + #[prost(string, optional, tag="3")] + pub organisation: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, 
optional, tag="4")] + pub project: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListNotificationsResponse { + #[prost(message, repeated, tag="1")] + pub notifications: ::prost::alloc::vec::Vec, + #[prost(string, tag="2")] + pub next_page_token: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum NotificationType { + Unspecified = 0, + ReleaseAnnotated = 1, + ReleaseStarted = 2, + ReleaseSucceeded = 3, + ReleaseFailed = 4, +} +impl NotificationType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "NOTIFICATION_TYPE_UNSPECIFIED", + Self::ReleaseAnnotated => "NOTIFICATION_TYPE_RELEASE_ANNOTATED", + Self::ReleaseStarted => "NOTIFICATION_TYPE_RELEASE_STARTED", + Self::ReleaseSucceeded => "NOTIFICATION_TYPE_RELEASE_SUCCEEDED", + Self::ReleaseFailed => "NOTIFICATION_TYPE_RELEASE_FAILED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NOTIFICATION_TYPE_UNSPECIFIED" => Some(Self::Unspecified), + "NOTIFICATION_TYPE_RELEASE_ANNOTATED" => Some(Self::ReleaseAnnotated), + "NOTIFICATION_TYPE_RELEASE_STARTED" => Some(Self::ReleaseStarted), + "NOTIFICATION_TYPE_RELEASE_SUCCEEDED" => Some(Self::ReleaseSucceeded), + "NOTIFICATION_TYPE_RELEASE_FAILED" => Some(Self::ReleaseFailed), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum NotificationChannel { + Unspecified = 0, + Cli = 1, + Slack = 2, +} +impl NotificationChannel { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "NOTIFICATION_CHANNEL_UNSPECIFIED", + Self::Cli => "NOTIFICATION_CHANNEL_CLI", + Self::Slack => "NOTIFICATION_CHANNEL_SLACK", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NOTIFICATION_CHANNEL_UNSPECIFIED" => Some(Self::Unspecified), + "NOTIFICATION_CHANNEL_CLI" => Some(Self::Cli), + "NOTIFICATION_CHANNEL_SLACK" => Some(Self::Slack), + _ => None, + } + } +} #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct Organisation { #[prost(string, tag="1")] @@ -188,6 +900,12 @@ pub struct ReleaseRequest { pub destinations: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, #[prost(string, repeated, tag="3")] pub environments: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, tag="4")] + pub force: bool, + /// When true, use the project's release pipeline (DAG) instead of + /// deploying directly to the specified destinations/environments. 
+ #[prost(bool, tag="5")] + pub use_pipeline: bool, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct ReleaseResponse { @@ -211,7 +929,7 @@ pub struct WaitReleaseRequest { } #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct WaitReleaseEvent { - #[prost(oneof="wait_release_event::Event", tags="1, 2")] + #[prost(oneof="wait_release_event::Event", tags="1, 2, 3")] pub event: ::core::option::Option, } /// Nested message and enum types in `WaitReleaseEvent`. @@ -222,8 +940,32 @@ pub mod wait_release_event { StatusUpdate(super::ReleaseStatusUpdate), #[prost(message, tag="2")] LogLine(super::ReleaseLogLine), + #[prost(message, tag="3")] + StageUpdate(super::PipelineStageUpdate), } } +/// Streamed in WaitRelease for pipeline releases: reports stage status changes. +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PipelineStageUpdate { + #[prost(string, tag="1")] + pub stage_id: ::prost::alloc::string::String, + /// "deploy", "wait" + #[prost(string, tag="2")] + pub stage_type: ::prost::alloc::string::String, + /// PENDING, ACTIVE, SUCCEEDED, FAILED, CANCELLED + #[prost(string, tag="3")] + pub status: ::prost::alloc::string::String, + #[prost(string, optional, tag="4")] + pub queued_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="5")] + pub started_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="6")] + pub completed_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub wait_until: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub error_message: ::core::option::Option<::prost::alloc::string::String>, +} #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct ReleaseStatusUpdate { #[prost(string, tag="1")] @@ -269,6 +1011,251 @@ pub struct GetProjectsResponse { pub projects: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, } 
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateProjectRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub project: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateProjectResponse { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetReleasesByActorRequest { + /// user_id or app_id + #[prost(string, tag="1")] + pub actor_id: ::prost::alloc::string::String, + /// "user" or "app" + #[prost(string, tag="2")] + pub actor_type: ::prost::alloc::string::String, + #[prost(int32, tag="3")] + pub page_size: i32, + #[prost(string, tag="4")] + pub page_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetReleasesByActorResponse { + #[prost(message, repeated, tag="1")] + pub releases: ::prost::alloc::vec::Vec, + #[prost(string, tag="2")] + pub next_page_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReleaseIntentSummary { + #[prost(string, tag="1")] + pub release_intent_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(message, optional, tag="3")] + pub project: ::core::option::Option, + #[prost(message, repeated, tag="4")] + pub destinations: ::prost::alloc::vec::Vec, + #[prost(string, tag="5")] + pub created_at: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ReleaseDestinationStatus { + #[prost(string, tag="1")] + pub destination: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub environment: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub status: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct 
GetDestinationStatesRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, optional, tag="2")] + pub project: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetDestinationStatesResponse { + #[prost(message, repeated, tag="1")] + pub destinations: ::prost::alloc::vec::Vec, + /// Active pipeline runs affecting these destinations (if any). + #[prost(message, repeated, tag="2")] + pub pipeline_runs: ::prost::alloc::vec::Vec, +} +// ── Release intent states (release-centric view) ───────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetReleaseIntentStatesRequest { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, optional, tag="2")] + pub project: ::core::option::Option<::prost::alloc::string::String>, + /// When true, also include recently completed release intents. + #[prost(bool, tag="3")] + pub include_completed: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetReleaseIntentStatesResponse { + #[prost(message, repeated, tag="1")] + pub release_intents: ::prost::alloc::vec::Vec, +} +/// Full state of a release intent: pipeline stages + individual release steps. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReleaseIntentState { + #[prost(string, tag="1")] + pub release_intent_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub project: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub created_at: ::prost::alloc::string::String, + /// Pipeline stages (empty for non-pipeline releases). + #[prost(message, repeated, tag="5")] + pub stages: ::prost::alloc::vec::Vec, + /// All release_states rows for this intent (deploy steps). 
+ #[prost(message, repeated, tag="6")] + pub steps: ::prost::alloc::vec::Vec, +} +/// Status of a single pipeline stage (saga coordinator view). +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PipelineStageState { + #[prost(string, tag="1")] + pub stage_id: ::prost::alloc::string::String, + #[prost(string, repeated, tag="2")] + pub depends_on: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(enumeration="PipelineRunStageType", tag="3")] + pub stage_type: i32, + #[prost(enumeration="PipelineRunStageStatus", tag="4")] + pub status: i32, + /// Consistent timestamps for all stage types. + #[prost(string, optional, tag="5")] + pub queued_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="6")] + pub started_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub completed_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub error_message: ::core::option::Option<::prost::alloc::string::String>, + /// Type-specific context. + /// + /// deploy stages + #[prost(string, optional, tag="9")] + pub environment: ::core::option::Option<::prost::alloc::string::String>, + /// wait stages + #[prost(int64, optional, tag="10")] + pub duration_seconds: ::core::option::Option, + /// wait stages + #[prost(string, optional, tag="11")] + pub wait_until: ::core::option::Option<::prost::alloc::string::String>, + /// deploy stages: individual release IDs + #[prost(string, repeated, tag="12")] + pub release_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// Status of a single release step (release_states row). 
+#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ReleaseStepState { + #[prost(string, tag="1")] + pub release_id: ::prost::alloc::string::String, + #[prost(string, optional, tag="2")] + pub stage_id: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, tag="3")] + pub destination_name: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub environment: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub status: ::prost::alloc::string::String, + #[prost(string, optional, tag="6")] + pub queued_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub assigned_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub started_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="9")] + pub completed_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="10")] + pub error_message: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DestinationState { + #[prost(string, tag="1")] + pub destination_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub destination_name: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub environment: ::prost::alloc::string::String, + #[prost(string, optional, tag="4")] + pub release_id: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="5")] + pub artifact_id: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="6")] + pub status: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub error_message: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub queued_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, 
optional, tag="9")] + pub completed_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(int32, optional, tag="10")] + pub queue_position: ::core::option::Option, + /// Pipeline context: set when this release was created by a pipeline stage. + #[prost(string, optional, tag="11")] + pub release_intent_id: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="12")] + pub stage_id: ::core::option::Option<::prost::alloc::string::String>, + /// When a runner was assigned to this release. + #[prost(string, optional, tag="13")] + pub assigned_at: ::core::option::Option<::prost::alloc::string::String>, + /// When the runner actually started executing. + #[prost(string, optional, tag="14")] + pub started_at: ::core::option::Option<::prost::alloc::string::String>, +} +// ── Pipeline run progress ──────────────────────────────────────────── + +/// Snapshot of an active (or recently completed) pipeline run. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PipelineRunState { + #[prost(string, tag="1")] + pub release_intent_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub created_at: ::prost::alloc::string::String, + #[prost(message, repeated, tag="4")] + pub stages: ::prost::alloc::vec::Vec, +} +/// Status of a single stage within a pipeline run. 
+#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PipelineRunStage { + #[prost(string, tag="1")] + pub stage_id: ::prost::alloc::string::String, + #[prost(string, repeated, tag="2")] + pub depends_on: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(enumeration="PipelineRunStageType", tag="3")] + pub stage_type: i32, + #[prost(enumeration="PipelineRunStageStatus", tag="4")] + pub status: i32, + /// Type-specific context + /// + /// deploy stages + #[prost(string, optional, tag="5")] + pub environment: ::core::option::Option<::prost::alloc::string::String>, + /// wait stages + #[prost(int64, optional, tag="6")] + pub duration_seconds: ::core::option::Option, + /// when dependencies were met + #[prost(string, optional, tag="7")] + pub queued_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub started_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="9")] + pub completed_at: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="10")] + pub error_message: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="11")] + pub wait_until: ::core::option::Option<::prost::alloc::string::String>, + /// deploy stages: individual release IDs + #[prost(string, repeated, tag="12")] + pub release_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct Source { #[prost(string, optional, tag="1")] pub user: ::core::option::Option<::prost::alloc::string::String>, @@ -310,6 +1297,8 @@ pub struct Artifact { pub destinations: ::prost::alloc::vec::Vec, #[prost(string, tag="9")] pub created_at: ::prost::alloc::string::String, + #[prost(message, optional, tag="10")] + pub r#ref: ::core::option::Option, } #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct ArtifactDestination { @@ -323,6 +1312,8 @@ pub 
struct ArtifactDestination { pub type_name: ::prost::alloc::string::String, #[prost(uint64, tag="5")] pub type_version: u64, + #[prost(string, tag="6")] + pub status: ::prost::alloc::string::String, } #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct Project { @@ -378,6 +1369,922 @@ impl LogChannel { } } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PipelineRunStageType { + Unspecified = 0, + Deploy = 1, + Wait = 2, +} +impl PipelineRunStageType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "PIPELINE_RUN_STAGE_TYPE_UNSPECIFIED", + Self::Deploy => "PIPELINE_RUN_STAGE_TYPE_DEPLOY", + Self::Wait => "PIPELINE_RUN_STAGE_TYPE_WAIT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PIPELINE_RUN_STAGE_TYPE_UNSPECIFIED" => Some(Self::Unspecified), + "PIPELINE_RUN_STAGE_TYPE_DEPLOY" => Some(Self::Deploy), + "PIPELINE_RUN_STAGE_TYPE_WAIT" => Some(Self::Wait), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PipelineRunStageStatus { + Unspecified = 0, + Pending = 1, + Active = 2, + Succeeded = 3, + Failed = 4, + Cancelled = 5, +} +impl PipelineRunStageStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "PIPELINE_RUN_STAGE_STATUS_UNSPECIFIED", + Self::Pending => "PIPELINE_RUN_STAGE_STATUS_PENDING", + Self::Active => "PIPELINE_RUN_STAGE_STATUS_ACTIVE", + Self::Succeeded => "PIPELINE_RUN_STAGE_STATUS_SUCCEEDED", + Self::Failed => "PIPELINE_RUN_STAGE_STATUS_FAILED", + Self::Cancelled => "PIPELINE_RUN_STAGE_STATUS_CANCELLED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PIPELINE_RUN_STAGE_STATUS_UNSPECIFIED" => Some(Self::Unspecified), + "PIPELINE_RUN_STAGE_STATUS_PENDING" => Some(Self::Pending), + "PIPELINE_RUN_STAGE_STATUS_ACTIVE" => Some(Self::Active), + "PIPELINE_RUN_STAGE_STATUS_SUCCEEDED" => Some(Self::Succeeded), + "PIPELINE_RUN_STAGE_STATUS_FAILED" => Some(Self::Failed), + "PIPELINE_RUN_STAGE_STATUS_CANCELLED" => Some(Self::Cancelled), + _ => None, + } + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct SoakTimeConfig { + /// Environment that must have a successful deploy before target is allowed + #[prost(string, tag="1")] + pub source_environment: ::prost::alloc::string::String, + /// Environment that is gated by this policy + #[prost(string, tag="2")] + pub target_environment: ::prost::alloc::string::String, + /// Seconds to wait after source environment succeeds + #[prost(int64, tag="3")] + pub duration_seconds: i64, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct BranchRestrictionConfig { + /// Environment that is restricted + #[prost(string, tag="1")] + pub target_environment: ::prost::alloc::string::String, + /// Regex that source branch must match + #[prost(string, tag="2")] + pub branch_pattern: ::prost::alloc::string::String, +} +// ── Policy resource ───────────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Policy { + #[prost(string, 
tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, tag="3")] + pub enabled: bool, + #[prost(enumeration="PolicyType", tag="4")] + pub policy_type: i32, + #[prost(string, tag="20")] + pub created_at: ::prost::alloc::string::String, + #[prost(string, tag="21")] + pub updated_at: ::prost::alloc::string::String, + #[prost(oneof="policy::Config", tags="10, 11")] + pub config: ::core::option::Option, +} +/// Nested message and enum types in `Policy`. +pub mod policy { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Config { + #[prost(message, tag="10")] + SoakTime(super::SoakTimeConfig), + #[prost(message, tag="11")] + BranchRestriction(super::BranchRestrictionConfig), + } +} +// ── Policy evaluation result ──────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PolicyEvaluation { + #[prost(string, tag="1")] + pub policy_name: ::prost::alloc::string::String, + #[prost(enumeration="PolicyType", tag="2")] + pub policy_type: i32, + #[prost(bool, tag="3")] + pub passed: bool, + /// Human-readable explanation when blocked + #[prost(string, tag="4")] + pub reason: ::prost::alloc::string::String, +} +// ── CRUD messages ─────────────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreatePolicyRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(enumeration="PolicyType", tag="3")] + pub policy_type: i32, + #[prost(oneof="create_policy_request::Config", tags="10, 11")] + pub config: ::core::option::Option, +} +/// Nested message and enum types in `CreatePolicyRequest`. 
+pub mod create_policy_request { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Config { + #[prost(message, tag="10")] + SoakTime(super::SoakTimeConfig), + #[prost(message, tag="11")] + BranchRestriction(super::BranchRestrictionConfig), + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreatePolicyResponse { + #[prost(message, optional, tag="1")] + pub policy: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdatePolicyRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, optional, tag="3")] + pub enabled: ::core::option::Option, + #[prost(oneof="update_policy_request::Config", tags="10, 11")] + pub config: ::core::option::Option, +} +/// Nested message and enum types in `UpdatePolicyRequest`. +pub mod update_policy_request { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Config { + #[prost(message, tag="10")] + SoakTime(super::SoakTimeConfig), + #[prost(message, tag="11")] + BranchRestriction(super::BranchRestrictionConfig), + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdatePolicyResponse { + #[prost(message, optional, tag="1")] + pub policy: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeletePolicyRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeletePolicyResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListPoliciesRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListPoliciesResponse { + #[prost(message, repeated, tag="1")] + pub 
policies: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct EvaluatePoliciesRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub target_environment: ::prost::alloc::string::String, + /// For branch restriction checks + #[prost(string, optional, tag="3")] + pub branch: ::core::option::Option<::prost::alloc::string::String>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EvaluatePoliciesResponse { + #[prost(message, repeated, tag="1")] + pub evaluations: ::prost::alloc::vec::Vec, + #[prost(bool, tag="2")] + pub all_passed: bool, +} +// ── Policy types ──────────────────────────────────────────────────── + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PolicyType { + Unspecified = 0, + SoakTime = 1, + BranchRestriction = 2, +} +impl PolicyType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "POLICY_TYPE_UNSPECIFIED", + Self::SoakTime => "POLICY_TYPE_SOAK_TIME", + Self::BranchRestriction => "POLICY_TYPE_BRANCH_RESTRICTION", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "POLICY_TYPE_UNSPECIFIED" => Some(Self::Unspecified), + "POLICY_TYPE_SOAK_TIME" => Some(Self::SoakTime), + "POLICY_TYPE_BRANCH_RESTRICTION" => Some(Self::BranchRestriction), + _ => None, + } + } +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentsRequest { +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentsResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentRequest { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentResponse { + #[prost(message, optional, tag="1")] + pub component: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Component { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub version: ::prost::alloc::string::String, +} +/// ComponentVersion +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentVersionRequest { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub version: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentVersionResponse { + #[prost(message, optional, tag="1")] + pub component: ::core::option::Option, +} +// BeginUpload + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct BeginUploadRequest { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub version: ::prost::alloc::string::String, 
+} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct BeginUploadResponse { + #[prost(string, tag="1")] + pub upload_context: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UploadFileRequest { + #[prost(string, tag="1")] + pub upload_context: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub file_path: ::prost::alloc::string::String, + #[prost(bytes="vec", tag="3")] + pub file_content: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UploadFileResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CommitUploadRequest { + #[prost(string, tag="1")] + pub upload_context: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CommitUploadResponse { +} +/// Get component files +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentFilesRequest { + #[prost(string, tag="1")] + pub component_id: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetComponentFilesResponse { + #[prost(oneof="get_component_files_response::Msg", tags="1, 2")] + pub msg: ::core::option::Option, +} +/// Nested message and enum types in `GetComponentFilesResponse`. 
+pub mod get_component_files_response { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Msg { + #[prost(message, tag="1")] + Done(super::Done), + #[prost(message, tag="2")] + ComponentFile(super::ComponentFile), + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ComponentFile { + #[prost(string, tag="1")] + pub file_path: ::prost::alloc::string::String, + #[prost(bytes="vec", tag="2")] + pub file_content: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Done { +} +// ── Per-type config messages ───────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeployStageConfig { + #[prost(string, tag="1")] + pub environment: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct WaitStageConfig { + #[prost(int64, tag="1")] + pub duration_seconds: i64, +} +// ── A single pipeline stage ────────────────────────────────────────── + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PipelineStage { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, repeated, tag="2")] + pub depends_on: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(oneof="pipeline_stage::Config", tags="10, 11")] + pub config: ::core::option::Option, +} +/// Nested message and enum types in `PipelineStage`. 
+pub mod pipeline_stage { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Config { + #[prost(message, tag="10")] + Deploy(super::DeployStageConfig), + #[prost(message, tag="11")] + Wait(super::WaitStageConfig), + } +} +// ── Pipeline resource ──────────────────────────────────────────────── + +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReleasePipeline { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, tag="3")] + pub enabled: bool, + #[prost(message, repeated, tag="4")] + pub stages: ::prost::alloc::vec::Vec, + #[prost(string, tag="5")] + pub created_at: ::prost::alloc::string::String, + #[prost(string, tag="6")] + pub updated_at: ::prost::alloc::string::String, +} +// ── CRUD messages ──────────────────────────────────────────────────── + +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateReleasePipelineRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag="3")] + pub stages: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateReleasePipelineResponse { + #[prost(message, optional, tag="1")] + pub pipeline: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UpdateReleasePipelineRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, optional, tag="3")] + pub enabled: ::core::option::Option, + /// When set, replaces all stages. When absent, stages are unchanged. 
+ #[prost(message, repeated, tag="4")] + pub stages: ::prost::alloc::vec::Vec, + #[prost(bool, tag="5")] + pub update_stages: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UpdateReleasePipelineResponse { + #[prost(message, optional, tag="1")] + pub pipeline: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteReleasePipelineRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteReleasePipelineResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListReleasePipelinesRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListReleasePipelinesResponse { + #[prost(message, repeated, tag="1")] + pub pipelines: ::prost::alloc::vec::Vec, +} +// ── Stage type enum (useful for UI dropdowns / filtering) ──────────── + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum StageType { + Unspecified = 0, + Deploy = 1, + Wait = 2, +} +impl StageType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "STAGE_TYPE_UNSPECIFIED", + Self::Deploy => "STAGE_TYPE_DEPLOY", + Self::Wait => "STAGE_TYPE_WAIT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "STAGE_TYPE_UNSPECIFIED" => Some(Self::Unspecified), + "STAGE_TYPE_DEPLOY" => Some(Self::Deploy), + "STAGE_TYPE_WAIT" => Some(Self::Wait), + _ => None, + } + } +} +// ── Runtime stage status (for observing pipeline progress) ─────────── + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PipelineStageStatus { + Unspecified = 0, + Pending = 1, + Active = 2, + Succeeded = 3, + Failed = 4, + Cancelled = 5, +} +impl PipelineStageStatus { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "PIPELINE_STAGE_STATUS_UNSPECIFIED", + Self::Pending => "PIPELINE_STAGE_STATUS_PENDING", + Self::Active => "PIPELINE_STAGE_STATUS_ACTIVE", + Self::Succeeded => "PIPELINE_STAGE_STATUS_SUCCEEDED", + Self::Failed => "PIPELINE_STAGE_STATUS_FAILED", + Self::Cancelled => "PIPELINE_STAGE_STATUS_CANCELLED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PIPELINE_STAGE_STATUS_UNSPECIFIED" => Some(Self::Unspecified), + "PIPELINE_STAGE_STATUS_PENDING" => Some(Self::Pending), + "PIPELINE_STAGE_STATUS_ACTIVE" => Some(Self::Active), + "PIPELINE_STAGE_STATUS_SUCCEEDED" => Some(Self::Succeeded), + "PIPELINE_STAGE_STATUS_FAILED" => Some(Self::Failed), + "PIPELINE_STAGE_STATUS_CANCELLED" => Some(Self::Cancelled), + _ => None, + } + } +} +// ============================================================================ +// Connect stream: Runner → Server +// ============================================================================ + +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RunnerMessage { + #[prost(oneof="runner_message::Message", tags="1, 2, 3")] + pub message: ::core::option::Option, +} +/// Nested message and enum types in `RunnerMessage`. +pub mod runner_message { + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Message { + #[prost(message, tag="1")] + Register(super::RunnerRegister), + #[prost(message, tag="2")] + Heartbeat(super::RunnerHeartbeat), + #[prost(message, tag="3")] + WorkAck(super::WorkAck), + } +} +/// First message a runner sends on the Connect stream. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RunnerRegister { + /// Runner-chosen unique identifier. If empty, the server assigns one. + #[prost(string, tag="1")] + pub runner_id: ::prost::alloc::string::String, + /// Destination types this runner can handle. + #[prost(message, repeated, tag="2")] + pub capabilities: ::prost::alloc::vec::Vec, + /// Maximum number of simultaneous releases this runner can process. + #[prost(int32, tag="3")] + pub max_concurrent: i32, +} +/// Describes a destination type the runner supports. 
+#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DestinationCapability { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(uint64, tag="3")] + pub version: u64, +} +/// Periodic keepalive sent by the runner (recommended every 10s). +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct RunnerHeartbeat { + /// Current number of in-progress releases on this runner. + #[prost(int32, tag="1")] + pub active_releases: i32, +} +/// Runner's response to a WorkAssignment. +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct WorkAck { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, + /// false = runner rejects the work (e.g., overloaded). The server will + /// reassign or fall back to in-process execution. + #[prost(bool, tag="2")] + pub accepted: bool, +} +// ============================================================================ +// Connect stream: Server → Runner +// ============================================================================ + +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ServerMessage { + #[prost(oneof="server_message::Message", tags="1, 2")] + pub message: ::core::option::Option, +} +/// Nested message and enum types in `ServerMessage`. +pub mod server_message { + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Message { + #[prost(message, tag="1")] + RegisterAck(super::RegisterAck), + #[prost(message, tag="2")] + WorkAssignment(super::WorkAssignment), + } +} +/// Server response to RunnerRegister. +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct RegisterAck { + /// Server-confirmed (or server-assigned) runner ID. 
+ #[prost(string, tag="1")] + pub runner_id: ::prost::alloc::string::String, + #[prost(bool, tag="2")] + pub accepted: bool, + #[prost(string, tag="3")] + pub reason: ::prost::alloc::string::String, +} +/// Work assignment pushed to a runner when a matching release is available. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WorkAssignment { + /// Scoped opaque auth token. Use this for GetReleaseFiles, PushLogs, + /// and CompleteRelease. The token restricts access to only the data + /// associated with this specific release. + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub release_id: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub release_intent_id: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub artifact_id: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub destination_id: ::prost::alloc::string::String, + /// Full destination configuration including metadata. + #[prost(message, optional, tag="6")] + pub destination: ::core::option::Option, +} +/// Destination configuration sent with the work assignment. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DestinationInfo { + #[prost(string, tag="1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub environment: ::prost::alloc::string::String, + #[prost(map="string, string", tag="3")] + pub metadata: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(message, optional, tag="4")] + pub r#type: ::core::option::Option, + #[prost(string, tag="5")] + pub organisation: ::prost::alloc::string::String, +} +// ============================================================================ +// GetReleaseFiles +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetReleaseFilesRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ReleaseFile { + #[prost(string, tag="1")] + pub file_name: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub file_content: ::prost::alloc::string::String, +} +// ============================================================================ +// GetSpecFiles +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetSpecFilesRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, +} +// ============================================================================ +// GetReleaseAnnotation +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetReleaseAnnotationRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ReleaseAnnotationResponse { + #[prost(string, tag="1")] + 
pub slug: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub source_username: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub source_email: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub context_title: ::prost::alloc::string::String, + #[prost(string, tag="5")] + pub context_description: ::prost::alloc::string::String, + #[prost(string, tag="6")] + pub context_web: ::prost::alloc::string::String, + #[prost(string, tag="7")] + pub reference_version: ::prost::alloc::string::String, + #[prost(string, tag="8")] + pub reference_commit_sha: ::prost::alloc::string::String, + #[prost(string, tag="9")] + pub reference_commit_branch: ::prost::alloc::string::String, + #[prost(string, tag="10")] + pub reference_commit_message: ::prost::alloc::string::String, + #[prost(string, tag="11")] + pub created_at: ::prost::alloc::string::String, +} +// ============================================================================ +// GetProjectInfo +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetProjectInfoRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ProjectInfoResponse { + #[prost(string, tag="1")] + pub organisation: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub project: ::prost::alloc::string::String, +} +// ============================================================================ +// PushLogs +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PushLogRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, + /// "stdout" or "stderr" + #[prost(string, tag="2")] + pub channel: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub line: 
::prost::alloc::string::String, + #[prost(uint64, tag="4")] + pub timestamp: u64, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct PushLogResponse { +} +// ============================================================================ +// CompleteRelease +// ============================================================================ + +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CompleteReleaseRequest { + #[prost(string, tag="1")] + pub release_token: ::prost::alloc::string::String, + #[prost(enumeration="ReleaseOutcome", tag="2")] + pub outcome: i32, + /// Error description when outcome is FAILURE. + #[prost(string, tag="3")] + pub error_message: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CompleteReleaseResponse { +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ReleaseOutcome { + Unspecified = 0, + Success = 1, + Failure = 2, +} +impl ReleaseOutcome { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Unspecified => "RELEASE_OUTCOME_UNSPECIFIED", + Self::Success => "RELEASE_OUTCOME_SUCCESS", + Self::Failure => "RELEASE_OUTCOME_FAILURE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "RELEASE_OUTCOME_UNSPECIFIED" => Some(Self::Unspecified), + "RELEASE_OUTCOME_SUCCESS" => Some(Self::Success), + "RELEASE_OUTCOME_FAILURE" => Some(Self::Failure), + _ => None, + } + } +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct Trigger { + #[prost(string, tag="1")] + pub id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, tag="3")] + pub enabled: bool, + #[prost(string, optional, tag="4")] + pub branch_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="5")] + pub title_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="6")] + pub author_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub commit_message_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub source_type_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, repeated, tag="9")] + pub target_environments: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="10")] + pub target_destinations: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, tag="11")] + pub force_release: bool, + #[prost(string, tag="12")] + pub created_at: ::prost::alloc::string::String, + #[prost(string, tag="13")] + pub updated_at: ::prost::alloc::string::String, + /// When true, trigger the project's release pipeline instead of + /// deploying directly to target destinations/environments. 
+ #[prost(bool, tag="14")] + pub use_pipeline: bool, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateTriggerRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(string, optional, tag="3")] + pub branch_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="4")] + pub title_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="5")] + pub author_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="6")] + pub commit_message_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub source_type_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, repeated, tag="8")] + pub target_environments: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="9")] + pub target_destinations: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, tag="10")] + pub force_release: bool, + #[prost(bool, tag="11")] + pub use_pipeline: bool, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct CreateTriggerResponse { + #[prost(message, optional, tag="1")] + pub trigger: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateTriggerRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, + #[prost(bool, optional, tag="3")] + pub enabled: ::core::option::Option, + #[prost(string, optional, tag="4")] + pub branch_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="5")] + pub title_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, 
tag="6")] + pub author_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="7")] + pub commit_message_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, optional, tag="8")] + pub source_type_pattern: ::core::option::Option<::prost::alloc::string::String>, + #[prost(string, repeated, tag="9")] + pub target_environments: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(string, repeated, tag="10")] + pub target_destinations: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + #[prost(bool, optional, tag="11")] + pub force_release: ::core::option::Option, + #[prost(bool, optional, tag="12")] + pub use_pipeline: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UpdateTriggerResponse { + #[prost(message, optional, tag="1")] + pub trigger: ::core::option::Option, +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteTriggerRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, + #[prost(string, tag="2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct DeleteTriggerResponse { +} +#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ListTriggersRequest { + #[prost(message, optional, tag="1")] + pub project: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListTriggersResponse { + #[prost(message, repeated, tag="1")] + pub triggers: ::prost::alloc::vec::Vec, +} // ─── Core types ────────────────────────────────────────────────────── #[derive(Clone, PartialEq, ::prost::Message)] @@ -713,6 +2620,43 @@ pub struct DeletePersonalAccessTokenRequest { #[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] pub struct DeletePersonalAccessTokenResponse { } +// ─── Stats ────────────────────────────────────────────────────────── + +#[derive(Clone, 
PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetUserStatsRequest { + #[prost(oneof="get_user_stats_request::Identifier", tags="1, 2")] + pub identifier: ::core::option::Option, +} +/// Nested message and enum types in `GetUserStatsRequest`. +pub mod get_user_stats_request { + #[derive(Clone, PartialEq, Eq, Hash, ::prost::Oneof)] + pub enum Identifier { + #[prost(string, tag="1")] + UserId(::prost::alloc::string::String), + #[prost(string, tag="2")] + Username(::prost::alloc::string::String), + } +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct GetUserStatsResponse { + #[prost(message, optional, tag="1")] + pub stats: ::core::option::Option, +} +#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] +pub struct UserStats { + #[prost(int64, tag="1")] + pub total_releases: i64, + #[prost(int64, tag="2")] + pub successful_releases: i64, + #[prost(int64, tag="3")] + pub failed_releases: i64, + #[prost(int64, tag="4")] + pub in_progress_releases: i64, + #[prost(int64, tag="5")] + pub total_annotations: i64, + #[prost(int64, tag="6")] + pub total_uploads: i64, +} #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct SetupMfaRequest { #[prost(string, tag="1")] diff --git a/crates/forage-grpc/src/grpc/forest/v1/forest.v1.tonic.rs b/crates/forage-grpc/src/grpc/forest/v1/forest.v1.tonic.rs index 88a398f..112882d 100644 --- a/crates/forage-grpc/src/grpc/forest/v1/forest.v1.tonic.rs +++ b/crates/forage-grpc/src/grpc/forest/v1/forest.v1.tonic.rs @@ -1,5 +1,4565 @@ // @generated /// Generated client implementations. +pub mod app_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct AppServiceClient { + inner: tonic::client::Grpc, + } + impl AppServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl AppServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> AppServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + AppServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn create_app( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/CreateApp", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "CreateApp")); + self.inner.unary(req, path, codec).await + } + pub async fn get_app( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/GetApp", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "GetApp")); + self.inner.unary(req, path, codec).await + } + pub async fn list_apps( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/ListApps", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "ListApps")); + self.inner.unary(req, path, codec).await + } + pub async fn delete_app( + &mut self, + 
request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/DeleteApp", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "DeleteApp")); + self.inner.unary(req, path, codec).await + } + pub async fn suspend_app( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/SuspendApp", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "SuspendApp")); + self.inner.unary(req, path, codec).await + } + pub async fn create_app_token( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/CreateAppToken", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "CreateAppToken")); + self.inner.unary(req, path, codec).await + } + pub async fn list_app_tokens( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + 
tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/ListAppTokens", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "ListAppTokens")); + self.inner.unary(req, path, codec).await + } + pub async fn revoke_app_token( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.AppService/RevokeAppToken", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.AppService", "RevokeAppToken")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod app_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with AppServiceServer. 
+ #[async_trait] + pub trait AppService: std::marker::Send + std::marker::Sync + 'static { + async fn create_app( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_app( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn list_apps( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn delete_app( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn suspend_app( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn create_app_token( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn list_app_tokens( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn revoke_app_token( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct AppServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl AppServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. 
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for AppServiceServer + where + T: AppService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.AppService/CreateApp" => { + #[allow(non_camel_case_types)] + struct CreateAppSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for CreateAppSvc { + type Response = super::CreateAppResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_app(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + 
let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateAppSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/GetApp" => { + #[allow(non_camel_case_types)] + struct GetAppSvc(pub Arc); + impl tonic::server::UnaryService + for GetAppSvc { + type Response = super::GetAppResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_app(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetAppSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/ListApps" => { + #[allow(non_camel_case_types)] + struct ListAppsSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for ListAppsSvc { + type Response = super::ListAppsResponse; + type Future = BoxFuture< + tonic::Response, + 
tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_apps(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListAppsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/DeleteApp" => { + #[allow(non_camel_case_types)] + struct DeleteAppSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for DeleteAppSvc { + type Response = super::DeleteAppResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_app(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteAppSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + 
send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/SuspendApp" => { + #[allow(non_camel_case_types)] + struct SuspendAppSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for SuspendAppSvc { + type Response = super::SuspendAppResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::suspend_app(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = SuspendAppSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/CreateAppToken" => { + #[allow(non_camel_case_types)] + struct CreateAppTokenSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for CreateAppTokenSvc { + type Response = super::CreateAppTokenResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_app_token(&inner, request).await + }; + Box::pin(fut) + } + } + let 
accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateAppTokenSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.AppService/ListAppTokens" => { + #[allow(non_camel_case_types)] + struct ListAppTokensSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for ListAppTokensSvc { + type Response = super::ListAppTokensResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_app_tokens(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListAppTokensSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + 
"/forest.v1.AppService/RevokeAppToken" => { + #[allow(non_camel_case_types)] + struct RevokeAppTokenSvc(pub Arc); + impl< + T: AppService, + > tonic::server::UnaryService + for RevokeAppTokenSvc { + type Response = super::RevokeAppTokenResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::revoke_app_token(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = RevokeAppTokenSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for AppServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: 
self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.AppService"; + impl tonic::server::NamedService for AppServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod artifact_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct ArtifactServiceClient { + inner: tonic::client::Grpc, + } + impl ArtifactServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl ArtifactServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> ArtifactServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + ArtifactServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. 
+ #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn begin_upload_artifact( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ArtifactService/BeginUploadArtifact", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.ArtifactService", "BeginUploadArtifact"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn upload_artifact( + &mut self, + request: impl tonic::IntoStreamingRequest< + Message = super::UploadArtifactRequest, + >, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ArtifactService/UploadArtifact", + ); + let mut req = 
request.into_streaming_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.ArtifactService", "UploadArtifact")); + self.inner.client_streaming(req, path, codec).await + } + /// + pub async fn commit_artifact( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ArtifactService/CommitArtifact", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.ArtifactService", "CommitArtifact")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_artifact_files( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ArtifactService/GetArtifactFiles", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.ArtifactService", "GetArtifactFiles"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_artifact_spec( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ArtifactService/GetArtifactSpec", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + 
GrpcMethod::new("forest.v1.ArtifactService", "GetArtifactSpec"), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod artifact_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with ArtifactServiceServer. + #[async_trait] + pub trait ArtifactService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn begin_upload_artifact( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn upload_artifact( + &self, + request: tonic::Request>, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn commit_artifact( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct ArtifactServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl ArtifactServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. 
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for ArtifactServiceServer + where + T: ArtifactService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.ArtifactService/BeginUploadArtifact" => { + #[allow(non_camel_case_types)] + struct BeginUploadArtifactSvc(pub Arc); + impl< + T: ArtifactService, + > tonic::server::UnaryService + for BeginUploadArtifactSvc { + type Response = super::BeginUploadArtifactResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::begin_upload_artifact( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = 
self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = BeginUploadArtifactSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ArtifactService/UploadArtifact" => { + #[allow(non_camel_case_types)] + struct UploadArtifactSvc(pub Arc); + impl< + T: ArtifactService, + > tonic::server::ClientStreamingService + for UploadArtifactSvc { + type Response = super::UploadArtifactResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + tonic::Streaming, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::upload_artifact(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UploadArtifactSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.client_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + 
"/forest.v1.ArtifactService/CommitArtifact" => { + #[allow(non_camel_case_types)] + struct CommitArtifactSvc(pub Arc); + impl< + T: ArtifactService, + > tonic::server::UnaryService + for CommitArtifactSvc { + type Response = super::CommitArtifactResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::commit_artifact(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CommitArtifactSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for ArtifactServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: 
self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.ArtifactService"; + impl tonic::server::NamedService for ArtifactServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod destination_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct DestinationServiceClient { + inner: tonic::client::Grpc, + } + impl DestinationServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl DestinationServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> DestinationServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + DestinationServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. 
+ #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn create_destination( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.DestinationService/CreateDestination", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.DestinationService", "CreateDestination"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn update_destination( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.DestinationService/UpdateDestination", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + 
GrpcMethod::new("forest.v1.DestinationService", "UpdateDestination"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn delete_destination( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.DestinationService/DeleteDestination", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.DestinationService", "DeleteDestination"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn get_destinations( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.DestinationService/GetDestinations", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.DestinationService", "GetDestinations"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn list_destination_types( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.DestinationService/ListDestinationTypes", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.DestinationService", + 
"ListDestinationTypes", + ), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod destination_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with DestinationServiceServer. + #[async_trait] + pub trait DestinationService: std::marker::Send + std::marker::Sync + 'static { + async fn create_destination( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn update_destination( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn delete_destination( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_destinations( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn list_destination_types( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct DestinationServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl DestinationServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), 
interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for DestinationServiceServer + where + T: DestinationService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.DestinationService/CreateDestination" => { + #[allow(non_camel_case_types)] + struct CreateDestinationSvc(pub Arc); + impl< + T: DestinationService, + > tonic::server::UnaryService + for CreateDestinationSvc { + type Response = super::CreateDestinationResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_destination( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = 
self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateDestinationSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.DestinationService/UpdateDestination" => { + #[allow(non_camel_case_types)] + struct UpdateDestinationSvc(pub Arc); + impl< + T: DestinationService, + > tonic::server::UnaryService + for UpdateDestinationSvc { + type Response = super::UpdateDestinationResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_destination( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdateDestinationSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + 
Box::pin(fut) + } + "/forest.v1.DestinationService/DeleteDestination" => { + #[allow(non_camel_case_types)] + struct DeleteDestinationSvc(pub Arc); + impl< + T: DestinationService, + > tonic::server::UnaryService + for DeleteDestinationSvc { + type Response = super::DeleteDestinationResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_destination( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteDestinationSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.DestinationService/GetDestinations" => { + #[allow(non_camel_case_types)] + struct GetDestinationsSvc(pub Arc); + impl< + T: DestinationService, + > tonic::server::UnaryService + for GetDestinationsSvc { + type Response = super::GetDestinationsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_destinations(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = 
self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetDestinationsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.DestinationService/ListDestinationTypes" => { + #[allow(non_camel_case_types)] + struct ListDestinationTypesSvc(pub Arc); + impl< + T: DestinationService, + > tonic::server::UnaryService + for ListDestinationTypesSvc { + type Response = super::ListDestinationTypesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_destination_types( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListDestinationTypesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + 
let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for DestinationServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.DestinationService"; + impl tonic::server::NamedService for DestinationServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod environment_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct EnvironmentServiceClient { + inner: tonic::client::Grpc, + } + impl EnvironmentServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl EnvironmentServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> EnvironmentServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + EnvironmentServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn create_environment( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EnvironmentService/CreateEnvironment", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.EnvironmentService", "CreateEnvironment"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn get_environment( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EnvironmentService/GetEnvironment", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.EnvironmentService", "GetEnvironment"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn list_environments( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EnvironmentService/ListEnvironments", + ); + let mut req = request.into_request(); + 
req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.EnvironmentService", "ListEnvironments"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn update_environment( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EnvironmentService/UpdateEnvironment", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.EnvironmentService", "UpdateEnvironment"), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn delete_environment( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EnvironmentService/DeleteEnvironment", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.EnvironmentService", "DeleteEnvironment"), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod environment_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with EnvironmentServiceServer. 
+ #[async_trait] + pub trait EnvironmentService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn create_environment( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn get_environment( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn list_environments( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn update_environment( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn delete_environment( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct EnvironmentServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl EnvironmentServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. 
+ #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for EnvironmentServiceServer + where + T: EnvironmentService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.EnvironmentService/CreateEnvironment" => { + #[allow(non_camel_case_types)] + struct CreateEnvironmentSvc(pub Arc); + impl< + T: EnvironmentService, + > tonic::server::UnaryService + for CreateEnvironmentSvc { + type Response = super::CreateEnvironmentResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_environment( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = 
CreateEnvironmentSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EnvironmentService/GetEnvironment" => { + #[allow(non_camel_case_types)] + struct GetEnvironmentSvc(pub Arc); + impl< + T: EnvironmentService, + > tonic::server::UnaryService + for GetEnvironmentSvc { + type Response = super::GetEnvironmentResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_environment(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetEnvironmentSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EnvironmentService/ListEnvironments" => { + #[allow(non_camel_case_types)] + struct ListEnvironmentsSvc(pub Arc); + impl< + T: EnvironmentService, + > tonic::server::UnaryService + for ListEnvironmentsSvc { + type Response = super::ListEnvironmentsResponse; + type Future = BoxFuture< + tonic::Response, + 
tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_environments( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListEnvironmentsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EnvironmentService/UpdateEnvironment" => { + #[allow(non_camel_case_types)] + struct UpdateEnvironmentSvc(pub Arc); + impl< + T: EnvironmentService, + > tonic::server::UnaryService + for UpdateEnvironmentSvc { + type Response = super::UpdateEnvironmentResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_environment( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdateEnvironmentSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = 
tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EnvironmentService/DeleteEnvironment" => { + #[allow(non_camel_case_types)] + struct DeleteEnvironmentSvc(pub Arc); + impl< + T: EnvironmentService, + > tonic::server::UnaryService + for DeleteEnvironmentSvc { + type Response = super::DeleteEnvironmentResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_environment( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteEnvironmentSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + 
impl Clone for EnvironmentServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.EnvironmentService"; + impl tonic::server::NamedService for EnvironmentServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod event_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct EventServiceClient { + inner: tonic::client::Grpc, + } + impl EventServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl EventServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> EventServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + EventServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn subscribe( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventService/Subscribe", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.EventService", "Subscribe")); + self.inner.server_streaming(req, path, codec).await + } + pub async fn subscribe_durable( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventService/SubscribeDurable", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.EventService", "SubscribeDurable")); + self.inner.server_streaming(req, path, codec).await + } + pub async fn acknowledge_events( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventService/AcknowledgeEvents", + ); + let mut req = request.into_request(); + req.extensions_mut() + 
.insert(GrpcMethod::new("forest.v1.EventService", "AcknowledgeEvents")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod event_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with EventServiceServer. + #[async_trait] + pub trait EventService: std::marker::Send + std::marker::Sync + 'static { + /// Server streaming response type for the Subscribe method. + type SubscribeStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + async fn subscribe( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + /// Server streaming response type for the SubscribeDurable method. + type SubscribeDurableStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + async fn subscribe_durable( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn acknowledge_events( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct EventServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl EventServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> 
InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for EventServiceServer + where + T: EventService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.EventService/Subscribe" => { + #[allow(non_camel_case_types)] + struct SubscribeSvc(pub Arc); + impl< + T: EventService, + > tonic::server::ServerStreamingService< + super::SubscribeEventsRequest, + > for SubscribeSvc { + type Response = super::OrgEvent; + type ResponseStream = T::SubscribeStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move 
{ + ::subscribe(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = SubscribeSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EventService/SubscribeDurable" => { + #[allow(non_camel_case_types)] + struct SubscribeDurableSvc(pub Arc); + impl< + T: EventService, + > tonic::server::ServerStreamingService< + super::SubscribeDurableRequest, + > for SubscribeDurableSvc { + type Response = super::OrgEvent; + type ResponseStream = T::SubscribeDurableStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::subscribe_durable(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = SubscribeDurableSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, 
+ ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EventService/AcknowledgeEvents" => { + #[allow(non_camel_case_types)] + struct AcknowledgeEventsSvc(pub Arc); + impl< + T: EventService, + > tonic::server::UnaryService + for AcknowledgeEventsSvc { + type Response = super::AcknowledgeEventsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::acknowledge_events(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = AcknowledgeEventsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for EventServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + 
accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.EventService"; + impl tonic::server::NamedService for EventServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod event_subscription_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct EventSubscriptionServiceClient { + inner: tonic::client::Grpc, + } + impl EventSubscriptionServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl EventSubscriptionServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> EventSubscriptionServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + EventSubscriptionServiceClient::new( + InterceptedService::new(inner, interceptor), + ) + } + /// 
Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn create_event_subscription( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventSubscriptionService/CreateEventSubscription", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.EventSubscriptionService", + "CreateEventSubscription", + ), + ); + self.inner.unary(req, path, codec).await + } + pub async fn update_event_subscription( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = 
tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventSubscriptionService/UpdateEventSubscription", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.EventSubscriptionService", + "UpdateEventSubscription", + ), + ); + self.inner.unary(req, path, codec).await + } + pub async fn delete_event_subscription( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventSubscriptionService/DeleteEventSubscription", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.EventSubscriptionService", + "DeleteEventSubscription", + ), + ); + self.inner.unary(req, path, codec).await + } + pub async fn list_event_subscriptions( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.EventSubscriptionService/ListEventSubscriptions", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.EventSubscriptionService", + "ListEventSubscriptions", + ), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. 
+pub mod event_subscription_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with EventSubscriptionServiceServer. + #[async_trait] + pub trait EventSubscriptionService: std::marker::Send + std::marker::Sync + 'static { + async fn create_event_subscription( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn update_event_subscription( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn delete_event_subscription( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn list_event_subscriptions( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct EventSubscriptionServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl EventSubscriptionServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. 
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> + for EventSubscriptionServiceServer + where + T: EventSubscriptionService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.EventSubscriptionService/CreateEventSubscription" => { + #[allow(non_camel_case_types)] + struct CreateEventSubscriptionSvc( + pub Arc, + ); + impl< + T: EventSubscriptionService, + > tonic::server::UnaryService + for CreateEventSubscriptionSvc { + type Response = super::CreateEventSubscriptionResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::CreateEventSubscriptionRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_event_subscription( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let 
accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateEventSubscriptionSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EventSubscriptionService/UpdateEventSubscription" => { + #[allow(non_camel_case_types)] + struct UpdateEventSubscriptionSvc( + pub Arc, + ); + impl< + T: EventSubscriptionService, + > tonic::server::UnaryService + for UpdateEventSubscriptionSvc { + type Response = super::UpdateEventSubscriptionResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::UpdateEventSubscriptionRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_event_subscription( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdateEventSubscriptionSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + 
.apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EventSubscriptionService/DeleteEventSubscription" => { + #[allow(non_camel_case_types)] + struct DeleteEventSubscriptionSvc( + pub Arc, + ); + impl< + T: EventSubscriptionService, + > tonic::server::UnaryService + for DeleteEventSubscriptionSvc { + type Response = super::DeleteEventSubscriptionResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::DeleteEventSubscriptionRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_event_subscription( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteEventSubscriptionSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.EventSubscriptionService/ListEventSubscriptions" => { + #[allow(non_camel_case_types)] + struct ListEventSubscriptionsSvc( + pub Arc, + ); + impl< + T: EventSubscriptionService, + > tonic::server::UnaryService + for ListEventSubscriptionsSvc { + type Response = super::ListEventSubscriptionsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + 
request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_event_subscriptions( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListEventSubscriptionsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for EventSubscriptionServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.EventSubscriptionService"; + impl tonic::server::NamedService for EventSubscriptionServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. 
+pub mod status_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct StatusServiceClient { + inner: tonic::client::Grpc, + } + impl StatusServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl StatusServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> StatusServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + StatusServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. 
+ /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn status( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.StatusService/Status", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.StatusService", "Status")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod status_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with StatusServiceServer. 
+ #[async_trait] + pub trait StatusService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn status( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct StatusServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl StatusServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for StatusServiceServer + where + T: StatusService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.StatusService/Status" => { + #[allow(non_camel_case_types)] + struct StatusSvc(pub Arc); + impl< + T: StatusService, + > tonic::server::UnaryService + for StatusSvc { + type Response = super::GetStatusResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::status(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = StatusSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = 
response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for StatusServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.StatusService"; + impl tonic::server::NamedService for StatusServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod notification_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct NotificationServiceClient { + inner: tonic::client::Grpc, + } + impl NotificationServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl NotificationServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> NotificationServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + NotificationServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn get_notification_preferences( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.NotificationService/GetNotificationPreferences", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.NotificationService", + "GetNotificationPreferences", + ), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn set_notification_preference( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.NotificationService/SetNotificationPreference", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.NotificationService", + "SetNotificationPreference", + ), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn listen_notifications( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + 
"/forest.v1.NotificationService/ListenNotifications", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.NotificationService", + "ListenNotifications", + ), + ); + self.inner.server_streaming(req, path, codec).await + } + /// + pub async fn list_notifications( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.NotificationService/ListNotifications", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.NotificationService", "ListNotifications"), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod notification_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with NotificationServiceServer. + #[async_trait] + pub trait NotificationService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn get_notification_preferences( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn set_notification_preference( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the ListenNotifications method. 
+ type ListenNotificationsStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + /// + async fn listen_notifications( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn list_notifications( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct NotificationServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl NotificationServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for NotificationServiceServer + where + T: NotificationService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.NotificationService/GetNotificationPreferences" => { + #[allow(non_camel_case_types)] + struct GetNotificationPreferencesSvc( + pub Arc, + ); + impl< + T: NotificationService, + > tonic::server::UnaryService< + super::GetNotificationPreferencesRequest, + > for GetNotificationPreferencesSvc { + type Response = super::GetNotificationPreferencesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::GetNotificationPreferencesRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_notification_preferences( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetNotificationPreferencesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + 
max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.NotificationService/SetNotificationPreference" => { + #[allow(non_camel_case_types)] + struct SetNotificationPreferenceSvc( + pub Arc, + ); + impl< + T: NotificationService, + > tonic::server::UnaryService< + super::SetNotificationPreferenceRequest, + > for SetNotificationPreferenceSvc { + type Response = super::SetNotificationPreferenceResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + super::SetNotificationPreferenceRequest, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::set_notification_preference( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = SetNotificationPreferenceSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.NotificationService/ListenNotifications" => { + #[allow(non_camel_case_types)] + struct ListenNotificationsSvc(pub Arc); + impl< + T: NotificationService, + > tonic::server::ServerStreamingService< + super::ListenNotificationsRequest, + > for ListenNotificationsSvc { + type Response = super::Notification; + type ResponseStream = T::ListenNotificationsStream; + type Future = 
BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::listen_notifications( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListenNotificationsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.NotificationService/ListNotifications" => { + #[allow(non_camel_case_types)] + struct ListNotificationsSvc(pub Arc); + impl< + T: NotificationService, + > tonic::server::UnaryService + for ListNotificationsSvc { + type Response = super::ListNotificationsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_notifications( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListNotificationsSvc(inner); + let codec = 
tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for NotificationServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.NotificationService"; + impl tonic::server::NamedService for NotificationServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. 
pub mod organisation_service_client { #![allow( unused_variables, @@ -1107,6 +5667,32 @@ pub mod release_service_client { ); self.inner.unary(req, path, codec).await } + pub async fn get_releases_by_actor( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleaseService/GetReleasesByActor", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.ReleaseService", "GetReleasesByActor"), + ); + self.inner.unary(req, path, codec).await + } pub async fn get_organisations( &mut self, request: impl tonic::IntoRequest, @@ -1155,6 +5741,82 @@ pub mod release_service_client { .insert(GrpcMethod::new("forest.v1.ReleaseService", "GetProjects")); self.inner.unary(req, path, codec).await } + pub async fn create_project( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleaseService/CreateProject", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.ReleaseService", "CreateProject")); + self.inner.unary(req, path, codec).await + } + pub async fn get_destination_states( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = 
tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleaseService/GetDestinationStates", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.ReleaseService", "GetDestinationStates"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn get_release_intent_states( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleaseService/GetReleaseIntentStates", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.ReleaseService", "GetReleaseIntentStates"), + ); + self.inner.unary(req, path, codec).await + } } } /// Generated server implementations. 
@@ -1208,6 +5870,13 @@ pub mod release_service_server { tonic::Response, tonic::Status, >; + async fn get_releases_by_actor( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; async fn get_organisations( &self, request: tonic::Request, @@ -1222,6 +5891,27 @@ pub mod release_service_server { tonic::Response, tonic::Status, >; + async fn create_project( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_destination_states( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_release_intent_states( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } #[derive(Debug)] pub struct ReleaseServiceServer { @@ -1531,6 +6221,55 @@ pub mod release_service_server { }; Box::pin(fut) } + "/forest.v1.ReleaseService/GetReleasesByActor" => { + #[allow(non_camel_case_types)] + struct GetReleasesByActorSvc(pub Arc); + impl< + T: ReleaseService, + > tonic::server::UnaryService + for GetReleasesByActorSvc { + type Response = super::GetReleasesByActorResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_releases_by_actor( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetReleasesByActorSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + 
accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } "/forest.v1.ReleaseService/GetOrganisations" => { #[allow(non_camel_case_types)] struct GetOrganisationsSvc(pub Arc); @@ -1622,6 +6361,149 @@ pub mod release_service_server { }; Box::pin(fut) } + "/forest.v1.ReleaseService/CreateProject" => { + #[allow(non_camel_case_types)] + struct CreateProjectSvc(pub Arc); + impl< + T: ReleaseService, + > tonic::server::UnaryService + for CreateProjectSvc { + type Response = super::CreateProjectResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_project(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateProjectSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ReleaseService/GetDestinationStates" => { + #[allow(non_camel_case_types)] + struct GetDestinationStatesSvc(pub Arc); + impl< + T: ReleaseService, + > tonic::server::UnaryService + for GetDestinationStatesSvc { + type Response = super::GetDestinationStatesResponse; + type Future = 
BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_destination_states( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetDestinationStatesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ReleaseService/GetReleaseIntentStates" => { + #[allow(non_camel_case_types)] + struct GetReleaseIntentStatesSvc(pub Arc); + impl< + T: ReleaseService, + > tonic::server::UnaryService + for GetReleaseIntentStatesSvc { + type Response = super::GetReleaseIntentStatesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_release_intent_states( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetReleaseIntentStatesSvc(inner); + let codec = 
tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } _ => { Box::pin(async move { let mut response = http::Response::new( @@ -1663,6 +6545,3306 @@ pub mod release_service_server { } } /// Generated client implementations. +pub mod policy_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct PolicyServiceClient { + inner: tonic::client::Grpc, + } + impl PolicyServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl PolicyServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> PolicyServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + PolicyServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. 
+ /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn create_policy( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.PolicyService/CreatePolicy", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.PolicyService", "CreatePolicy")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn update_policy( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.PolicyService/UpdatePolicy", + ); + let mut req 
= request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.PolicyService", "UpdatePolicy")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn delete_policy( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.PolicyService/DeletePolicy", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.PolicyService", "DeletePolicy")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn list_policies( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.PolicyService/ListPolicies", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.PolicyService", "ListPolicies")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn evaluate_policies( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.PolicyService/EvaluatePolicies", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.PolicyService", "EvaluatePolicies")); + 
self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod policy_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with PolicyServiceServer. + #[async_trait] + pub trait PolicyService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn create_policy( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn update_policy( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn delete_policy( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn list_policies( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn evaluate_policies( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct PolicyServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl PolicyServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests 
with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for PolicyServiceServer + where + T: PolicyService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.PolicyService/CreatePolicy" => { + #[allow(non_camel_case_types)] + struct CreatePolicySvc(pub Arc); + impl< + T: PolicyService, + > tonic::server::UnaryService + for CreatePolicySvc { + type Response = super::CreatePolicyResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_policy(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let 
max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreatePolicySvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.PolicyService/UpdatePolicy" => { + #[allow(non_camel_case_types)] + struct UpdatePolicySvc(pub Arc); + impl< + T: PolicyService, + > tonic::server::UnaryService + for UpdatePolicySvc { + type Response = super::UpdatePolicyResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_policy(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdatePolicySvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.PolicyService/DeletePolicy" => { + #[allow(non_camel_case_types)] + struct DeletePolicySvc(pub Arc); + impl< + T: PolicyService, + > 
tonic::server::UnaryService + for DeletePolicySvc { + type Response = super::DeletePolicyResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_policy(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeletePolicySvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.PolicyService/ListPolicies" => { + #[allow(non_camel_case_types)] + struct ListPoliciesSvc(pub Arc); + impl< + T: PolicyService, + > tonic::server::UnaryService + for ListPoliciesSvc { + type Response = super::ListPoliciesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_policies(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListPoliciesSvc(inner); + 
let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.PolicyService/EvaluatePolicies" => { + #[allow(non_camel_case_types)] + struct EvaluatePoliciesSvc(pub Arc); + impl< + T: PolicyService, + > tonic::server::UnaryService + for EvaluatePoliciesSvc { + type Response = super::EvaluatePoliciesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::evaluate_policies(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = EvaluatePoliciesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + 
Ok(response) + }) + } + } + } + } + impl Clone for PolicyServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.PolicyService"; + impl tonic::server::NamedService for PolicyServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod registry_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct RegistryServiceClient { + inner: tonic::client::Grpc, + } + impl RegistryServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl RegistryServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> RegistryServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + RegistryServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn get_components( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/GetComponents", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RegistryService", "GetComponents")); + self.inner.unary(req, path, codec).await + } + pub async fn get_component( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/GetComponent", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RegistryService", "GetComponent")); + self.inner.unary(req, path, codec).await + } + pub async fn get_component_version( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/GetComponentVersion", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + 
GrpcMethod::new("forest.v1.RegistryService", "GetComponentVersion"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn begin_upload( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/BeginUpload", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RegistryService", "BeginUpload")); + self.inner.unary(req, path, codec).await + } + pub async fn upload_file( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/UploadFile", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RegistryService", "UploadFile")); + self.inner.unary(req, path, codec).await + } + pub async fn commit_upload( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/CommitUpload", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RegistryService", "CommitUpload")); + self.inner.unary(req, path, codec).await + } + pub async fn 
get_component_files( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RegistryService/GetComponentFiles", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.RegistryService", "GetComponentFiles"), + ); + self.inner.server_streaming(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod registry_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with RegistryServiceServer. + #[async_trait] + pub trait RegistryService: std::marker::Send + std::marker::Sync + 'static { + async fn get_components( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_component( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_component_version( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn begin_upload( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn upload_file( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn commit_upload( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the GetComponentFiles method. 
+ type GetComponentFilesStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result< + super::GetComponentFilesResponse, + tonic::Status, + >, + > + + std::marker::Send + + 'static; + async fn get_component_files( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct RegistryServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl RegistryServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for RegistryServiceServer + where + T: RegistryService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.RegistryService/GetComponents" => { + #[allow(non_camel_case_types)] + struct GetComponentsSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for GetComponentsSvc { + type Response = super::GetComponentsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_components(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetComponentsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/GetComponent" => { + #[allow(non_camel_case_types)] + 
struct GetComponentSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for GetComponentSvc { + type Response = super::GetComponentResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_component(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetComponentSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/GetComponentVersion" => { + #[allow(non_camel_case_types)] + struct GetComponentVersionSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for GetComponentVersionSvc { + type Response = super::GetComponentVersionResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_component_version( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = 
self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetComponentVersionSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/BeginUpload" => { + #[allow(non_camel_case_types)] + struct BeginUploadSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for BeginUploadSvc { + type Response = super::BeginUploadResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::begin_upload(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = BeginUploadSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/UploadFile" => { + #[allow(non_camel_case_types)] + struct UploadFileSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for UploadFileSvc { + type Response = 
super::UploadFileResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::upload_file(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UploadFileSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/CommitUpload" => { + #[allow(non_camel_case_types)] + struct CommitUploadSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::UnaryService + for CommitUploadSvc { + type Response = super::CommitUploadResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::commit_upload(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CommitUploadSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = 
tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RegistryService/GetComponentFiles" => { + #[allow(non_camel_case_types)] + struct GetComponentFilesSvc(pub Arc); + impl< + T: RegistryService, + > tonic::server::ServerStreamingService< + super::GetComponentFilesRequest, + > for GetComponentFilesSvc { + type Response = super::GetComponentFilesResponse; + type ResponseStream = T::GetComponentFilesStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_component_files(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetComponentFilesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + 
http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for RegistryServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.RegistryService"; + impl tonic::server::NamedService for RegistryServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod release_pipeline_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct ReleasePipelineServiceClient { + inner: tonic::client::Grpc, + } + impl ReleasePipelineServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl ReleasePipelineServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> ReleasePipelineServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + ReleasePipelineServiceClient::new( + InterceptedService::new(inner, interceptor), + ) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn create_release_pipeline( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleasePipelineService/CreateReleasePipeline", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.ReleasePipelineService", + "CreateReleasePipeline", + ), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn update_release_pipeline( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleasePipelineService/UpdateReleasePipeline", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.ReleasePipelineService", + "UpdateReleasePipeline", + ), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn delete_release_pipeline( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + 
"/forest.v1.ReleasePipelineService/DeleteReleasePipeline", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.ReleasePipelineService", + "DeleteReleasePipeline", + ), + ); + self.inner.unary(req, path, codec).await + } + /// + pub async fn list_release_pipelines( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.ReleasePipelineService/ListReleasePipelines", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "forest.v1.ReleasePipelineService", + "ListReleasePipelines", + ), + ); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod release_pipeline_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with ReleasePipelineServiceServer. 
+ #[async_trait] + pub trait ReleasePipelineService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn create_release_pipeline( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn update_release_pipeline( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn delete_release_pipeline( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn list_release_pipelines( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct ReleasePipelineServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl ReleasePipelineServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. 
+ #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> + for ReleasePipelineServiceServer + where + T: ReleasePipelineService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.ReleasePipelineService/CreateReleasePipeline" => { + #[allow(non_camel_case_types)] + struct CreateReleasePipelineSvc( + pub Arc, + ); + impl< + T: ReleasePipelineService, + > tonic::server::UnaryService + for CreateReleasePipelineSvc { + type Response = super::CreateReleasePipelineResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_release_pipeline( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let 
fut = async move { + let method = CreateReleasePipelineSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ReleasePipelineService/UpdateReleasePipeline" => { + #[allow(non_camel_case_types)] + struct UpdateReleasePipelineSvc( + pub Arc, + ); + impl< + T: ReleasePipelineService, + > tonic::server::UnaryService + for UpdateReleasePipelineSvc { + type Response = super::UpdateReleasePipelineResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_release_pipeline( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdateReleasePipelineSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ReleasePipelineService/DeleteReleasePipeline" => { + #[allow(non_camel_case_types)] + struct DeleteReleasePipelineSvc( + pub Arc, + ); + impl< + T: ReleasePipelineService, + > 
tonic::server::UnaryService + for DeleteReleasePipelineSvc { + type Response = super::DeleteReleasePipelineResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_release_pipeline( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteReleasePipelineSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.ReleasePipelineService/ListReleasePipelines" => { + #[allow(non_camel_case_types)] + struct ListReleasePipelinesSvc( + pub Arc, + ); + impl< + T: ReleasePipelineService, + > tonic::server::UnaryService + for ListReleasePipelinesSvc { + type Response = super::ListReleasePipelinesResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_release_pipelines( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = 
self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListReleasePipelinesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for ReleasePipelineServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.ReleasePipelineService"; + impl tonic::server::NamedService for ReleasePipelineServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod runner_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct RunnerServiceClient { + inner: tonic::client::Grpc, + } + impl RunnerServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl RunnerServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> RunnerServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + RunnerServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn register_runner( + &mut self, + request: impl tonic::IntoStreamingRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/RegisterRunner", + ); + let mut req = request.into_streaming_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RunnerService", "RegisterRunner")); + self.inner.streaming(req, path, codec).await + } + pub async fn get_release_files( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/GetReleaseFiles", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RunnerService", "GetReleaseFiles")); + self.inner.server_streaming(req, path, codec).await + } + pub async fn push_logs( + &mut self, + request: impl tonic::IntoStreamingRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/PushLogs", + ); + let mut req = request.into_streaming_request(); + req.extensions_mut() + 
.insert(GrpcMethod::new("forest.v1.RunnerService", "PushLogs")); + self.inner.client_streaming(req, path, codec).await + } + pub async fn get_spec_files( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/GetSpecFiles", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RunnerService", "GetSpecFiles")); + self.inner.server_streaming(req, path, codec).await + } + pub async fn get_release_annotation( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/GetReleaseAnnotation", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("forest.v1.RunnerService", "GetReleaseAnnotation"), + ); + self.inner.unary(req, path, codec).await + } + pub async fn get_project_info( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/GetProjectInfo", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RunnerService", "GetProjectInfo")); + self.inner.unary(req, 
path, codec).await + } + pub async fn complete_release( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.RunnerService/CompleteRelease", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.RunnerService", "CompleteRelease")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod runner_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with RunnerServiceServer. + #[async_trait] + pub trait RunnerService: std::marker::Send + std::marker::Sync + 'static { + /// Server streaming response type for the RegisterRunner method. + type RegisterRunnerStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + async fn register_runner( + &self, + request: tonic::Request>, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the GetReleaseFiles method. + type GetReleaseFilesStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + async fn get_release_files( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn push_logs( + &self, + request: tonic::Request>, + ) -> std::result::Result, tonic::Status>; + /// Server streaming response type for the GetSpecFiles method. 
+ type GetSpecFilesStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + std::marker::Send + + 'static; + async fn get_spec_files( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_release_annotation( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn get_project_info( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + async fn complete_release( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + #[derive(Debug)] + pub struct RunnerServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl RunnerServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. 
+ /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for RunnerServiceServer + where + T: RunnerService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.RunnerService/RegisterRunner" => { + #[allow(non_camel_case_types)] + struct RegisterRunnerSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::StreamingService + for RegisterRunnerSvc { + type Response = super::ServerMessage; + type ResponseStream = T::RegisterRunnerStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + tonic::Streaming, + >, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::register_runner(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = RegisterRunnerSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + 
send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/GetReleaseFiles" => { + #[allow(non_camel_case_types)] + struct GetReleaseFilesSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::ServerStreamingService< + super::GetReleaseFilesRequest, + > for GetReleaseFilesSvc { + type Response = super::ReleaseFile; + type ResponseStream = T::GetReleaseFilesStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_release_files(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetReleaseFilesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/PushLogs" => { + #[allow(non_camel_case_types)] + struct PushLogsSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::ClientStreamingService + for PushLogsSvc { + type Response = super::PushLogResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request< + tonic::Streaming, + >, + ) -> Self::Future { + let 
inner = Arc::clone(&self.0); + let fut = async move { + ::push_logs(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = PushLogsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.client_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/GetSpecFiles" => { + #[allow(non_camel_case_types)] + struct GetSpecFilesSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::ServerStreamingService + for GetSpecFilesSvc { + type Response = super::ReleaseFile; + type ResponseStream = T::GetSpecFilesStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_spec_files(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetSpecFilesSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + 
.apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/GetReleaseAnnotation" => { + #[allow(non_camel_case_types)] + struct GetReleaseAnnotationSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::UnaryService + for GetReleaseAnnotationSvc { + type Response = super::ReleaseAnnotationResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_release_annotation( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetReleaseAnnotationSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/GetProjectInfo" => { + #[allow(non_camel_case_types)] + struct GetProjectInfoSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::UnaryService + for GetProjectInfoSvc { + type Response = super::ProjectInfoResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_project_info(&inner, request) + .await + 
}; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetProjectInfoSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.RunnerService/CompleteRelease" => { + #[allow(non_camel_case_types)] + struct CompleteReleaseSvc(pub Arc); + impl< + T: RunnerService, + > tonic::server::UnaryService + for CompleteReleaseSvc { + type Response = super::CompleteReleaseResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::complete_release(&inner, request) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CompleteReleaseSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, 
req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for RunnerServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.RunnerService"; + impl tonic::server::NamedService for RunnerServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. +pub mod trigger_service_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// + #[derive(Debug, Clone)] + pub struct TriggerServiceClient { + inner: tonic::client::Grpc, + } + impl TriggerServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl TriggerServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> TriggerServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + TriggerServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// + pub async fn create_trigger( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.TriggerService/CreateTrigger", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.TriggerService", "CreateTrigger")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn update_trigger( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.TriggerService/UpdateTrigger", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.TriggerService", "UpdateTrigger")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn delete_trigger( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.TriggerService/DeleteTrigger", + ); + let mut req = request.into_request(); + req.extensions_mut() + 
.insert(GrpcMethod::new("forest.v1.TriggerService", "DeleteTrigger")); + self.inner.unary(req, path, codec).await + } + /// + pub async fn list_triggers( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.TriggerService/ListTriggers", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.TriggerService", "ListTriggers")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod trigger_service_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with TriggerServiceServer. 
+ #[async_trait] + pub trait TriggerService: std::marker::Send + std::marker::Sync + 'static { + /// + async fn create_trigger( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn update_trigger( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn delete_trigger( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// + async fn list_triggers( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + } + /// + #[derive(Debug)] + pub struct TriggerServiceServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl TriggerServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. 
+ /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for TriggerServiceServer + where + T: TriggerService, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/forest.v1.TriggerService/CreateTrigger" => { + #[allow(non_camel_case_types)] + struct CreateTriggerSvc(pub Arc); + impl< + T: TriggerService, + > tonic::server::UnaryService + for CreateTriggerSvc { + type Response = super::CreateTriggerResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::create_trigger(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = CreateTriggerSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + 
max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.TriggerService/UpdateTrigger" => { + #[allow(non_camel_case_types)] + struct UpdateTriggerSvc(pub Arc); + impl< + T: TriggerService, + > tonic::server::UnaryService + for UpdateTriggerSvc { + type Response = super::UpdateTriggerResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::update_trigger(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = UpdateTriggerSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.TriggerService/DeleteTrigger" => { + #[allow(non_camel_case_types)] + struct DeleteTriggerSvc(pub Arc); + impl< + T: TriggerService, + > tonic::server::UnaryService + for DeleteTriggerSvc { + type Response = super::DeleteTriggerResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::delete_trigger(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; 
+ let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = DeleteTriggerSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/forest.v1.TriggerService/ListTriggers" => { + #[allow(non_camel_case_types)] + struct ListTriggersSvc(pub Arc); + impl< + T: TriggerService, + > tonic::server::UnaryService + for ListTriggersSvc { + type Response = super::ListTriggersResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_triggers(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListTriggersSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + let mut response = 
http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } + } + } + } + impl Clone for TriggerServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "forest.v1.TriggerService"; + impl tonic::server::NamedService for TriggerServiceServer { + const NAME: &'static str = SERVICE_NAME; + } +} +/// Generated client implementations. pub mod users_service_client { #![allow( unused_variables, @@ -1939,6 +10121,30 @@ pub mod users_service_client { .insert(GrpcMethod::new("forest.v1.UsersService", "ListUsers")); self.inner.unary(req, path, codec).await } + pub async fn get_user_stats( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic_prost::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/forest.v1.UsersService/GetUserStats", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("forest.v1.UsersService", "GetUserStats")); + self.inner.unary(req, path, codec).await + } pub async fn change_password( &mut self, request: impl tonic::IntoRequest, @@ -2351,6 +10557,13 @@ pub mod users_service_server { tonic::Response, tonic::Status, >; + async fn get_user_stats( + &self, + 
request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; async fn change_password( &self, request: tonic::Request, @@ -2885,6 +11098,51 @@ pub mod users_service_server { }; Box::pin(fut) } + "/forest.v1.UsersService/GetUserStats" => { + #[allow(non_camel_case_types)] + struct GetUserStatsSvc(pub Arc); + impl< + T: UsersService, + > tonic::server::UnaryService + for GetUserStatsSvc { + type Response = super::GetUserStatsResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_user_stats(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = GetUserStatsSvc(inner); + let codec = tonic_prost::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } "/forest.v1.UsersService/ChangePassword" => { #[allow(non_camel_case_types)] struct ChangePasswordSvc(pub Arc); diff --git a/crates/forage-server/Cargo.toml b/crates/forage-server/Cargo.toml index 327763f..7f2821b 100644 --- a/crates/forage-server/Cargo.toml +++ b/crates/forage-server/Cargo.toml @@ -24,3 +24,10 @@ tracing.workspace = true tracing-subscriber.workspace = true time.workspace = true uuid.workspace = true +urlencoding = "2.1.3" +opentelemetry.workspace = true +opentelemetry_sdk.workspace = true 
+opentelemetry-otlp.workspace = true +tracing-opentelemetry.workspace = true +futures-util = "0.3" +tokio-stream = "0.1" diff --git a/crates/forage-server/src/forest_client.rs b/crates/forage-server/src/forest_client.rs index ce1e1ec..e37125c 100644 --- a/crates/forage-server/src/forest_client.rs +++ b/crates/forage-server/src/forest_client.rs @@ -1,10 +1,19 @@ use forage_core::auth::{ AuthError, AuthTokens, CreatedToken, ForestAuth, PersonalAccessToken, User, UserEmail, + UserProfile, }; use forage_core::platform::{ - Artifact, ArtifactContext, ArtifactDestination, ArtifactSource, Destination, ForestPlatform, - Organisation, OrgMember, PlatformError, + Artifact, ArtifactContext, ArtifactDestination, ArtifactRef, ArtifactSource, CreatePolicyInput, + CreateReleasePipelineInput, CreateTriggerInput, Destination, DestinationType, Environment, + ForestPlatform, Organisation, OrgMember, PipelineStage, PipelineStageConfig, PlatformError, + Policy, PolicyConfig, ReleasePipeline, Trigger, UpdatePolicyInput, + UpdateReleasePipelineInput, UpdateTriggerInput, }; +use forage_grpc::policy_service_client::PolicyServiceClient; +use forage_grpc::release_pipeline_service_client::ReleasePipelineServiceClient; +use forage_grpc::trigger_service_client::TriggerServiceClient; +use forage_grpc::destination_service_client::DestinationServiceClient; +use forage_grpc::environment_service_client::EnvironmentServiceClient; use forage_grpc::organisation_service_client::OrganisationServiceClient; use forage_grpc::release_service_client::ReleaseServiceClient; use forage_grpc::users_service_client::UsersServiceClient; @@ -42,10 +51,42 @@ impl GrpcForestClient { OrganisationServiceClient::new(self.channel.clone()) } - fn release_client(&self) -> ReleaseServiceClient { + pub(crate) fn artifact_client( + &self, + ) -> forage_grpc::artifact_service_client::ArtifactServiceClient { + forage_grpc::artifact_service_client::ArtifactServiceClient::new(self.channel.clone()) + } + + pub(crate) fn 
release_client(&self) -> ReleaseServiceClient { ReleaseServiceClient::new(self.channel.clone()) } + fn env_client(&self) -> EnvironmentServiceClient { + EnvironmentServiceClient::new(self.channel.clone()) + } + + fn dest_client(&self) -> DestinationServiceClient { + DestinationServiceClient::new(self.channel.clone()) + } + + fn trigger_client(&self) -> TriggerServiceClient { + TriggerServiceClient::new(self.channel.clone()) + } + + fn policy_client(&self) -> PolicyServiceClient { + PolicyServiceClient::new(self.channel.clone()) + } + + fn pipeline_client(&self) -> ReleasePipelineServiceClient { + ReleasePipelineServiceClient::new(self.channel.clone()) + } + + pub fn event_client( + &self, + ) -> forage_grpc::event_service_client::EventServiceClient { + forage_grpc::event_service_client::EventServiceClient::new(self.channel.clone()) + } + fn authed_request(access_token: &str, msg: T) -> Result, AuthError> { bearer_request(access_token, msg).map_err(AuthError::Other) } @@ -202,6 +243,41 @@ impl ForestAuth for GrpcForestClient { Ok(convert_user(user)) } + async fn get_user_by_username( + &self, + access_token: &str, + username: &str, + ) -> Result { + let req = Self::authed_request( + access_token, + forage_grpc::GetUserRequest { + identifier: Some(forage_grpc::get_user_request::Identifier::Username( + username.into(), + )), + }, + )?; + + let resp = self + .client() + .get_user(req) + .await + .map_err(map_status)? 
+ .into_inner(); + + let user = resp + .user + .ok_or(AuthError::Other("no user in response".into()))?; + Ok(UserProfile { + user_id: user.user_id, + username: user.username, + created_at: user.created_at.map(|ts| { + chrono::DateTime::from_timestamp(ts.seconds, ts.nanos as u32) + .map(|dt| dt.to_rfc3339()) + .unwrap_or_default() + }), + }) + } + async fn list_tokens( &self, access_token: &str, @@ -396,8 +472,13 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact { source_type: s.source_type.filter(|v| !v.is_empty()), run_url: s.run_url.filter(|v| !v.is_empty()), }); - // Artifact proto does not carry git ref directly; git info comes from AnnotateRelease. - // We leave git_ref as None for now. + let git_ref = a.r#ref.map(|r| ArtifactRef { + commit_sha: r.commit_sha, + branch: r.branch.filter(|v| !v.is_empty()), + commit_message: r.commit_message.filter(|v| !v.is_empty()), + version: r.version.filter(|v| !v.is_empty()), + repo_url: r.repo_url.filter(|v| !v.is_empty()), + }); let destinations = a .destinations .into_iter() @@ -419,6 +500,11 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact { } else { Some(d.type_version) }, + status: if d.status.is_empty() { + None + } else { + Some(d.status) + }, }) .collect(); Artifact { @@ -435,12 +521,202 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact { pr: ctx.pr.filter(|v| !v.is_empty()), }, source, - git_ref: None, + git_ref, destinations, created_at: a.created_at, } } +fn convert_pipeline_stage(s: forage_grpc::PipelineStage) -> PipelineStage { + let config = match s.config { + Some(forage_grpc::pipeline_stage::Config::Deploy(d)) => { + PipelineStageConfig::Deploy { environment: d.environment } + } + Some(forage_grpc::pipeline_stage::Config::Wait(w)) => { + PipelineStageConfig::Wait { duration_seconds: w.duration_seconds } + } + None => PipelineStageConfig::Deploy { environment: String::new() }, + }; + PipelineStage { + id: s.id, + depends_on: s.depends_on, + config, + } +} + +/// Convert a 
`PipelineStageState` proto message (from GetReleaseIntentStates) +/// to the domain type. Same enum mapping as `convert_pipeline_run_stage`. +fn convert_pipeline_stage_state( + s: forage_grpc::PipelineStageState, +) -> forage_core::platform::PipelineRunStageState { + let stage_type = match forage_grpc::PipelineRunStageType::try_from(s.stage_type) { + Ok(forage_grpc::PipelineRunStageType::Deploy) => "deploy", + Ok(forage_grpc::PipelineRunStageType::Wait) => "wait", + _ => "unknown", + }; + let status = match forage_grpc::PipelineRunStageStatus::try_from(s.status) { + Ok(forage_grpc::PipelineRunStageStatus::Pending) => "PENDING", + Ok(forage_grpc::PipelineRunStageStatus::Active) => "RUNNING", + Ok(forage_grpc::PipelineRunStageStatus::Succeeded) => "SUCCEEDED", + Ok(forage_grpc::PipelineRunStageStatus::Failed) => "FAILED", + Ok(forage_grpc::PipelineRunStageStatus::Cancelled) => "CANCELLED", + _ => "PENDING", + }; + forage_core::platform::PipelineRunStageState { + stage_id: s.stage_id, + depends_on: s.depends_on, + stage_type: stage_type.into(), + status: status.into(), + environment: s.environment, + duration_seconds: s.duration_seconds, + queued_at: s.queued_at, + started_at: s.started_at, + completed_at: s.completed_at, + error_message: s.error_message, + wait_until: s.wait_until, + release_ids: s.release_ids, + } +} + +fn convert_release_step_state( + s: forage_grpc::ReleaseStepState, +) -> forage_core::platform::ReleaseStepState { + forage_core::platform::ReleaseStepState { + release_id: s.release_id, + stage_id: s.stage_id, + destination_name: s.destination_name, + environment: s.environment, + status: s.status, + queued_at: s.queued_at, + assigned_at: s.assigned_at, + started_at: s.started_at, + completed_at: s.completed_at, + error_message: s.error_message, + } +} + +fn convert_stages_to_grpc(stages: &[PipelineStage]) -> Vec { + stages + .iter() + .map(|s| forage_grpc::PipelineStage { + id: s.id.clone(), + depends_on: s.depends_on.clone(), + config: Some(match 
&s.config { + PipelineStageConfig::Deploy { environment } => { + forage_grpc::pipeline_stage::Config::Deploy(forage_grpc::DeployStageConfig { + environment: environment.clone(), + }) + } + PipelineStageConfig::Wait { duration_seconds } => { + forage_grpc::pipeline_stage::Config::Wait(forage_grpc::WaitStageConfig { + duration_seconds: *duration_seconds, + }) + } + }), + }) + .collect() +} + +fn convert_release_pipeline(p: forage_grpc::ReleasePipeline) -> ReleasePipeline { + ReleasePipeline { + id: p.id, + name: p.name, + enabled: p.enabled, + stages: p.stages.into_iter().map(convert_pipeline_stage).collect(), + created_at: p.created_at, + updated_at: p.updated_at, + } +} + +fn convert_trigger(t: forage_grpc::Trigger) -> Trigger { + Trigger { + id: t.id, + name: t.name, + enabled: t.enabled, + branch_pattern: t.branch_pattern, + title_pattern: t.title_pattern, + author_pattern: t.author_pattern, + commit_message_pattern: t.commit_message_pattern, + source_type_pattern: t.source_type_pattern, + target_environments: t.target_environments, + target_destinations: t.target_destinations, + force_release: t.force_release, + use_pipeline: t.use_pipeline, + created_at: t.created_at, + updated_at: t.updated_at, + } +} + +fn convert_policy(p: forage_grpc::Policy) -> Policy { + let policy_type_str = match forage_grpc::PolicyType::try_from(p.policy_type) { + Ok(forage_grpc::PolicyType::SoakTime) => "soak_time", + Ok(forage_grpc::PolicyType::BranchRestriction) => "branch_restriction", + _ => "unknown", + }; + let config = match p.config { + Some(forage_grpc::policy::Config::SoakTime(c)) => PolicyConfig::SoakTime { + source_environment: c.source_environment, + target_environment: c.target_environment, + duration_seconds: c.duration_seconds, + }, + Some(forage_grpc::policy::Config::BranchRestriction(c)) => { + PolicyConfig::BranchRestriction { + target_environment: c.target_environment, + branch_pattern: c.branch_pattern, + } + } + None => PolicyConfig::SoakTime { + 
source_environment: String::new(), + target_environment: String::new(), + duration_seconds: 0, + }, + }; + Policy { + id: p.id, + name: p.name, + enabled: p.enabled, + policy_type: policy_type_str.into(), + config, + created_at: p.created_at, + updated_at: p.updated_at, + } +} + +fn policy_config_to_grpc( + config: &PolicyConfig, +) -> (i32, Option) { + match config { + PolicyConfig::SoakTime { + source_environment, + target_environment, + duration_seconds, + } => ( + forage_grpc::PolicyType::SoakTime as i32, + Some(forage_grpc::create_policy_request::Config::SoakTime( + forage_grpc::SoakTimeConfig { + source_environment: source_environment.clone(), + target_environment: target_environment.clone(), + duration_seconds: *duration_seconds, + }, + )), + ), + PolicyConfig::BranchRestriction { + target_environment, + branch_pattern, + } => ( + forage_grpc::PolicyType::BranchRestriction as i32, + Some( + forage_grpc::create_policy_request::Config::BranchRestriction( + forage_grpc::BranchRestrictionConfig { + target_environment: target_environment.clone(), + branch_pattern: branch_pattern.clone(), + }, + ), + ), + ), + } +} + fn convert_member(m: forage_grpc::OrganisationMember) -> OrgMember { OrgMember { user_id: m.user_id, @@ -688,13 +964,661 @@ impl ForestPlatform for GrpcForestClient { Ok(convert_artifact(artifact)) } + async fn list_environments( + &self, + access_token: &str, + organisation: &str, + ) -> Result, PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::ListEnvironmentsRequest { + organisation: organisation.into(), + }, + )?; + let resp = self + .env_client() + .list_environments(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + Ok(resp + .environments + .into_iter() + .map(|e| Environment { + id: e.id, + organisation: e.organisation, + name: e.name, + description: e.description.filter(|v| !v.is_empty()), + sort_order: e.sort_order, + created_at: e.created_at, + }) + .collect()) + } + async fn list_destinations( &self, - _access_token: &str, - _organisation: &str, + access_token: &str, + organisation: &str, ) -> Result, PlatformError> { - // DestinationService client not yet generated; return empty for now - Ok(vec![]) + let req = platform_authed_request( + access_token, + forage_grpc::GetDestinationsRequest { + organisation: organisation.into(), + }, + )?; + let resp = self + .dest_client() + .get_destinations(req) + .await + .map_err(map_platform_status)? + .into_inner(); + Ok(resp + .destinations + .into_iter() + .map(|d| Destination { + name: d.name, + environment: d.environment, + organisation: d.organisation, + metadata: d.metadata, + dest_type: d.r#type.map(|t| DestinationType { + organisation: t.organisation, + name: t.name, + version: t.version, + }), + }) + .collect()) + } + + async fn create_environment( + &self, + access_token: &str, + organisation: &str, + name: &str, + description: Option<&str>, + sort_order: i32, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::CreateEnvironmentRequest { + organisation: organisation.into(), + name: name.into(), + description: description.map(|s| s.to_string()), + sort_order, + }, + )?; + let resp = self + .env_client() + .create_environment(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let e = resp + .environment + .ok_or(PlatformError::Other("no environment in response".into()))?; + Ok(Environment { + id: e.id, + organisation: e.organisation, + name: e.name, + description: e.description.filter(|v| !v.is_empty()), + sort_order: e.sort_order, + created_at: e.created_at, + }) + } + + async fn create_destination( + &self, + access_token: &str, + organisation: &str, + name: &str, + environment: &str, + metadata: &std::collections::HashMap, + dest_type: Option<&forage_core::platform::DestinationType>, + ) -> Result<(), PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::CreateDestinationRequest { + organisation: organisation.into(), + name: name.into(), + environment: environment.into(), + metadata: metadata.clone(), + r#type: dest_type.map(|t| forage_grpc::DestinationType { + organisation: t.organisation.clone(), + name: t.name.clone(), + version: t.version, + }), + }, + )?; + self.dest_client() + .create_destination(req) + .await + .map_err(map_platform_status)?; + Ok(()) + } + + async fn update_destination( + &self, + access_token: &str, + name: &str, + metadata: &std::collections::HashMap, + ) -> Result<(), PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::UpdateDestinationRequest { + name: name.into(), + metadata: metadata.clone(), + }, + )?; + self.dest_client() + .update_destination(req) + .await + .map_err(map_platform_status)?; + Ok(()) + } + + async fn get_destination_states( + &self, + access_token: &str, + organisation: &str, + project: Option<&str>, + ) -> Result { + let req = bearer_request( + access_token, + forage_grpc::GetDestinationStatesRequest { + organisation: organisation.into(), + project: project.map(|p| p.into()), + }, + ) + .map_err(|e| PlatformError::Other(e.to_string()))?; + + let resp = self + .release_client() + .get_destination_states(req) + .await + .map_err(map_platform_status)?; + + let inner = resp.into_inner(); + + let 
destinations = inner + .destinations + .into_iter() + .map(|d| forage_core::platform::DestinationState { + destination_id: d.destination_id, + destination_name: d.destination_name, + environment: d.environment, + release_id: d.release_id, + artifact_id: d.artifact_id, + status: d.status, + error_message: d.error_message, + queued_at: d.queued_at, + completed_at: d.completed_at, + queue_position: d.queue_position, + started_at: d.started_at, + }) + .collect(); + + Ok(forage_core::platform::DeploymentStates { + destinations, + }) + } + + async fn get_release_intent_states( + &self, + access_token: &str, + organisation: &str, + project: Option<&str>, + include_completed: bool, + ) -> Result, PlatformError> { + let req = bearer_request( + access_token, + forage_grpc::GetReleaseIntentStatesRequest { + organisation: organisation.into(), + project: project.map(|p| p.into()), + include_completed, + }, + ) + .map_err(|e| PlatformError::Other(e.to_string()))?; + + let resp = self + .release_client() + .get_release_intent_states(req) + .await + .map_err(map_platform_status)?; + + Ok(resp + .into_inner() + .release_intents + .into_iter() + .map(|ri| forage_core::platform::ReleaseIntentState { + release_intent_id: ri.release_intent_id, + artifact_id: ri.artifact_id, + project: ri.project, + created_at: ri.created_at, + stages: ri + .stages + .into_iter() + .map(convert_pipeline_stage_state) + .collect(), + steps: ri + .steps + .into_iter() + .map(convert_release_step_state) + .collect(), + }) + .collect()) + } + + async fn release_artifact( + &self, + access_token: &str, + artifact_id: &str, + destinations: &[String], + environments: &[String], + use_pipeline: bool, + ) -> Result<(), PlatformError> { + let req = bearer_request( + access_token, + forage_grpc::ReleaseRequest { + artifact_id: artifact_id.into(), + destinations: destinations.to_vec(), + environments: environments.to_vec(), + force: false, + use_pipeline, + }, + ) + .map_err(|e| 
PlatformError::Other(e.to_string()))?; + + self.release_client() + .release(req) + .await + .map_err(map_platform_status)?; + + Ok(()) + } + + async fn list_triggers( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::ListTriggersRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + }, + )?; + let resp = self + .trigger_client() + .list_triggers(req) + .await + .map_err(map_platform_status)? + .into_inner(); + Ok(resp.triggers.into_iter().map(convert_trigger).collect()) + } + + async fn create_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreateTriggerInput, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::CreateTriggerRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: input.name.clone(), + branch_pattern: input.branch_pattern.clone(), + title_pattern: input.title_pattern.clone(), + author_pattern: input.author_pattern.clone(), + commit_message_pattern: input.commit_message_pattern.clone(), + source_type_pattern: input.source_type_pattern.clone(), + target_environments: input.target_environments.clone(), + target_destinations: input.target_destinations.clone(), + force_release: input.force_release, + use_pipeline: input.use_pipeline, + }, + )?; + let resp = self + .trigger_client() + .create_trigger(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let trigger = resp + .trigger + .ok_or(PlatformError::Other("no trigger in response".into()))?; + Ok(convert_trigger(trigger)) + } + + async fn update_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdateTriggerInput, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::UpdateTriggerRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + enabled: input.enabled, + branch_pattern: input.branch_pattern.clone(), + title_pattern: input.title_pattern.clone(), + author_pattern: input.author_pattern.clone(), + commit_message_pattern: input.commit_message_pattern.clone(), + source_type_pattern: input.source_type_pattern.clone(), + target_environments: input.target_environments.clone(), + target_destinations: input.target_destinations.clone(), + force_release: input.force_release, + use_pipeline: input.use_pipeline, + }, + )?; + let resp = self + .trigger_client() + .update_trigger(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let trigger = resp + .trigger + .ok_or(PlatformError::Other("no trigger in response".into()))?; + Ok(convert_trigger(trigger)) + } + + async fn delete_trigger( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::DeleteTriggerRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + }, + )?; + self.trigger_client() + .delete_trigger(req) + .await + .map_err(map_platform_status)?; + Ok(()) + } + + async fn list_policies( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::ListPoliciesRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + }, + )?; + let resp = self + .policy_client() + .list_policies(req) + .await + .map_err(map_platform_status)? + .into_inner(); + Ok(resp.policies.into_iter().map(convert_policy).collect()) + } + + async fn create_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreatePolicyInput, + ) -> Result { + let (policy_type, config) = policy_config_to_grpc(&input.config); + let req = platform_authed_request( + access_token, + forage_grpc::CreatePolicyRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: input.name.clone(), + policy_type, + config, + }, + )?; + let resp = self + .policy_client() + .create_policy(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let policy = resp + .policy + .ok_or(PlatformError::Other("no policy in response".into()))?; + Ok(convert_policy(policy)) + } + + async fn update_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdatePolicyInput, + ) -> Result { + let config = input.config.as_ref().map(|c| { + let (_, grpc_config) = policy_config_to_grpc(c); + match grpc_config { + Some(forage_grpc::create_policy_request::Config::SoakTime(s)) => { + forage_grpc::update_policy_request::Config::SoakTime(s) + } + Some(forage_grpc::create_policy_request::Config::BranchRestriction(b)) => { + forage_grpc::update_policy_request::Config::BranchRestriction(b) + } + None => forage_grpc::update_policy_request::Config::SoakTime( + forage_grpc::SoakTimeConfig::default(), + ), + } + }); + let req = platform_authed_request( + access_token, + forage_grpc::UpdatePolicyRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + enabled: input.enabled, + config, + }, + )?; + let resp = self + .policy_client() + .update_policy(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let policy = resp + .policy + .ok_or(PlatformError::Other("no policy in response".into()))?; + Ok(convert_policy(policy)) + } + + async fn delete_policy( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::DeletePolicyRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + }, + )?; + self.policy_client() + .delete_policy(req) + .await + .map_err(map_platform_status)?; + Ok(()) + } + + async fn list_release_pipelines( + &self, + access_token: &str, + organisation: &str, + project: &str, + ) -> Result, PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::ListReleasePipelinesRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + }, + )?; + let resp = self + .pipeline_client() + .list_release_pipelines(req) + .await + .map_err(map_platform_status)? + .into_inner(); + Ok(resp + .pipelines + .into_iter() + .map(convert_release_pipeline) + .collect()) + } + + async fn create_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + input: &CreateReleasePipelineInput, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::CreateReleasePipelineRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: input.name.clone(), + stages: convert_stages_to_grpc(&input.stages), + }, + )?; + let resp = self + .pipeline_client() + .create_release_pipeline(req) + .await + .map_err(map_platform_status)? 
+ .into_inner(); + let pipeline = resp + .pipeline + .ok_or(PlatformError::Other("no pipeline in response".into()))?; + Ok(convert_release_pipeline(pipeline)) + } + + async fn update_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + input: &UpdateReleasePipelineInput, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::UpdateReleasePipelineRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + enabled: input.enabled, + stages: input.stages.as_ref().map(|s| convert_stages_to_grpc(s)).unwrap_or_default(), + update_stages: input.stages.is_some(), + }, + )?; + let resp = self + .pipeline_client() + .update_release_pipeline(req) + .await + .map_err(map_platform_status)? + .into_inner(); + let pipeline = resp + .pipeline + .ok_or(PlatformError::Other("no pipeline in response".into()))?; + Ok(convert_release_pipeline(pipeline)) + } + + async fn delete_release_pipeline( + &self, + access_token: &str, + organisation: &str, + project: &str, + name: &str, + ) -> Result<(), PlatformError> { + let req = platform_authed_request( + access_token, + forage_grpc::DeleteReleasePipelineRequest { + project: Some(forage_grpc::Project { + organisation: organisation.into(), + project: project.into(), + }), + name: name.into(), + }, + )?; + self.pipeline_client() + .delete_release_pipeline(req) + .await + .map_err(map_platform_status)?; + Ok(()) + } + + async fn get_artifact_spec( + &self, + access_token: &str, + artifact_id: &str, + ) -> Result { + let req = platform_authed_request( + access_token, + forage_grpc::GetArtifactSpecRequest { + artifact_id: artifact_id.into(), + }, + )?; + let resp = self + .artifact_client() + .get_artifact_spec(req) + .await + .map_err(map_platform_status)?; + Ok(resp.into_inner().content) } } diff --git a/crates/forage-server/src/main.rs b/crates/forage-server/src/main.rs index 
6f7b443..9c818f0 100644 --- a/crates/forage-server/src/main.rs +++ b/crates/forage-server/src/main.rs @@ -8,29 +8,94 @@ use std::net::SocketAddr; use std::sync::Arc; use axum::Router; +use axum::extract::State; +use axum::http::StatusCode; +use axum::response::{Html, IntoResponse, Response}; use forage_core::session::{FileSessionStore, SessionStore}; use forage_db::PgSessionStore; +use minijinja::context; use tower_http::services::ServeDir; use tower_http::trace::TraceLayer; +use opentelemetry::trace::TracerProvider as _; use tracing_subscriber::EnvFilter; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::util::SubscriberInitExt; use crate::forest_client::GrpcForestClient; use crate::state::AppState; use crate::templates::TemplateEngine; +fn init_telemetry() { + let env_filter = + EnvFilter::try_from_default_env().unwrap_or_else(|_| "info,h2=warn,tonic=info".into()); + let fmt_layer = tracing_subscriber::fmt::layer(); + + if std::env::var("OTEL_EXPORTER_OTLP_ENDPOINT").is_ok() { + // OTLP exporter configured — send spans + logs to collector + let tracer = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .build() + .expect("failed to create OTLP span exporter"); + + let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder() + .with_batch_exporter(tracer) + .with_resource( + opentelemetry_sdk::Resource::builder() + .with_service_name( + std::env::var("OTEL_SERVICE_NAME") + .unwrap_or_else(|_| "forage-server".into()), + ) + .build(), + ) + .build(); + + let otel_layer = tracing_opentelemetry::layer() + .with_tracer(tracer_provider.tracer("forage-server")); + + tracing_subscriber::registry() + .with(env_filter) + .with(fmt_layer) + .with(otel_layer) + .init(); + + tracing::info!("OpenTelemetry enabled — exporting to OTLP endpoint"); + } else { + tracing_subscriber::registry() + .with(env_filter) + .with(fmt_layer) + .init(); + } +} + +async fn fallback_404(State(state): State) -> Response { + let html = 
state.templates.render( + "pages/error.html.jinja", + context! { + title => "Not Found - Forage", + description => "The page you're looking for doesn't exist.", + status => 404u16, + heading => "Page not found", + message => "The page you're looking for doesn't exist.", + }, + ); + match html { + Ok(body) => (StatusCode::NOT_FOUND, Html(body)).into_response(), + Err(_) => StatusCode::NOT_FOUND.into_response(), + } +} + pub fn build_router(state: AppState) -> Router { Router::new() .merge(routes::router()) .nest_service("/static", ServeDir::new("static")) + .fallback(fallback_404) .layer(TraceLayer::new_for_http()) .with_state(state) } #[tokio::main] async fn main() -> anyhow::Result<()> { - tracing_subscriber::fmt() - .with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| "info".into())) - .init(); + init_telemetry(); let forest_endpoint = std::env::var("FOREST_SERVER_URL").unwrap_or_else(|_| "http://localhost:4040".into()); @@ -81,7 +146,8 @@ async fn main() -> anyhow::Result<()> { }; let forest_client = Arc::new(forest_client); - let state = AppState::new(template_engine, forest_client.clone(), forest_client, sessions); + let state = AppState::new(template_engine, forest_client.clone(), forest_client.clone(), sessions) + .with_grpc_client(forest_client); let app = build_router(state); let port: u16 = std::env::var("PORT") diff --git a/crates/forage-server/src/routes/auth.rs b/crates/forage-server/src/routes/auth.rs index 445918c..a45506e 100644 --- a/crates/forage-server/src/routes/auth.rs +++ b/crates/forage-server/src/routes/auth.rs @@ -7,7 +7,7 @@ use chrono::Utc; use minijinja::context; use serde::Deserialize; -use super::error_page; +use super::{error_page, internal_error}; use crate::auth::{self, MaybeSession, Session}; use crate::state::AppState; use forage_core::auth::{validate_email, validate_password, validate_username, UserEmail}; @@ -390,8 +390,7 @@ async fn tokens_page( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - 
error_page(&state, StatusCode::INTERNAL_SERVER_ERROR, "Something went wrong", "Please try again.") + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -422,8 +421,7 @@ async fn create_token_submit( .create_token(&session.access_token, &session.user.user_id, &form.name) .await .map_err(|e| { - tracing::error!("failed to create token: {e}"); - error_page(&state, StatusCode::INTERNAL_SERVER_ERROR, "Something went wrong", "Please try again.") + internal_error(&state, "failed to create token", &e) })?; let tokens = state @@ -455,8 +453,7 @@ async fn create_token_submit( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page(&state, StatusCode::INTERNAL_SERVER_ERROR, "Something went wrong", "Please try again.") + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -477,8 +474,7 @@ async fn delete_token_submit( .delete_token(&session.access_token, &token_id) .await .map_err(|e| { - tracing::error!("failed to delete token: {e}"); - error_page(&state, StatusCode::INTERNAL_SERVER_ERROR, "Something went wrong", "Please try again.") + internal_error(&state, "failed to delete token", &e) })?; Ok(Redirect::to("/settings/tokens").into_response()) @@ -522,13 +518,7 @@ fn render_account( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(state, "template error", &e) })?; Ok(Html(html).into_response()) diff --git a/crates/forage-server/src/routes/events.rs b/crates/forage-server/src/routes/events.rs new file mode 100644 index 0000000..e72b62d --- /dev/null +++ b/crates/forage-server/src/routes/events.rs @@ -0,0 +1,312 @@ +use axum::extract::{Path, State}; +use axum::response::sse::{Event, KeepAlive, Sse}; +use axum::response::{IntoResponse, Response}; +use axum::routing::get; +use axum::Router; +use forage_core::platform::validate_slug; +use 
futures_util::StreamExt; +use std::convert::Infallible; +use tokio_stream::wrappers::ReceiverStream; + +use crate::auth::Session; +use crate::forest_client::GrpcForestClient; +use crate::state::AppState; + +use super::error_page; + +pub fn router() -> Router { + Router::new() + .route( + "/orgs/{org}/projects/{project}/events", + get(project_events_sse), + ) + .route( + "/api/orgs/{org}/projects/{project}/releases/{slug}/logs", + get(release_logs_sse), + ) +} + +async fn project_events_sse( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + // Validate access + let orgs = &session.user.orgs; + if !orgs.iter().any(|o| o.name == org) { + return Err(error_page( + &state, + axum::http::StatusCode::FORBIDDEN, + "Access denied", + "You are not a member of this organisation.", + )); + } + if !validate_slug(&project) { + return Err(error_page( + &state, + axum::http::StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let grpc_client = state.grpc_client.as_ref().ok_or_else(|| { + error_page( + &state, + axum::http::StatusCode::SERVICE_UNAVAILABLE, + "Service unavailable", + "Event streaming is not available.", + ) + })?; + + let access_token = session.access_token.clone(); + let mut event_client = grpc_client.event_client(); + + let mut req = tonic::Request::new(forage_grpc::SubscribeEventsRequest { + organisation: org.clone(), + project: project.clone(), + resource_types: vec![], + actions: vec![], + since_sequence: 0, + }); + let bearer: tonic::metadata::MetadataValue<_> = format!("Bearer {access_token}") + .parse() + .map_err(|_| { + error_page( + &state, + axum::http::StatusCode::INTERNAL_SERVER_ERROR, + "Internal error", + "Failed to create auth header.", + ) + })?; + req.metadata_mut().insert("authorization", bearer); + + let grpc_stream = event_client.subscribe(req).await.map_err(|e| { + tracing::error!("event subscribe failed: {e}"); + error_page( + &state, + 
axum::http::StatusCode::BAD_GATEWAY, + "Connection failed", + "Could not connect to event stream.", + ) + })?; + + let mut grpc_stream = grpc_stream.into_inner(); + + // Bridge gRPC stream -> SSE via a channel + let (tx, rx) = tokio::sync::mpsc::channel::>(32); + + tokio::spawn(async move { + while let Some(result) = grpc_stream.next().await { + match result { + Ok(event) => { + let data = serde_json::json!({ + "sequence": event.sequence, + "event_id": event.event_id, + "timestamp": event.timestamp, + "organisation": event.organisation, + "project": event.project, + "resource_type": event.resource_type, + "action": event.action, + "resource_id": event.resource_id, + "metadata": event.metadata, + }); + let sse_event = Event::default() + .event(&event.resource_type) + .data(data.to_string()) + .id(event.sequence.to_string()); + if tx.send(Ok(sse_event)).await.is_err() { + break; // Client disconnected + } + } + Err(e) => { + tracing::warn!("event stream error: {e}"); + break; + } + } + } + }); + + let stream = ReceiverStream::new(rx); + let sse = Sse::new(stream).keep_alive(KeepAlive::default()); + + Ok(sse.into_response()) +} + +// ─── Release logs SSE ──────────────────────────────────────────────── + +async fn release_logs_sse( + State(state): State, + session: Session, + Path((org, project, slug)): Path<(String, String, String)>, +) -> Result { + let orgs = &session.user.orgs; + if !orgs.iter().any(|o| o.name == org) { + return Err(error_page( + &state, + axum::http::StatusCode::FORBIDDEN, + "Access denied", + "You are not a member of this organisation.", + )); + } + if !validate_slug(&project) { + return Err(error_page( + &state, + axum::http::StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let grpc_client = state.grpc_client.as_ref().ok_or_else(|| { + error_page( + &state, + axum::http::StatusCode::SERVICE_UNAVAILABLE, + "Service unavailable", + "Log streaming is not available.", + ) + })?; + + let access_token = 
session.access_token.clone(); + + // Fetch the artifact to get its artifact_id. + let artifact = state + .platform_client + .get_artifact_by_slug(&access_token, &slug) + .await + .map_err(|e| { + tracing::error!("release_logs_sse get_artifact_by_slug: {e}"); + error_page( + &state, + axum::http::StatusCode::NOT_FOUND, + "Not found", + "Release not found.", + ) + })?; + + // Fetch release intent states to find intent IDs for this artifact. + let release_intents = state + .platform_client + .get_release_intent_states(&access_token, &org, Some(&project), true) + .await + .unwrap_or_default(); + + let intent_ids: Vec = release_intents + .into_iter() + .filter(|ri| ri.artifact_id == artifact.artifact_id) + .map(|ri| ri.release_intent_id) + .collect(); + + if intent_ids.is_empty() { + // No release intents — return an SSE stream that sends a "done" event and closes. + let (tx, rx) = tokio::sync::mpsc::channel::>(1); + tokio::spawn(async move { + let _ = tx + .send(Ok(Event::default() + .event("done") + .data(r#"{"message":"no logs"}"#))) + .await; + }); + let stream = ReceiverStream::new(rx); + return Ok(Sse::new(stream).keep_alive(KeepAlive::default()).into_response()); + } + + let (tx, rx) = tokio::sync::mpsc::channel::>(128); + + // Spawn a WaitRelease stream for each release intent. + for intent_id in intent_ids { + let grpc = grpc_client.clone(); + let token = access_token.clone(); + let tx = tx.clone(); + tokio::spawn(async move { + if let Err(e) = stream_release_logs(&grpc, &token, &intent_id, &tx).await { + tracing::warn!("release log stream for {intent_id}: {e}"); + } + }); + } + + // Drop our copy of tx so the stream ends when all spawned tasks finish. 
+ drop(tx); + + let stream = ReceiverStream::new(rx); + let sse = Sse::new(stream).keep_alive(KeepAlive::default()); + Ok(sse.into_response()) +} + +async fn stream_release_logs( + grpc: &GrpcForestClient, + access_token: &str, + release_intent_id: &str, + tx: &tokio::sync::mpsc::Sender>, +) -> Result<(), Box> { + let mut client = grpc.release_client(); + let mut req = tonic::Request::new(forage_grpc::WaitReleaseRequest { + release_intent_id: release_intent_id.to_string(), + }); + let bearer: tonic::metadata::MetadataValue<_> = + format!("Bearer {access_token}").parse()?; + req.metadata_mut().insert("authorization", bearer); + + let resp = client.wait_release(req).await?; + let mut stream = resp.into_inner(); + + while let Some(result) = stream.next().await { + match result { + Ok(event) => { + let sse_event = match event.event { + Some(forage_grpc::wait_release_event::Event::LogLine(log)) => { + let channel = match log.channel { + 1 => "stdout", + 2 => "stderr", + _ => "stdout", + }; + let data = serde_json::json!({ + "destination": log.destination, + "line": log.line, + "timestamp": log.timestamp, + "channel": channel, + }); + Some(Event::default().event("log").data(data.to_string())) + } + Some(forage_grpc::wait_release_event::Event::StatusUpdate(su)) => { + let data = serde_json::json!({ + "destination": su.destination, + "status": su.status, + }); + Some(Event::default().event("status").data(data.to_string())) + } + Some(forage_grpc::wait_release_event::Event::StageUpdate(su)) => { + let data = serde_json::json!({ + "stage_id": su.stage_id, + "stage_type": su.stage_type, + "status": su.status, + }); + Some(Event::default().event("stage").data(data.to_string())) + } + None => None, + }; + if let Some(sse_event) = sse_event { + if tx.send(Ok(sse_event)).await.is_err() { + return Ok(()); // Client disconnected + } + } + } + Err(e) => { + tracing::warn!("wait_release stream error: {e}"); + break; + } + } + } + + // Signal that this intent's stream is done. 
+ let _ = tx + .send(Ok(Event::default() + .event("done") + .data(format!( + r#"{{"release_intent_id":"{}"}}"#, + release_intent_id + )))) + .await; + + Ok(()) +} diff --git a/crates/forage-server/src/routes/mod.rs b/crates/forage-server/src/routes/mod.rs index 2c5e626..909c31c 100644 --- a/crates/forage-server/src/routes/mod.rs +++ b/crates/forage-server/src/routes/mod.rs @@ -1,4 +1,5 @@ mod auth; +mod events; mod pages; mod platform; @@ -14,10 +15,22 @@ pub fn router() -> Router { .merge(pages::router()) .merge(auth::router()) .merge(platform::router()) + .merge(events::router()) } /// Render an error page with the given status code, heading, and message. fn error_page(state: &AppState, status: StatusCode, heading: &str, message: &str) -> Response { + error_page_detail(state, status, heading, message, None) +} + +/// Render an error page with optional error detail (shown in a collapsible section). +fn error_page_detail( + state: &AppState, + status: StatusCode, + heading: &str, + message: &str, + detail: Option<&str>, +) -> Response { let html = state.templates.render( "pages/error.html.jinja", context! { @@ -26,6 +39,7 @@ fn error_page(state: &AppState, status: StatusCode, heading: &str, message: &str status => status.as_u16(), heading => heading, message => message, + detail => detail, }, ); match html { @@ -33,3 +47,28 @@ fn error_page(state: &AppState, status: StatusCode, heading: &str, message: &str Err(_) => status.into_response(), } } + +/// Log an error and render a 500 page with the error detail. +fn internal_error(state: &AppState, context: &str, err: &dyn std::fmt::Display) -> Response { + let detail = format!("{err:#}"); + tracing::error!("{context}: {detail}"); + error_page_detail( + state, + StatusCode::INTERNAL_SERVER_ERROR, + "Something went wrong", + "An internal error occurred. Please try again.", + Some(&detail), + ) +} + +/// Log a warning for a failed call and return the default value. 
+/// Use for supplementary data where graceful degradation is acceptable. +fn warn_default(context: &str, result: Result) -> T { + match result { + Ok(v) => v, + Err(e) => { + tracing::warn!("{context}: {e:#}"); + T::default() + } + } +} diff --git a/crates/forage-server/src/routes/platform.rs b/crates/forage-server/src/routes/platform.rs index 42ffbbd..f58850e 100644 --- a/crates/forage-server/src/routes/platform.rs +++ b/crates/forage-server/src/routes/platform.rs @@ -1,29 +1,56 @@ -use axum::extract::{Path, State}; +use axum::extract::{Path, Query, State}; use axum::http::StatusCode; use axum::response::{Html, IntoResponse, Redirect, Response}; use axum::routing::{get, post}; -use axum::{Form, Router}; -use forage_core::platform::validate_slug; +use axum::{Json, Router}; +use axum_extra::extract::Form; +use chrono::Datelike; +use forage_core::platform::{ + validate_slug, CreatePolicyInput, CreateReleasePipelineInput, CreateTriggerInput, + PipelineStage, PolicyConfig, UpdatePolicyInput, UpdateReleasePipelineInput, + UpdateTriggerInput, +}; use forage_core::session::CachedOrg; use minijinja::context; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; -use super::error_page; +use super::{error_page, internal_error, warn_default}; use crate::auth::{self, Session}; use crate::state::AppState; pub fn router() -> Router { Router::new() .route("/dashboard", get(dashboard)) + .route("/notifications", get(notifications_page)) .route("/orgs", post(create_org_submit)) .route("/orgs/{org}/projects", get(projects_list)) .route("/orgs/{org}/projects/{project}", get(project_detail)) + .route( + "/orgs/{org}/projects/{project}/releases", + get(project_releases), + ) .route( "/orgs/{org}/projects/{project}/releases/{slug}", get(artifact_detail), ) .route("/orgs/{org}/releases", get(releases_page)) .route("/orgs/{org}/destinations", get(destinations_page)) + .route( + "/orgs/{org}/destinations/environments", + post(create_environment_submit), + ) + .route( + 
"/orgs/{org}/destinations/create", + post(create_destination_submit), + ) + .route( + "/orgs/{org}/destinations/detail", + get(destination_detail), + ) + .route( + "/orgs/{org}/destinations/detail/update", + post(update_destination_submit), + ) .route("/orgs/{org}/usage", get(usage)) .route( "/orgs/{org}/settings/members", @@ -37,6 +64,64 @@ pub fn router() -> Router { "/orgs/{org}/settings/members/{user_id}/remove", post(remove_member_submit), ) + .route( + "/orgs/{org}/projects/{project}/deploy", + post(deploy_release), + ) + .route( + "/orgs/{org}/projects/{project}/triggers", + get(triggers_page).post(create_trigger_submit), + ) + .route( + "/orgs/{org}/projects/{project}/triggers/{name}", + get(edit_trigger_page).post(edit_trigger_submit), + ) + .route( + "/orgs/{org}/projects/{project}/triggers/{name}/toggle", + post(toggle_trigger), + ) + .route( + "/orgs/{org}/projects/{project}/triggers/{name}/delete", + post(delete_trigger), + ) + .route( + "/orgs/{org}/projects/{project}/policies", + get(policies_page).post(create_policy_submit), + ) + .route( + "/orgs/{org}/projects/{project}/policies/{name}", + get(edit_policy_page).post(edit_policy_submit), + ) + .route( + "/orgs/{org}/projects/{project}/policies/{name}/toggle", + post(toggle_policy), + ) + .route( + "/orgs/{org}/projects/{project}/policies/{name}/delete", + post(delete_policy), + ) + .route( + "/orgs/{org}/projects/{project}/pipelines", + get(pipelines_page).post(create_pipeline_submit), + ) + .route( + "/orgs/{org}/projects/{project}/pipelines/{name}/toggle", + post(toggle_pipeline), + ) + .route( + "/orgs/{org}/projects/{project}/pipelines/{name}/update", + post(update_pipeline_submit), + ) + .route( + "/orgs/{org}/projects/{project}/pipelines/{name}/delete", + post(delete_pipeline), + ) + .route("/users/{username}", get(user_profile)) + .route( + "/api/orgs/{org}/projects/{project}/timeline", + get(timeline_api), + ) + .route("/api/orgs/{org}/timeline", get(org_timeline_api)) } fn 
orgs_context(orgs: &[CachedOrg]) -> Vec { @@ -45,6 +130,7 @@ fn orgs_context(orgs: &[CachedOrg]) -> Vec { .collect() } + #[allow(clippy::result_large_err)] fn require_org_membership<'a>( state: &AppState, @@ -107,27 +193,55 @@ async fn dashboard( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; return Ok(Html(html).into_response()); } - // Fetch recent releases for the first org to show the pipeline on dashboard - let first_org = &orgs[0]; - let projects = state - .platform_client - .list_projects(&session.access_token, &first_org.name) - .await - .unwrap_or_default(); + // Fetch recent activity: for each org, get projects, then artifacts + let mut recent_activity = Vec::new(); + let mut first_org_projects: Vec = Vec::new(); + for org in orgs { + let projects = warn_default( + "dashboard: list_projects", + state.platform_client.list_projects(&session.access_token, &org.name).await, + ); - let items = fetch_org_artifacts(&state, &session.access_token, &first_org.name, &projects).await; - let data = build_timeline(items, &first_org.name); + if first_org_projects.is_empty() && org.name == orgs.first().map(|o| o.name.as_str()).unwrap_or_default() { + first_org_projects = projects.clone(); + } + + for project in projects.iter().take(5) { + let artifacts = warn_default( + "dashboard: list_artifacts", + state.platform_client.list_artifacts(&session.access_token, &org.name, project).await, + ); + + for artifact in artifacts { + let mut seen_envs = std::collections::HashSet::new(); + let dest_envs: Vec = artifact + .destinations + .iter() + .filter(|d| seen_envs.insert(d.environment.clone())) + .map(|d| d.environment.clone()) + .collect(); + recent_activity.push(context! 
{ + org_name => org.name, + project_name => project, + slug => artifact.slug, + title => artifact.context.title, + created_at => artifact.created_at, + dest_envs => dest_envs, + }); + if recent_activity.len() >= 10 { + break; + } + } + if recent_activity.len() >= 10 { + break; + } + } + } let html = state .templates @@ -138,26 +252,300 @@ async fn dashboard( description => "Your Forage dashboard", user => context! { username => session.user.username }, csrf_token => &session.csrf_token, - current_org => &first_org.name, + current_org => orgs.first().map(|o| &o.name), orgs => orgs_context(orgs), - timeline => data.timeline, - lanes => data.lanes, + projects => first_org_projects, + recent_activity => recent_activity, active_tab => "dashboard", }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) } +// ─── Notifications ─────────────────────────────────────────────────── + +struct NotifRelease { + org: String, + project: String, + slug: String, + title: String, + description: Option, + version: Option, + branch: Option, + commit_sha: Option, + commit_message: Option, + source_user: Option, + created_at: String, + summary_status: String, + env_groups: Vec, + pipeline_stages: Vec, + has_pipeline: bool, + destinations: Vec, +} + +async fn fetch_notifications( + state: &AppState, + session: &Session, +) -> Vec { + let orgs = &session.user.orgs; + let username = &session.user.username; + let mut releases: Vec = Vec::new(); + + for org in orgs { + let (projects, dest_states, release_intents) = tokio::join!( + state + .platform_client + .list_projects(&session.access_token, &org.name), + state + .platform_client + .get_destination_states(&session.access_token, &org.name, None), + state + .platform_client + .get_release_intent_states(&session.access_token, &org.name, 
None, true), + ); + let projects = match projects { + Ok(p) => p, + Err(_) => continue, + }; + let dest_states = dest_states.unwrap_or_default(); + let release_intents = release_intents.unwrap_or_default(); + + // Index destination states by artifact_id. + let mut states_by_artifact: std::collections::HashMap< + &str, + Vec<&forage_core::platform::DestinationState>, + > = std::collections::HashMap::new(); + for ds in &dest_states.destinations { + if let Some(aid) = ds.artifact_id.as_deref() { + states_by_artifact.entry(aid).or_default().push(ds); + } + } + + // Index pipeline stages by artifact_id. + let mut intent_stages_by_artifact: std::collections::HashMap< + &str, + &[forage_core::platform::PipelineRunStageState], + > = std::collections::HashMap::new(); + for ri in &release_intents { + if !ri.stages.is_empty() { + intent_stages_by_artifact.insert(ri.artifact_id.as_str(), &ri.stages); + } + } + + // Fetch pipeline configs per project to know which projects have pipelines. + let mut pipelines_by_project: std::collections::HashMap = + std::collections::HashMap::new(); + for p in &projects { + let has = warn_default( + "list_release_pipelines", + state + .platform_client + .list_release_pipelines(&session.access_token, &org.name, p) + .await, + ) + .iter() + .any(|pl| pl.enabled); + if has { + pipelines_by_project.insert(p.clone(), true); + } + } + + for project in &projects { + let artifacts = match state + .platform_client + .list_artifacts(&session.access_token, &org.name, project) + .await + { + Ok(a) => a, + Err(_) => continue, + }; + for artifact in artifacts { + // Filter to current user's releases. + let is_mine = artifact + .source + .as_ref() + .and_then(|s| s.user.as_deref()) + .map(|u| u == username) + .unwrap_or(false); + if !is_mine { + continue; + } + + let matching_states = states_by_artifact + .get(artifact.artifact_id.as_str()) + .cloned() + .unwrap_or_default(); + + // Compute summary status. 
+ let aid = artifact.artifact_id.as_str(); + let summary_status = compute_summary_status(&matching_states, || { + intent_stages_by_artifact.contains_key(aid) + }); + + // Build env groups for display. + let env_groups = build_env_groups(&matching_states); + + // Build pipeline stages from intent data. + let mut pipeline_stages: Vec = Vec::new(); + if let Some(run_stages) = intent_stages_by_artifact.get(aid) { + let sorted = topo_sort_run_stages(run_stages); + for rs in sorted { + let display_status = deploy_stage_display_status(rs, &matching_states); + pipeline_stages.push(context! { + id => rs.stage_id, + stage_type => rs.stage_type, + environment => rs.environment, + duration_seconds => rs.duration_seconds, + status => display_status, + started_at => rs.started_at, + completed_at => rs.completed_at, + error_message => rs.error_message, + wait_until => rs.wait_until, + }); + } + } + + let project_has_pipeline = pipelines_by_project.contains_key(project); + let has_pipeline = !pipeline_stages.is_empty() || project_has_pipeline; + + // Build destinations. + let destinations: Vec = matching_states + .iter() + .map(|ds| { + context! 
{ + name => ds.destination_name, + environment => ds.environment, + status => ds.status, + error_message => ds.error_message, + queued_at => ds.queued_at, + started_at => ds.started_at, + completed_at => ds.completed_at, + queue_position => ds.queue_position, + } + }) + .collect(); + + releases.push(NotifRelease { + org: org.name.clone(), + project: project.clone(), + slug: artifact.slug, + title: artifact.context.title, + description: artifact.context.description, + version: artifact.git_ref.as_ref().and_then(|r| r.version.clone()), + branch: artifact.git_ref.as_ref().and_then(|r| r.branch.clone()), + commit_sha: artifact + .git_ref + .as_ref() + .map(|r| r.commit_sha[..r.commit_sha.len().min(7)].to_string()), + commit_message: artifact + .git_ref + .as_ref() + .and_then(|r| r.commit_message.clone()), + source_user: artifact.source.as_ref().and_then(|s| s.user.clone()), + created_at: artifact.created_at, + summary_status: summary_status.to_string(), + env_groups, + pipeline_stages, + has_pipeline, + destinations, + }); + } + } + } + + // Sort: in-progress first (RUNNING, QUEUED), then by created_at descending. + releases.sort_by(|a, b| { + let active = |s: &str| matches!(s, "RUNNING" | "QUEUED"); + let a_active = active(&a.summary_status); + let b_active = active(&b.summary_status); + match (a_active, b_active) { + (true, false) => std::cmp::Ordering::Less, + (false, true) => std::cmp::Ordering::Greater, + _ => b.created_at.cmp(&a.created_at), + } + }); + + releases.truncate(50); + releases +} + +fn notifications_to_values(releases: Vec) -> Vec { + releases + .into_iter() + .map(|r| { + context! 
{ + org => r.org, + project => r.project, + slug => r.slug, + title => r.title, + description => r.description, + version => r.version, + branch => r.branch, + commit_sha => r.commit_sha, + commit_message => r.commit_message, + source_user => r.source_user, + created_at => r.created_at, + summary_status => r.summary_status, + env_groups => r.env_groups, + pipeline_stages => r.pipeline_stages, + has_pipeline => r.has_pipeline, + destinations => r.destinations, + } + }) + .collect() +} + +#[derive(Deserialize)] +struct NotificationsQuery { + #[serde(default)] + _partial: Option, +} + +async fn notifications_page( + State(state): State, + session: Session, + Query(query): Query, +) -> Result { + let releases = fetch_notifications(&state, &session).await; + let release_values = notifications_to_values(releases); + + // Partial render: return just the list HTML for AJAX polling. + if query._partial.is_some() { + let html = state + .templates + .render( + "components/notifications_list.html.jinja", + context! { releases => release_values }, + ) + .map_err(|e| internal_error(&state, "template error", &e))?; + return Ok(Html(html).into_response()); + } + + let orgs = &session.user.orgs; + let html = state + .templates + .render( + "pages/notifications.html.jinja", + context! { + title => "Notifications - Forage", + description => "Your release activity", + user => context! 
{ username => session.user.username }, + csrf_token => &session.csrf_token, + current_org => orgs.first().map(|o| &o.name), + orgs => orgs_context(orgs), + releases => release_values, + active_tab => "notifications", + }, + ) + .map_err(|e| internal_error(&state, "template error", &e))?; + + Ok(Html(html).into_response()) +} + // ─── Create organisation ──────────────────────────────────────────── #[derive(Deserialize)] @@ -196,8 +584,7 @@ async fn create_org_submit( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page(&state, StatusCode::INTERNAL_SERVER_ERROR, "Something went wrong", "Please try again.") + internal_error(&state, "template error", &e) })?; return Ok(Html(html).into_response()); } @@ -262,7 +649,7 @@ async fn projects_list( .platform_client .list_projects(&session.access_token, &org) .await - .unwrap_or_default(); + .map_err(|e| internal_error(&state, "list_projects", &e))?; let html = state .templates @@ -281,13 +668,7 @@ async fn projects_list( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -312,11 +693,51 @@ async fn project_detail( )); } - let artifacts = state - .platform_client - .list_artifacts(&session.access_token, &org, &project) - .await - .unwrap_or_default(); + let (artifacts, projects, environments, dest_states, release_intents, project_pipelines) = tokio::join!( + state + .platform_client + .list_artifacts(&session.access_token, &org, &project), + state + .platform_client + .list_projects(&session.access_token, &org), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .get_destination_states(&session.access_token, &org, Some(&project)), + state + .platform_client + .get_release_intent_states(&session.access_token, &org, Some(&project), true), 
+ state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + ); + let artifacts = artifacts.map_err(|e| internal_error(&state, "list_artifacts", &e))?; + let projects = warn_default("list_projects", projects); + let environments = warn_default("list_environments", environments); + let dest_states = warn_default("get_destination_states", dest_states); + let release_intents = warn_default("get_release_intent_states", release_intents); + let project_pipelines = warn_default("list_release_pipelines", project_pipelines); + + // Environment options for the deploy dropdown (sorted by sort_order). + let mut sorted_envs = environments.clone(); + sorted_envs.sort_by_key(|e| e.sort_order); + let env_options: Vec = if !sorted_envs.is_empty() { + sorted_envs + .iter() + .map(|e| context! { name => e.name }) + .collect() + } else { + // Fallback: derive from artifact destinations + let mut env_seen = std::collections::HashSet::new(); + artifacts + .iter() + .flat_map(|a| a.destinations.iter()) + .filter(|d| env_seen.insert(d.environment.clone())) + .map(|d| context! 
{ name => d.environment }) + .collect() + }; let items: Vec = artifacts .into_iter() @@ -325,7 +746,12 @@ async fn project_detail( project_name: project.clone(), }) .collect(); - let data = build_timeline(items, &org); + let mut pipelines_map = PipelinesByProject::new(); + if !project_pipelines.is_empty() { + pipelines_map.insert(project.clone(), project_pipelines); + } + let data = build_timeline(items, &org, &environments, &dest_states, &release_intents, &pipelines_map); + let html = state .templates @@ -340,19 +766,101 @@ async fn project_detail( orgs => orgs_context(orgs), org_name => &org, project_name => &project, - active_tab => "projects", + projects => projects, + active_tab => "project_overview", timeline => data.timeline, lanes => data.lanes, + env_options => env_options, }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +// ─── Project releases list ─────────────────────────────────────────── + +async fn project_releases( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + let orgs = &session.user.orgs; + require_org_membership(&state, orgs, &org)?; + + if !validate_slug(&project) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let (artifacts, projects) = tokio::join!( + state + .platform_client + .list_artifacts(&session.access_token, &org, &project), + state + .platform_client + .list_projects(&session.access_token, &org), + ); + let artifacts = artifacts.map_err(|e| internal_error(&state, "list_artifacts", &e))?; + let projects = warn_default("list_projects", projects); + + let releases: Vec = artifacts + .iter() + .map(|a| { + let mut seen_envs = std::collections::HashSet::new(); + let envs: Vec = a 
+ .destinations + .iter() + .filter(|d| seen_envs.insert(d.environment.clone())) + .map(|d| d.environment.clone()) + .collect(); + let status = if a.destinations.is_empty() { + "pending" + } else { + "deployed" + }; + context! { + slug => a.slug, + title => a.context.title, + description => a.context.description, + created_at => a.created_at, + commit_sha => a.git_ref.as_ref().map(|r| r.commit_sha.clone()), + branch => a.git_ref.as_ref().and_then(|r| r.branch.clone()), + version => a.git_ref.as_ref().and_then(|r| r.version.clone()), + source_user => a.source.as_ref().and_then(|s| s.user.clone()), + source_type => a.source.as_ref().and_then(|s| s.source_type.clone()), + envs => envs, + status => status, + } + }) + .collect(); + + let html = state + .templates + .render( + "pages/project_releases.html.jinja", + context! { + title => format!("Releases - {project} - {org} - Forage"), + description => format!("All releases for {project} in {org}"), + user => context! { username => session.user.username }, + csrf_token => &session.csrf_token, + current_org => &org, + orgs => orgs_context(orgs), + org_name => &org, + project_name => &project, + projects => projects, + active_tab => "project_releases", + releases => releases, + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -377,27 +885,98 @@ async fn artifact_detail( )); } - let artifact = state - .platform_client - .get_artifact_by_slug(&session.access_token, &slug) - .await - .map_err(|e| match e { - forage_core::platform::PlatformError::NotFound(_) => error_page( - &state, - StatusCode::NOT_FOUND, - "Not found", - "This release could not be found.", - ), - other => { - tracing::error!("failed to fetch artifact: {other}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + let (artifact_result, projects, dest_states, release_intents, pipelines) = tokio::join!( + state + .platform_client + 
.get_artifact_by_slug(&session.access_token, &slug), + state + .platform_client + .list_projects(&session.access_token, &org), + state + .platform_client + .get_destination_states(&session.access_token, &org, Some(&project)), + state + .platform_client + .get_release_intent_states(&session.access_token, &org, Some(&project), true), + state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + ); + let artifact = artifact_result.map_err(|e| match e { + forage_core::platform::PlatformError::NotFound(_) => error_page( + &state, + StatusCode::NOT_FOUND, + "Not found", + "This release could not be found.", + ), + other => { + internal_error(&state, "failed to fetch artifact", &other) + } + })?; + let projects = warn_default("list_projects", projects); + let dest_states = dest_states.unwrap_or_default(); + let release_intents = release_intents.unwrap_or_default(); + let project_has_pipeline = warn_default("list_release_pipelines", pipelines) + .iter() + .any(|pl| pl.enabled); + + // Filter destination states to this artifact. + let matching_states: Vec<&forage_core::platform::DestinationState> = dest_states + .destinations + .iter() + .filter(|ds| ds.artifact_id.as_deref() == Some(&artifact.artifact_id)) + .collect(); + + // Compute summary status. + let summary_status = compute_summary_status(&matching_states, || { + release_intents + .iter() + .any(|ri| ri.artifact_id == artifact.artifact_id && !ri.stages.is_empty()) + }); + + // Build pipeline stages from intent data. + let mut pipeline_stages: Vec = Vec::new(); + for ri in &release_intents { + if ri.artifact_id == artifact.artifact_id && !ri.stages.is_empty() { + let sorted = topo_sort_run_stages(&ri.stages); + for rs in sorted { + let display_status = deploy_stage_display_status(rs, &matching_states); + pipeline_stages.push(context! 
{ + id => rs.stage_id, + stage_type => rs.stage_type, + environment => rs.environment, + duration_seconds => rs.duration_seconds, + status => display_status, + started_at => rs.started_at, + completed_at => rs.completed_at, + error_message => rs.error_message, + wait_until => rs.wait_until, + }); } - })?; + } + } + + let has_pipeline = !pipeline_stages.is_empty() || project_has_pipeline; + + // Build env groups. + let env_groups = build_env_groups(&matching_states); + + // Build destinations with status. + let destinations: Vec = matching_states + .iter() + .map(|ds| { + context! { + name => ds.destination_name, + environment => ds.environment, + status => ds.status, + error_message => ds.error_message, + queued_at => ds.queued_at, + started_at => ds.started_at, + completed_at => ds.completed_at, + queue_position => ds.queue_position, + } + }) + .collect(); let html = state .templates @@ -412,42 +991,177 @@ async fn artifact_detail( orgs => orgs_context(orgs), org_name => &org, project_name => &project, - active_tab => "projects", - artifact => context! 
{ - slug => artifact.slug, - title => artifact.context.title, - description => artifact.context.description, - web => artifact.context.web, - pr => artifact.context.pr, - created_at => artifact.created_at, - source_user => artifact.source.as_ref().and_then(|s| s.user.clone()), - source_email => artifact.source.as_ref().and_then(|s| s.email.clone()), - source_type => artifact.source.as_ref().and_then(|s| s.source_type.clone()), - run_url => artifact.source.as_ref().and_then(|s| s.run_url.clone()), - commit_sha => artifact.git_ref.as_ref().map(|r| r.commit_sha.clone()), - branch => artifact.git_ref.as_ref().and_then(|r| r.branch.clone()), - commit_message => artifact.git_ref.as_ref().and_then(|r| r.commit_message.clone()), - version => artifact.git_ref.as_ref().and_then(|r| r.version.clone()), - repo_url => artifact.git_ref.as_ref().and_then(|r| r.repo_url.clone()), - destinations => artifact.destinations.iter().map(|d| { - context! { name => d.name, environment => d.environment } - }).collect::>(), + projects => projects, + active_tab => "project_releases", + artifact => { + // Parse auto-generated description for fallback metadata. + let desc_meta = artifact.context.description.as_deref() + .filter(|d| d.starts_with("Branch:")) + .map(parse_description_metadata) + .unwrap_or_default(); + + let branch = artifact.git_ref.as_ref().and_then(|r| r.branch.clone()) + .or_else(|| desc_meta.get("branch").cloned()); + let source_type = artifact.source.as_ref().and_then(|s| s.source_type.clone()) + .or_else(|| desc_meta.get("source").cloned()); + let source_user = artifact.source.as_ref().and_then(|s| s.user.clone()) + .or_else(|| desc_meta.get("author").cloned()); + + context! 
{ + slug => artifact.slug, + title => artifact.context.title, + description => artifact.context.description, + web => artifact.context.web, + pr => artifact.context.pr, + created_at => artifact.created_at, + source_user => source_user, + source_email => artifact.source.as_ref().and_then(|s| s.email.clone()), + source_type => source_type, + run_url => artifact.source.as_ref().and_then(|s| s.run_url.clone()), + commit_sha => artifact.git_ref.as_ref().map(|r| r.commit_sha.clone()), + branch => branch, + commit_message => artifact.git_ref.as_ref().and_then(|r| r.commit_message.clone()), + version => artifact.git_ref.as_ref().and_then(|r| r.version.clone()), + repo_url => artifact.git_ref.as_ref().and_then(|r| r.repo_url.clone()), + } }, + summary_status => &summary_status, + pipeline_stages => pipeline_stages, + has_pipeline => has_pipeline, + env_groups => env_groups, + destinations => destinations, + configured_destinations => artifact.destinations.iter().map(|d| { + context! { name => d.name, environment => d.environment } + }).collect::>(), + has_release_intents => release_intents.iter().any(|ri| ri.artifact_id == artifact.artifact_id), }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) } +/// Compute summary status from destination states. 
+fn compute_summary_status bool>( + matching_states: &[&forage_core::platform::DestinationState], + has_intent_stages: F, +) -> String { + if matching_states.is_empty() { + if has_intent_stages() { + "QUEUED".to_string() + } else { + "PENDING".to_string() + } + } else { + let statuses: Vec<&str> = matching_states + .iter() + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if statuses.iter().any(|s| *s == "RUNNING" || *s == "ASSIGNED") { + "RUNNING" + } else if statuses.contains(&"QUEUED") { + "QUEUED" + } else if statuses.contains(&"FAILED") { + "FAILED" + } else if statuses.contains(&"TIMED_OUT") { + "TIMED_OUT" + } else if statuses.contains(&"CANCELLED") { + "CANCELLED" + } else if statuses.contains(&"SUCCEEDED") { + "SUCCEEDED" + } else { + "PENDING" + } + .to_string() + } +} + +/// Destination-aware status override for deploy stages. +fn deploy_stage_display_status<'a>( + rs: &'a forage_core::platform::PipelineRunStageState, + matching_states: &[&forage_core::platform::DestinationState], +) -> &'a str { + if rs.stage_type == "deploy" && (rs.status == "RUNNING" || rs.status == "ASSIGNED") { + if let Some(ref env) = rs.environment { + let env_dests: Vec<&str> = matching_states + .iter() + .filter(|ds| ds.environment == *env) + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if !env_dests.is_empty() && env_dests.iter().all(|s| *s == "QUEUED") { + return "QUEUED"; + } + } + } + &rs.status +} + +/// Parse auto-generated description like "Branch: main. Source: github_actions. Author: tnielsen." +/// into a map of key-value pairs. Used as fallback when structured fields are empty. +fn parse_description_metadata(desc: &str) -> std::collections::HashMap { + let mut meta = std::collections::HashMap::new(); + for part in desc.split(". 
") { + let part = part.trim_end_matches('.'); + if let Some((key, val)) = part.split_once(": ") { + meta.insert(key.to_lowercase(), val.to_string()); + } + } + meta +} + +/// Build env groups for display (grouped by best status). +fn build_env_groups( + matching_states: &[&forage_core::platform::DestinationState], +) -> Vec { + let mut env_best: std::collections::HashMap<&str, &str> = + std::collections::HashMap::new(); + let mut unique_envs = Vec::new(); + let mut seen = std::collections::HashSet::new(); + for ds in matching_states { + let status = ds.status.as_deref().unwrap_or("PENDING"); + let env = ds.environment.as_str(); + if seen.insert(env) { + unique_envs.push(env); + } + let cur = env_best.get(env).copied().unwrap_or("PENDING"); + let pri = |s: &str| -> u8 { + match s { + "RUNNING" | "ASSIGNED" => 6, + "QUEUED" => 5, + "FAILED" => 4, + "TIMED_OUT" => 3, + "CANCELLED" => 2, + "SUCCEEDED" => 1, + _ => 0, + } + }; + if pri(status) > pri(cur) { + env_best.insert(env, status); + } + } + let status_order = [ + "RUNNING", "QUEUED", "FAILED", "TIMED_OUT", "CANCELLED", "SUCCEEDED", + ]; + let mut env_groups = Vec::new(); + for &gs in &status_order { + let envs_in: Vec<&str> = unique_envs + .iter() + .filter(|e| env_best.get(*e).copied() == Some(gs)) + .copied() + .collect(); + if !envs_in.is_empty() { + let ds = if gs == "ASSIGNED" { "RUNNING" } else { gs }; + env_groups.push(context! 
{ + status => ds, + envs => envs_in, + }); + } + } + env_groups +} + // ─── Usage ────────────────────────────────────────────────────────── async fn usage( @@ -462,7 +1176,7 @@ async fn usage( .platform_client .list_projects(&session.access_token, &org) .await - .unwrap_or_default(); + .map_err(|e| internal_error(&state, "list_projects", &e))?; let html = state .templates @@ -482,18 +1196,331 @@ async fn usage( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) } +// ─── Deploy release ──────────────────────────────────────────────── + +#[derive(Deserialize)] +struct DeployForm { + _csrf: String, + artifact_id: String, + #[serde(default)] + environment: Option, + #[serde(default)] + use_pipeline: Option, +} + +async fn deploy_release( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + require_org_membership(&state, orgs, &org)?; + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed.", + )); + } + + let use_pipeline = form.use_pipeline.as_deref() == Some("true"); + let environments: Vec = form.environment.into_iter().collect(); + + state + .platform_client + .release_artifact( + &session.access_token, + &form.artifact_id, + &[], + &environments, + use_pipeline, + ) + .await + .map_err(|e| { + internal_error(&state, "deploy failed", &e) + })?; + + Ok(Redirect::to(&format!( + "/orgs/{org}/projects/{project}/releases" + )) + .into_response()) +} + +// ─── User profile ────────────────────────────────────────────────── + +async fn user_profile( + State(state): State, + session: Session, + Path(username): Path, +) -> Result { + let profile = state + 
.forest_client + .get_user_by_username(&session.access_token, &username) + .await + .map_err(|e| { +{ + tracing::error!("get_user_by_username({username}): {e:#}"); + error_page( + &state, + StatusCode::NOT_FOUND, + "User not found", + &format!("No user named '{username}' was found."), + ) + } + })?; + + let orgs = &session.user.orgs; + + // Fetch contributions: collect artifacts created by this user across all orgs/projects. + let profile_data = build_user_profile_data(&state, &session, orgs, &username).await; + + let html = state + .templates + .render( + "pages/user_profile.html.jinja", + context! { + title => format!("{} - Forage", profile.username), + description => format!("Profile for {}", profile.username), + user => context! { username => session.user.username }, + csrf_token => &session.csrf_token, + current_org => orgs.first().map(|o| &o.name), + orgs => orgs_context(orgs), + profile => context! { + username => profile.username, + user_id => profile.user_id, + created_at => profile.created_at, + }, + heatmap => profile_data.heatmap, + recent_releases => profile_data.recent_releases, + contributed_projects => profile_data.contributed_projects, + active_tab => "", + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +/// User profile data: heatmap, recent releases, and contributed projects. +struct UserProfileData { + heatmap: minijinja::Value, + recent_releases: Vec, + contributed_projects: Vec, +} + +/// Build user profile data: contribution heatmap, recent releases, and contributed projects. 
+async fn build_user_profile_data( + state: &AppState, + session: &Session, + orgs: &[forage_core::session::CachedOrg], + target_username: &str, +) -> UserProfileData { + use std::collections::{HashMap, HashSet}; + + let today = chrono::Utc::now().date_naive(); + let start = today - chrono::Duration::days(363); + let start_weekday = start.weekday().num_days_from_sunday(); + let grid_start = start - chrono::Duration::days(start_weekday as i64); + + let mut day_counts: HashMap = HashMap::new(); + let mut total_contributions = 0u32; + + // Collect recent releases (sorted by created_at desc, capped at 10). + struct RecentRelease { + org: String, + project: String, + slug: String, + version: Option, + branch: Option, + commit_sha: Option, + created_at: String, + } + let mut all_releases: Vec = Vec::new(); + + // Track unique org/project pairs for contributed projects. + let mut project_set: HashSet<(String, String)> = HashSet::new(); + let mut project_release_counts: HashMap<(String, String), u32> = HashMap::new(); + + // Fetch artifacts from all orgs/projects (best effort). + for org in orgs { + let projects = match state + .platform_client + .list_projects(&session.access_token, &org.name) + .await + { + Ok(p) => p, + Err(_) => continue, + }; + for project in &projects { + let artifacts = match state + .platform_client + .list_artifacts(&session.access_token, &org.name, project) + .await + { + Ok(a) => a, + Err(_) => continue, + }; + for artifact in &artifacts { + let is_match = artifact + .source + .as_ref() + .and_then(|s| s.user.as_deref()) + .map(|u| u == target_username) + .unwrap_or(false); + if !is_match { + continue; + } + + // Track contributed project. + let key = (org.name.clone(), project.clone()); + project_set.insert(key.clone()); + *project_release_counts.entry(key).or_default() += 1; + + // Collect for recent releases list. 
+ all_releases.push(RecentRelease { + org: org.name.clone(), + project: project.clone(), + slug: artifact.slug.clone(), + version: artifact.git_ref.as_ref().and_then(|r| r.version.clone()), + branch: artifact.git_ref.as_ref().and_then(|r| r.branch.clone()), + commit_sha: artifact.git_ref.as_ref().map(|r| r.commit_sha.clone()), + created_at: artifact.created_at.clone(), + }); + + // Heatmap day counts. + if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&artifact.created_at) { + let date = dt.date_naive(); + if date >= grid_start && date <= today { + *day_counts.entry(date).or_default() += 1; + total_contributions += 1; + } + } + } + } + } + + // Sort releases by created_at descending, take top 10. + all_releases.sort_by(|a, b| b.created_at.cmp(&a.created_at)); + all_releases.truncate(10); + let recent_releases: Vec = all_releases + .into_iter() + .map(|r| { + context! { + org => r.org, + project => r.project, + slug => r.slug, + version => r.version, + branch => r.branch, + commit_sha => r.commit_sha.as_deref().map(|s| &s[..s.len().min(7)]), + created_at => r.created_at, + } + }) + .collect(); + + // Build contributed projects list sorted by release count descending. + let mut contributed_projects: Vec<((String, String), u32)> = project_release_counts + .into_iter() + .collect(); + contributed_projects.sort_by(|a, b| b.1.cmp(&a.1)); + let contributed_projects: Vec = contributed_projects + .into_iter() + .map(|((org, project), count)| { + context! { + org => org, + project => project, + release_count => count, + } + }) + .collect(); + + // Build the grid: 53 columns (weeks) x 7 rows (days). + // Use a power scale (sqrt) to map counts to 5 levels (0-4). 
+ let max_count = day_counts.values().copied().max().unwrap_or(0); + + let mut weeks: Vec = Vec::new(); + let mut current = grid_start; + let grid_end = grid_start + chrono::Duration::days(53 * 7 - 1); + + let mut week_days: Vec = Vec::new(); + while current <= grid_end && current <= today { + let count = day_counts.get(¤t).copied().unwrap_or(0); + let opacity = contribution_opacity(count, max_count); + let in_range = current >= grid_start && current <= today; + + week_days.push(context! { + date => current.format("%Y-%m-%d").to_string(), + count => count, + opacity => opacity, + in_range => in_range, + }); + + // End of week (Saturday) — flush to weeks. + if current.weekday() == chrono::Weekday::Sat || current == today { + weeks.push(minijinja::Value::from_serialize(&week_days)); + week_days = Vec::new(); + } + current += chrono::Duration::days(1); + } + if !week_days.is_empty() { + weeks.push(minijinja::Value::from_serialize(&week_days)); + } + + // Month labels: first occurrence of each month in the grid. + let mut month_labels: Vec = Vec::new(); + let mut last_month = None; + let mut col = 0usize; + let mut d = grid_start; + while d <= today { + if d.weekday() == chrono::Weekday::Sun { + let m = d.month(); + if last_month != Some(m) { + last_month = Some(m); + let label = d.format("%b").to_string(); + month_labels.push(context! { col => col, label => label }); + } + col += 1; + } + d += chrono::Duration::days(1); + } + + let heatmap = context! { + weeks => weeks, + month_labels => month_labels, + total => total_contributions, + max_count => max_count, + }; + + UserProfileData { + heatmap, + recent_releases, + contributed_projects, + } +} + +/// Map a contribution count to an opacity 0.0–1.0 using a power scale (sqrt). +/// This gives a smooth gradient where low counts still have visible color +/// and high counts don't dwarf everything else. 
+fn contribution_opacity(count: u32, max_count: u32) -> String { + if count == 0 || max_count == 0 { + return "0".to_string(); + } + // sqrt scale: brings low values up, compresses high values. + // Min opacity 0.15 so even 1 contribution is visible. + let ratio = (count as f64).sqrt() / (max_count as f64).sqrt(); + let opacity = 0.15 + ratio * 0.85; + format!("{:.2}", opacity.clamp(0.15, 1.0)) +} + // ─── Timeline builder (shared between dashboard, project detail, releases) ─── struct ArtifactWithProject { @@ -506,67 +1533,369 @@ struct TimelineData { lanes: Vec, } -fn build_timeline(items: Vec, org_name: &str) -> TimelineData { +/// Pipeline info indexed by project name, for overlaying onto releases. +type PipelinesByProject = std::collections::HashMap>; + + + +/// Topologically sort pipeline run stage states by their `depends_on` edges. +fn topo_sort_run_stages( + stages: &[forage_core::platform::PipelineRunStageState], +) -> Vec<&forage_core::platform::PipelineRunStageState> { + use std::collections::{HashMap, VecDeque}; + + let index_by_id: HashMap<&str, usize> = stages + .iter() + .enumerate() + .map(|(i, s)| (s.stage_id.as_str(), i)) + .collect(); + + let mut in_degree = vec![0u32; stages.len()]; + for (i, stage) in stages.iter().enumerate() { + for dep in &stage.depends_on { + if index_by_id.contains_key(dep.as_str()) { + in_degree[i] += 1; + } + } + } + + let mut dependents: HashMap> = HashMap::new(); + for (i, stage) in stages.iter().enumerate() { + for dep in &stage.depends_on { + if let Some(&dep_idx) = index_by_id.get(dep.as_str()) { + dependents.entry(dep_idx).or_default().push(i); + } + } + } + + let mut queue: VecDeque = in_degree + .iter() + .enumerate() + .filter(|(_, d)| **d == 0) + .map(|(i, _)| i) + .collect(); + + let mut result = Vec::with_capacity(stages.len()); + while let Some(idx) = queue.pop_front() { + result.push(&stages[idx]); + if let Some(deps) = dependents.get(&idx) { + for &dep_idx in deps { + in_degree[dep_idx] -= 1; + if 
in_degree[dep_idx] == 0 { + queue.push_back(dep_idx); + } + } + } + } + + if result.len() < stages.len() { + let in_result: std::collections::HashSet = + result.iter().map(|s| index_by_id[s.stage_id.as_str()]).collect(); + for (i, stage) in stages.iter().enumerate() { + if !in_result.contains(&i) { + result.push(stage); + } + } + } + + result +} + +fn build_timeline( + items: Vec, + org_name: &str, + environments: &[forage_core::platform::Environment], + deployment_states: &forage_core::platform::DeploymentStates, + release_intents: &[forage_core::platform::ReleaseIntentState], + pipelines_by_project: &PipelinesByProject, +) -> TimelineData { + // Index destination states by artifact_id for quick lookup. + let mut states_by_artifact: std::collections::HashMap< + &str, + Vec<&forage_core::platform::DestinationState>, + > = std::collections::HashMap::new(); + for ds in &deployment_states.destinations { + if let Some(aid) = ds.artifact_id.as_deref() { + states_by_artifact.entry(aid).or_default().push(ds); + } + } + + // Index release intent stages by artifact_id for quick lookup. + let mut intent_stages_by_artifact: std::collections::HashMap< + &str, + &[forage_core::platform::PipelineRunStageState], + > = std::collections::HashMap::new(); + for ri in release_intents { + if !ri.stages.is_empty() { + intent_stages_by_artifact.insert(ri.artifact_id.as_str(), &ri.stages); + } + } + struct RawRelease { value: minijinja::Value, has_dests: bool, } let mut raw_releases: Vec = Vec::new(); - let mut env_set = std::collections::BTreeSet::new(); for item in items { let artifact = item.artifact; let project = &item.project_name; + // Look up deployment state from destination states instead of artifact.destinations. 
+ let matching_states = states_by_artifact + .get(artifact.artifact_id.as_str()) + .cloned() + .unwrap_or_default(); + let mut release_envs = Vec::new(); - let dests: Vec = artifact - .destinations + let mut release_env_statuses = Vec::new(); + let dests: Vec = matching_states .iter() - .map(|d| { - env_set.insert(d.environment.clone()); - release_envs.push(d.environment.clone()); + .map(|ds| { + release_envs.push(ds.environment.clone()); + let status_str = ds.status.as_deref().unwrap_or("PENDING"); + release_env_statuses.push(format!("{}:{}", ds.environment, status_str)); context! { - name => d.name, - environment => d.environment, - type_name => d.type_name, - type_version => d.type_version, + name => ds.destination_name, + environment => ds.environment, + status => ds.status, + error_message => ds.error_message, + queued_at => ds.queued_at, + started_at => ds.started_at, + completed_at => ds.completed_at, + queue_position => ds.queue_position, } }) .collect(); let has_dests = !dests.is_empty(); - let dest_envs_str = release_envs.join(","); + let dest_envs_str = release_env_statuses.join(","); + let mut seen_envs = std::collections::HashSet::new(); + let unique_envs: Vec = release_envs + .iter() + .filter(|e| seen_envs.insert(e.as_str())) + .cloned() + .collect(); + + // Group environments by status for the summary line. + // Each env gets its best (highest-priority) status. 
+ let mut env_best_status: std::collections::HashMap<&str, &str> = + std::collections::HashMap::new(); + for ds in &matching_states { + let status = ds.status.as_deref().unwrap_or("PENDING"); + let env = ds.environment.as_str(); + let current = env_best_status.get(env).copied().unwrap_or("PENDING"); + let priority = |s: &str| -> u8 { + match s { + "RUNNING" | "ASSIGNED" => 6, + "QUEUED" => 5, + "FAILED" => 4, + "TIMED_OUT" => 3, + "CANCELLED" => 2, + "SUCCEEDED" => 1, + _ => 0, + } + }; + if priority(status) > priority(current) { + env_best_status.insert(env, status); + } + } + // Build groups sorted by priority (deploying first), then collect envs per group. + let status_order = [ + "RUNNING", "QUEUED", "FAILED", "TIMED_OUT", "CANCELLED", "SUCCEEDED", + ]; + let mut env_groups: Vec = Vec::new(); + for &group_status in &status_order { + let envs_in_group: Vec = unique_envs + .iter() + .filter(|e| env_best_status.get(e.as_str()).copied() == Some(group_status)) + .cloned() + .collect(); + if !envs_in_group.is_empty() { + // Normalize ASSIGNED to RUNNING for display + let display_status = if group_status == "ASSIGNED" { + "RUNNING" + } else { + group_status + }; + env_groups.push(context! { + status => display_status, + envs => envs_in_group, + }); + } + } + + // Build pipeline stage view from pipeline run data (if available) or + // fall back to heuristic matching from destination states. + let pipeline_stages: Vec = { + let mut stages = Vec::new(); + + // First, check if the server returned pipeline run data for this artifact. + if let Some(run_stages) = intent_stages_by_artifact.get(artifact.artifact_id.as_str()) { + let sorted = topo_sort_run_stages(run_stages); + for rs in sorted { + let wait_until_str = rs.wait_until.as_deref(); + // For deploy stages the orchestrator may mark a stage as + // RUNNING before the actual destinations have started. 
+ // Check destination states: if all destinations for this + // environment are still QUEUED, report the stage as QUEUED. + let display_status = if rs.stage_type == "deploy" + && (rs.status == "RUNNING" || rs.status == "ASSIGNED") + { + if let Some(ref env) = rs.environment { + let env_dests: Vec<&str> = matching_states + .iter() + .filter(|ds| ds.environment == *env) + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if !env_dests.is_empty() + && env_dests.iter().all(|s| *s == "QUEUED") + { + "QUEUED" + } else { + &rs.status + } + } else { + &rs.status + } + } else { + &rs.status + }; + stages.push(context! { + id => rs.stage_id, + stage_type => rs.stage_type, + environment => rs.environment, + duration_seconds => rs.duration_seconds, + depends_on => rs.depends_on, + status => display_status, + started_at => rs.started_at, + completed_at => rs.completed_at, + error_message => rs.error_message, + wait_until => wait_until_str, + }); + } + } + // No heuristic fallback: if there is no pipeline run data for + // this artifact we leave pipeline_stages empty. The frontend + // uses env_groups to decide between "Deployed" and "Queued". + stages + }; + // A release "has a pipeline" if we have stage data from the server, + // OR if the project has an enabled pipeline config (for not-yet-deployed releases). + let project_has_enabled_pipeline = pipelines_by_project + .get(project) + .map(|ps| ps.iter().any(|p| p.enabled)) + .unwrap_or(false); + let has_pipeline = !pipeline_stages.is_empty() || project_has_enabled_pipeline; + + // Compute summary status from individual destination statuses. 
+ // Priority: RUNNING/ASSIGNED > QUEUED > FAILED/TIMED_OUT/CANCELLED > SUCCEEDED + let summary_status = if !has_dests { + "PENDING" + } else { + let statuses: Vec<&str> = matching_states + .iter() + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if statuses.iter().any(|s| *s == "RUNNING" || *s == "ASSIGNED") { + "RUNNING" + } else if statuses.contains(&"QUEUED") { + "QUEUED" + } else if statuses.contains(&"FAILED") { + "FAILED" + } else if statuses.contains(&"TIMED_OUT") { + "TIMED_OUT" + } else if statuses.contains(&"CANCELLED") { + "CANCELLED" + } else if statuses.contains(&"SUCCEEDED") { + "SUCCEEDED" + } else { + "PENDING" + } + }; + raw_releases.push(RawRelease { value: context! { + artifact_id => artifact.artifact_id, slug => artifact.slug, title => artifact.context.title, description => artifact.context.description, + web => artifact.context.web, + pr => artifact.context.pr, project_name => project, org_name => org_name, created_at => artifact.created_at, commit_sha => artifact.git_ref.as_ref().map(|r| r.commit_sha.clone()), branch => artifact.git_ref.as_ref().and_then(|r| r.branch.clone()), version => artifact.git_ref.as_ref().and_then(|r| r.version.clone()), + commit_message => artifact.git_ref.as_ref().and_then(|r| r.commit_message.clone()), + repo_url => artifact.git_ref.as_ref().and_then(|r| r.repo_url.clone()), source_user => artifact.source.as_ref().and_then(|s| s.user.clone()), + source_email => artifact.source.as_ref().and_then(|s| s.email.clone()), source_type => artifact.source.as_ref().and_then(|s| s.source_type.clone()), + run_url => artifact.source.as_ref().and_then(|s| s.run_url.clone()), destinations => dests, dest_envs => dest_envs_str, + unique_envs => unique_envs, + env_groups => env_groups, + summary_status => summary_status, + pipeline_stages => pipeline_stages, + has_pipeline => has_pipeline, }, has_dests, }); } - let lanes: Vec = env_set - .into_iter() - .map(|env| context! 
{ name => env }) - .collect(); + // Use environments from the API (sorted by sort_order), falling back to + // environments discovered from destination states. + let lanes: Vec = if !environments.is_empty() { + let mut envs: Vec<_> = environments.to_vec(); + envs.sort_by_key(|e| e.sort_order); + envs.iter() + .map(|env| { + context! { + name => env.name, + description => env.description, + color => env_lane_color(&env.name), + } + }) + .collect() + } else { + let mut env_set = std::collections::BTreeSet::new(); + for ds in &deployment_states.destinations { + if !ds.environment.is_empty() { + env_set.insert(ds.environment.clone()); + } + } + env_set + .into_iter() + .map(|env| { + let color = env_lane_color(&env); + context! { name => env, color => color } + }) + .collect() + }; + + // Truncate: keep everything up to the last deployed release, plus 3 + // older items for context. + let last_deployed_idx = raw_releases + .iter() + .rposition(|r| r.has_dests) + .map(|i| i + 1) + .unwrap_or(0); + let keep = last_deployed_idx + 3; + if keep < raw_releases.len() { + raw_releases.truncate(keep); + } let mut timeline_items: Vec = Vec::new(); let mut hidden_buf: Vec = Vec::new(); + let mut seen_deployed = false; for raw in raw_releases { if raw.has_dests { + // Flush any hidden buffer before a deployed release if !hidden_buf.is_empty() { let count = hidden_buf.len(); timeline_items.push(context! { @@ -575,11 +1904,19 @@ fn build_timeline(items: Vec, org_name: &str) -> TimelineDa releases => std::mem::take(&mut hidden_buf), }); } + seen_deployed = true; + timeline_items.push(context! { + kind => "release", + release => raw.value, + }); + } else if !seen_deployed { + // Before any deployment: show as regular (pending) release timeline_items.push(context! 
{ kind => "release", release => raw.value, }); } else { + // After a deployment: group as hidden hidden_buf.push(raw.value); } } @@ -598,6 +1935,528 @@ fn build_timeline(items: Vec, org_name: &str) -> TimelineDa } } +// ─── Serialisable API types (for the JSON timeline endpoint) ───────── + +#[derive(Debug, Serialize)] +pub struct ApiTimelineResponse { + pub timeline: Vec, + pub lanes: Vec, +} + +#[derive(Debug, Serialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum ApiTimelineItem { + Release { release: Box }, + Hidden { count: usize, releases: Vec }, +} + +#[derive(Debug, Serialize)] +pub struct ApiRelease { + pub artifact_id: String, + pub slug: String, + pub title: String, + pub description: Option, + pub web: Option, + pub pr: Option, + pub project_name: String, + pub created_at: String, + pub commit_sha: Option, + pub branch: Option, + pub version: Option, + pub commit_message: Option, + pub repo_url: Option, + pub source_user: Option, + pub source_type: Option, + pub run_url: Option, + pub summary_status: String, + pub has_pipeline: bool, + pub dest_envs: String, + pub destinations: Vec, + pub env_groups: Vec, + pub pipeline_stages: Vec, +} + +#[derive(Debug, Serialize)] +pub struct ApiDestinationState { + pub name: String, + pub environment: String, + pub status: Option, + pub error_message: Option, + pub queued_at: Option, + pub started_at: Option, + pub completed_at: Option, + pub queue_position: Option, +} + +#[derive(Debug, Serialize)] +pub struct ApiEnvGroup { + pub status: String, + pub envs: Vec, +} + +#[derive(Debug, Serialize)] +pub struct ApiPipelineStage { + pub id: String, + pub stage_type: String, + pub environment: Option, + pub duration_seconds: Option, + pub status: String, + pub started_at: Option, + pub completed_at: Option, + pub error_message: Option, + pub wait_until: Option, +} + +#[derive(Debug, Serialize)] +pub struct ApiLane { + pub name: String, + pub color: String, + pub description: Option, +} + +/// Build a 
serialisable timeline from the same inputs as `build_timeline`. +/// The logic is kept intentionally parallel so both renderers stay in sync. +fn build_timeline_json( + items: Vec, + environments: &[forage_core::platform::Environment], + deployment_states: &forage_core::platform::DeploymentStates, + release_intents: &[forage_core::platform::ReleaseIntentState], + pipelines_by_project: &PipelinesByProject, +) -> ApiTimelineResponse { + // Index destination states by artifact_id. + let mut states_by_artifact: std::collections::HashMap< + &str, + Vec<&forage_core::platform::DestinationState>, + > = std::collections::HashMap::new(); + for ds in &deployment_states.destinations { + if let Some(aid) = ds.artifact_id.as_deref() { + states_by_artifact.entry(aid).or_default().push(ds); + } + } + + // Index pipeline run stages by artifact_id. + let mut intent_stages_by_artifact: std::collections::HashMap< + &str, + &[forage_core::platform::PipelineRunStageState], + > = std::collections::HashMap::new(); + for ri in release_intents { + if !ri.stages.is_empty() { + intent_stages_by_artifact.insert(ri.artifact_id.as_str(), &ri.stages); + } + } + + struct RawRelease { + release: ApiRelease, + has_dests: bool, + } + + let priority = |s: &str| -> u8 { + match s { + "RUNNING" | "ASSIGNED" => 6, + "QUEUED" => 5, + "FAILED" => 4, + "TIMED_OUT" => 3, + "CANCELLED" => 2, + "SUCCEEDED" => 1, + _ => 0, + } + }; + + let mut raw_releases: Vec = Vec::new(); + + for item in items { + let artifact = item.artifact; + let project = item.project_name; + + let matching_states = states_by_artifact + .get(artifact.artifact_id.as_str()) + .cloned() + .unwrap_or_default(); + + let mut release_envs: Vec = Vec::new(); + let mut release_env_statuses: Vec = Vec::new(); + let destinations: Vec = matching_states + .iter() + .map(|ds| { + release_envs.push(ds.environment.clone()); + let status_str = ds.status.as_deref().unwrap_or("PENDING"); + release_env_statuses.push(format!("{}:{}", ds.environment, 
status_str)); + ApiDestinationState { + name: ds.destination_name.clone(), + environment: ds.environment.clone(), + status: ds.status.clone(), + error_message: ds.error_message.clone(), + queued_at: ds.queued_at.clone(), + started_at: ds.started_at.clone(), + completed_at: ds.completed_at.clone(), + queue_position: ds.queue_position, + } + }) + .collect(); + + let has_dests = !destinations.is_empty(); + let dest_envs = release_env_statuses.join(","); + + let mut seen_envs = std::collections::HashSet::new(); + let unique_envs: Vec = release_envs + .iter() + .filter(|e| seen_envs.insert(e.as_str())) + .cloned() + .collect(); + + // Per-environment best status for grouping. + let mut env_best_status: std::collections::HashMap<&str, &str> = + std::collections::HashMap::new(); + for ds in &matching_states { + let status = ds.status.as_deref().unwrap_or("PENDING"); + let env = ds.environment.as_str(); + let current = env_best_status.get(env).copied().unwrap_or("PENDING"); + if priority(status) > priority(current) { + env_best_status.insert(env, status); + } + } + + let status_order = [ + "RUNNING", "QUEUED", "FAILED", "TIMED_OUT", "CANCELLED", "SUCCEEDED", + ]; + let env_groups: Vec = status_order + .iter() + .filter_map(|&group_status| { + let envs_in_group: Vec = unique_envs + .iter() + .filter(|e| env_best_status.get(e.as_str()).copied() == Some(group_status)) + .cloned() + .collect(); + if envs_in_group.is_empty() { + return None; + } + let display_status = if group_status == "ASSIGNED" { + "RUNNING" + } else { + group_status + }; + Some(ApiEnvGroup { + status: display_status.to_string(), + envs: envs_in_group, + }) + }) + .collect(); + + // Build pipeline stages — same logic as build_timeline. 
+ let pipeline_stages: Vec = { + let mut stages = Vec::new(); + + if let Some(run_stages) = intent_stages_by_artifact.get(artifact.artifact_id.as_str()) { + let sorted = topo_sort_run_stages(run_stages); + for rs in sorted { + // Same destination-aware override as build_timeline. + let display_status = if rs.stage_type == "deploy" + && (rs.status == "RUNNING" || rs.status == "ASSIGNED") + { + if let Some(ref env) = rs.environment { + let env_dests: Vec<&str> = matching_states + .iter() + .filter(|ds| ds.environment == *env) + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if !env_dests.is_empty() + && env_dests.iter().all(|s| *s == "QUEUED") + { + "QUEUED".to_string() + } else if rs.status == "ASSIGNED" { + "RUNNING".to_string() + } else { + rs.status.clone() + } + } else if rs.status == "ASSIGNED" { + "RUNNING".to_string() + } else { + rs.status.clone() + } + } else if rs.status == "ASSIGNED" { + "RUNNING".to_string() + } else { + rs.status.clone() + }; + stages.push(ApiPipelineStage { + id: rs.stage_id.clone(), + stage_type: rs.stage_type.clone(), + environment: rs.environment.clone(), + duration_seconds: rs.duration_seconds, + status: display_status, + started_at: rs.started_at.clone(), + completed_at: rs.completed_at.clone(), + error_message: rs.error_message.clone(), + wait_until: rs.wait_until.clone(), + }); + } + } + // No heuristic fallback — same rationale as build_timeline. 
+ stages + }; + + let project_has_enabled_pipeline = pipelines_by_project + .get(&project) + .map(|ps| ps.iter().any(|p| p.enabled)) + .unwrap_or(false); + let has_pipeline = !pipeline_stages.is_empty() || project_has_enabled_pipeline; + + let summary_status = if !has_dests { + "PENDING" + } else { + let statuses: Vec<&str> = matching_states + .iter() + .filter_map(|ds| ds.status.as_deref()) + .collect(); + if statuses.iter().any(|s| *s == "RUNNING" || *s == "ASSIGNED") { + "RUNNING" + } else if statuses.contains(&"QUEUED") { + "QUEUED" + } else if statuses.contains(&"FAILED") { + "FAILED" + } else if statuses.contains(&"TIMED_OUT") { + "TIMED_OUT" + } else if statuses.contains(&"CANCELLED") { + "CANCELLED" + } else if statuses.contains(&"SUCCEEDED") { + "SUCCEEDED" + } else { + "PENDING" + } + }; + + raw_releases.push(RawRelease { + release: ApiRelease { + artifact_id: artifact.artifact_id, + slug: artifact.slug, + title: artifact.context.title, + description: artifact.context.description, + web: artifact.context.web, + pr: artifact.context.pr, + project_name: project, + created_at: artifact.created_at, + commit_sha: artifact.git_ref.as_ref().map(|r| r.commit_sha.clone()), + branch: artifact.git_ref.as_ref().and_then(|r| r.branch.clone()), + version: artifact.git_ref.as_ref().and_then(|r| r.version.clone()), + commit_message: artifact.git_ref.as_ref().and_then(|r| r.commit_message.clone()), + repo_url: artifact.git_ref.as_ref().and_then(|r| r.repo_url.clone()), + source_user: artifact.source.as_ref().and_then(|s| s.user.clone()), + source_type: artifact.source.as_ref().and_then(|s| s.source_type.clone()), + run_url: artifact.source.as_ref().and_then(|s| s.run_url.clone()), + summary_status: summary_status.to_string(), + has_pipeline, + dest_envs, + destinations, + env_groups, + pipeline_stages, + }, + has_dests, + }); + } + + // Build lanes — same logic as build_timeline. 
+ let lanes: Vec = if !environments.is_empty() { + let mut envs = environments.to_vec(); + envs.sort_by_key(|e| e.sort_order); + envs.iter() + .map(|env| ApiLane { + name: env.name.clone(), + color: env_lane_color(&env.name).to_string(), + description: env.description.clone(), + }) + .collect() + } else { + let mut env_set = std::collections::BTreeSet::new(); + for ds in &deployment_states.destinations { + if !ds.environment.is_empty() { + env_set.insert(ds.environment.clone()); + } + } + env_set + .into_iter() + .map(|env| ApiLane { + color: env_lane_color(&env).to_string(), + name: env, + description: None, + }) + .collect() + }; + + // Truncate: keep up to last deployed + 3. + let last_deployed_idx = raw_releases + .iter() + .rposition(|r| r.has_dests) + .map(|i| i + 1) + .unwrap_or(0); + let keep = last_deployed_idx + 3; + if keep < raw_releases.len() { + raw_releases.truncate(keep); + } + + let mut timeline: Vec = Vec::new(); + let mut hidden_buf: Vec = Vec::new(); + let mut seen_deployed = false; + + for raw in raw_releases { + if raw.has_dests { + if !hidden_buf.is_empty() { + let count = hidden_buf.len(); + timeline.push(ApiTimelineItem::Hidden { + count, + releases: std::mem::take(&mut hidden_buf), + }); + } + seen_deployed = true; + timeline.push(ApiTimelineItem::Release { + release: Box::new(raw.release), + }); + } else if !seen_deployed { + timeline.push(ApiTimelineItem::Release { + release: Box::new(raw.release), + }); + } else { + hidden_buf.push(raw.release); + } + } + if !hidden_buf.is_empty() { + let count = hidden_buf.len(); + timeline.push(ApiTimelineItem::Hidden { + count, + releases: std::mem::take(&mut hidden_buf), + }); + } + + ApiTimelineResponse { timeline, lanes } +} + +// ─── GET /api/orgs/{org}/projects/{project}/timeline ───────────────── + +async fn timeline_api( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + let orgs = &session.user.orgs; + require_org_membership(&state, 
orgs, &org)?; + + if !validate_slug(&project) { + return Err(( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ "error": "invalid project name" })), + ) + .into_response()); + } + + let (artifacts, environments, dest_states, release_intents, project_pipelines) = tokio::join!( + state + .platform_client + .list_artifacts(&session.access_token, &org, &project), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .get_destination_states(&session.access_token, &org, Some(&project)), + state + .platform_client + .get_release_intent_states(&session.access_token, &org, Some(&project), true), + state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + ); + let artifacts = artifacts.map_err(|e| { + tracing::error!("timeline_api list_artifacts: {e:#}"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "failed to fetch artifacts" })), + ) + .into_response() + })?; + let environments = warn_default("list_environments", environments); + let dest_states = warn_default("get_destination_states", dest_states); + let release_intents = warn_default("get_release_intent_states", release_intents); + let project_pipelines = warn_default("list_release_pipelines", project_pipelines); + + let items: Vec = artifacts + .into_iter() + .map(|a| ArtifactWithProject { + artifact: a, + project_name: project.clone(), + }) + .collect(); + + let mut pipelines_map = PipelinesByProject::new(); + if !project_pipelines.is_empty() { + pipelines_map.insert(project.clone(), project_pipelines); + } + + let data = build_timeline_json(items, &environments, &dest_states, &release_intents, &pipelines_map); + + Ok(Json(data).into_response()) +} + +// ─── GET /api/orgs/{org}/timeline ──────────────────────────────────── + +async fn org_timeline_api( + State(state): State, + session: Session, + Path(org): Path, +) -> Result { + let orgs = &session.user.orgs; + require_org_membership(&state, 
orgs, &org)?; + + let (projects, environments, dest_states, release_intents) = tokio::join!( + state + .platform_client + .list_projects(&session.access_token, &org), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .get_destination_states(&session.access_token, &org, None), + state + .platform_client + .get_release_intent_states(&session.access_token, &org, None, true), + ); + let projects = projects.map_err(|e| { + tracing::error!("org_timeline_api list_projects: {e:#}"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ "error": "failed to fetch projects" })), + ) + .into_response() + })?; + let environments = warn_default("list_environments", environments); + let dest_states = warn_default("get_destination_states", dest_states); + let release_intents = warn_default("get_release_intent_states", release_intents); + + let mut pipelines_by_project = PipelinesByProject::new(); + for p in &projects { + let pipelines = warn_default( + "list_release_pipelines", + state + .platform_client + .list_release_pipelines(&session.access_token, &org, p) + .await, + ); + if !pipelines.is_empty() { + pipelines_by_project.insert(p.clone(), pipelines); + } + } + + let items = fetch_org_artifacts(&state, &session.access_token, &org, &projects).await; + let data = build_timeline_json( + items, + &environments, + &dest_states, + &release_intents, + &pipelines_by_project, + ); + + Ok(Json(data).into_response()) +} + /// Fetch all artifacts across projects and return as ArtifactWithProject list. 
async fn fetch_org_artifacts( state: &AppState, @@ -607,11 +2466,10 @@ async fn fetch_org_artifacts( ) -> Vec { let mut items = Vec::new(); for project in projects { - let artifacts = state - .platform_client - .list_artifacts(access_token, org, project) - .await - .unwrap_or_default(); + let artifacts = warn_default( + &format!("list_artifacts({project})"), + state.platform_client.list_artifacts(access_token, org, project).await, + ); for artifact in artifacts { items.push(ArtifactWithProject { artifact, @@ -632,14 +2490,49 @@ async fn releases_page( let orgs = &session.user.orgs; require_org_membership(&state, orgs, &org)?; - let projects = state - .platform_client - .list_projects(&session.access_token, &org) - .await - .unwrap_or_default(); + let (projects, environments, dest_states, release_intents) = tokio::join!( + state + .platform_client + .list_projects(&session.access_token, &org), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .get_destination_states(&session.access_token, &org, None), + state + .platform_client + .get_release_intent_states(&session.access_token, &org, None, true), + ); + let projects = projects.map_err(|e| internal_error(&state, "list_projects", &e))?; + let environments = warn_default("list_environments", environments); + let dest_states = warn_default("get_destination_states", dest_states); + let release_intents = warn_default("get_release_intent_states", release_intents); + + // Fetch pipelines for all projects. 
+ let mut pipelines_by_project = PipelinesByProject::new(); + for p in &projects { + let pipelines = warn_default( + "list_release_pipelines", + state + .platform_client + .list_release_pipelines(&session.access_token, &org, p) + .await, + ); + if !pipelines.is_empty() { + pipelines_by_project.insert(p.clone(), pipelines); + } + } let items = fetch_org_artifacts(&state, &session.access_token, &org, &projects).await; - let data = build_timeline(items, &org); + let data = build_timeline(items, &org, &environments, &dest_states, &release_intents, &pipelines_by_project); + + let mut sorted_envs = environments.clone(); + sorted_envs.sort_by_key(|e| e.sort_order); + let env_options: Vec = sorted_envs + .iter() + .map(|e| context! { name => e.name }) + .collect(); let html = state .templates @@ -655,17 +2548,12 @@ async fn releases_page( org_name => &org, timeline => data.timeline, lanes => data.lanes, + env_options => env_options, active_tab => "releases", }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -682,39 +2570,76 @@ async fn destinations_page( let current_org = require_org_membership(&state, orgs, &org)?; let is_admin = current_org.role == "owner" || current_org.role == "admin"; - let projects = state - .platform_client - .list_projects(&session.access_token, &org) - .await - .unwrap_or_default(); - - // Aggregate unique destinations from artifacts - let mut destinations = Vec::new(); - let mut seen = std::collections::HashSet::new(); - - for project in &projects { - let artifacts = state + let (environments, org_destinations, projects) = tokio::join!( + state .platform_client - .list_artifacts(&session.access_token, &org, project) - .await - .unwrap_or_default(); + .list_environments(&session.access_token, &org), + state + .platform_client + 
.list_destinations(&session.access_token, &org), + state + .platform_client + .list_projects(&session.access_token, &org), + ); + let mut environments = environments.map_err(|e| internal_error(&state, "list_environments", &e))?; + environments.sort_by_key(|e| e.sort_order); + let org_destinations = org_destinations.map_err(|e| internal_error(&state, "list_destinations", &e))?; + let projects = warn_default("list_projects", projects); - for artifact in &artifacts { - for dest in &artifact.destinations { - let key = (dest.name.clone(), dest.environment.clone()); - if seen.insert(key) { - destinations.push(context! { - name => dest.name, - environment => dest.environment, - project_name => project, - artifact_title => artifact.context.title, - artifact_slug => artifact.slug, - created_at => artifact.created_at, - }); - } + let env_list: Vec = environments + .iter() + .map(|e| { + let env_dests: Vec = org_destinations + .iter() + .filter(|d| d.environment == e.name) + .map(|d| { + let meta_entries: Vec = d + .metadata + .iter() + .map(|(k, v)| context! { key => k, value => v }) + .collect(); + context! { + name => d.name, + environment => d.environment, + type_name => d.dest_type.as_ref().map(|t| t.name.clone()), + type_organisation => d.dest_type.as_ref().map(|t| t.organisation.clone()), + type_version => d.dest_type.as_ref().map(|t| t.version), + metadata => meta_entries, + } + }) + .collect(); + context! { + id => e.id, + name => e.name, + description => e.description, + sort_order => e.sort_order, + destinations => env_dests, } - } - } + }) + .collect(); + + // Also collect destinations not associated with any known environment + let known_envs: std::collections::HashSet<&str> = + environments.iter().map(|e| e.name.as_str()).collect(); + let orphan_dests: Vec = org_destinations + .iter() + .filter(|d| !known_envs.contains(d.environment.as_str())) + .map(|d| { + let meta_entries: Vec = d + .metadata + .iter() + .map(|(k, v)| context! 
{ key => k, value => v }) + .collect(); + context! { + name => d.name, + environment => d.environment, + type_name => d.dest_type.as_ref().map(|t| t.name.clone()), + type_organisation => d.dest_type.as_ref().map(|t| t.organisation.clone()), + type_version => d.dest_type.as_ref().map(|t| t.version), + metadata => meta_entries, + } + }) + .collect(); let html = state .templates @@ -728,24 +2653,314 @@ async fn destinations_page( current_org => &org, orgs => orgs_context(orgs), org_name => &org, - destinations => destinations, + environments => env_list, + orphan_destinations => orphan_dests, + projects => projects, is_admin => is_admin, active_tab => "destinations", }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) } +#[derive(Deserialize)] +struct CreateEnvironmentForm { + _csrf: String, + name: String, + #[serde(default)] + description: String, + #[serde(default)] + sort_order: i32, +} + +async fn create_environment_submit( + State(state): State, + session: Session, + Path(org): Path, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page(&state, StatusCode::FORBIDDEN, "Invalid request", "CSRF validation failed. 
Please try again.")); + } + + if !validate_slug(&form.name) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid environment name", + "Environment names must be lowercase alphanumeric with hyphens, max 64 chars.", + )); + } + + let description = if form.description.is_empty() { + None + } else { + Some(form.description.as_str()) + }; + + state + .platform_client + .create_environment( + &session.access_token, + &org, + &form.name, + description, + form.sort_order, + ) + .await + .map_err(|e| { + internal_error(&state, "create environment error", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/destinations")).into_response()) +} + +#[derive(Deserialize)] +struct CreateDestinationForm { + _csrf: String, + name: String, + environment: String, + #[serde(default)] + type_organisation: String, + #[serde(default)] + type_name: String, + #[serde(default)] + type_version: Option, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + metadata_keys: Vec, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + metadata_values: Vec, +} + +/// HTML forms send a single value as a string, multiple values as a sequence. +/// This deserializer handles both cases. +fn deserialize_string_or_seq<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::de; + + struct StringOrVec; + + impl<'de> de::Visitor<'de> for StringOrVec { + type Value = Vec; + + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.write_str("a string or sequence of strings") + } + + fn visit_str(self, v: &str) -> Result { + Ok(vec![v.to_string()]) + } + + fn visit_string(self, v: String) -> Result { + Ok(vec![v]) + } + + fn visit_seq>(self, mut seq: A) -> Result { + let mut values = Vec::new(); + while let Some(v) = seq.next_element::()? 
{ + values.push(v); + } + Ok(values) + } + } + + deserializer.deserialize_any(StringOrVec) +} + +fn parse_metadata(keys: &[String], values: &[String]) -> std::collections::HashMap { + keys.iter() + .zip(values.iter()) + .filter(|(k, _)| !k.trim().is_empty()) + .map(|(k, v)| (k.trim().to_string(), v.trim().to_string())) + .collect() +} + +async fn create_destination_submit( + State(state): State, + session: Session, + Path(org): Path, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid CSRF token", + "Please try again.", + )); + } + + if form.name.is_empty() || form.environment.is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Destination name and environment are required.", + )); + } + + let metadata = parse_metadata(&form.metadata_keys, &form.metadata_values); + let dest_type = if !form.type_name.trim().is_empty() { + Some(forage_core::platform::DestinationType { + organisation: if form.type_organisation.trim().is_empty() { + org.clone() + } else { + form.type_organisation.trim().to_string() + }, + name: form.type_name.trim().to_string(), + version: form.type_version.unwrap_or(1), + }) + } else { + None + }; + + state + .platform_client + .create_destination( + &session.access_token, + &org, + &form.name, + &form.environment, + &metadata, + dest_type.as_ref(), + ) + .await + .map_err(|e| { + internal_error(&state, "create destination error", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/destinations")).into_response()) +} + +#[derive(Deserialize)] +struct DestinationQuery { + name: String, +} + +async fn destination_detail( + State(state): State, + session: Session, + Path(org): Path, + Query(query): Query, +) -> Result { + let dest_name = &query.name; + let orgs = 
&session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + let is_admin = current_org.role == "owner" || current_org.role == "admin"; + + let destinations = state + .platform_client + .list_destinations(&session.access_token, &org) + .await + .map_err(|e| internal_error(&state, "list_destinations", &e))?; + + let dest = destinations + .iter() + .find(|d| d.name == *dest_name) + .ok_or_else(|| { + error_page( + &state, + StatusCode::NOT_FOUND, + "Destination not found", + &format!("No destination named '{dest_name}' was found."), + ) + })?; + + let meta_entries: Vec = dest + .metadata + .iter() + .map(|(k, v)| context! { key => k, value => v }) + .collect(); + + let html = state + .templates + .render( + "pages/destination_detail.html.jinja", + context! { + title => format!("{} - Destinations - {} - Forage", dest_name, org), + description => format!("Destination {} in {}", dest_name, org), + user => context! { username => session.user.username }, + csrf_token => &session.csrf_token, + current_org => &org, + orgs => orgs_context(orgs), + org_name => &org, + is_admin => is_admin, + active_tab => "destinations", + dest_name => &dest.name, + dest_environment => &dest.environment, + dest_type_name => dest.dest_type.as_ref().map(|t| t.name.clone()), + dest_type_organisation => dest.dest_type.as_ref().map(|t| t.organisation.clone()), + dest_type_version => dest.dest_type.as_ref().map(|t| t.version), + metadata => meta_entries, + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +#[derive(Deserialize)] +struct UpdateDestinationForm { + _csrf: String, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + metadata_keys: Vec, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + metadata_values: Vec, +} + +async fn update_destination_submit( + State(state): State, + session: Session, + Path(org): Path, + Query(query): Query, + Form(form): Form, +) -> 
Result { + let dest_name = &query.name; + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid CSRF token", + "Please try again.", + )); + } + + let metadata = parse_metadata(&form.metadata_keys, &form.metadata_values); + + state + .platform_client + .update_destination(&session.access_token, dest_name, &metadata) + .await + .map_err(|e| { + internal_error(&state, "update destination error", &e) + })?; + + let encoded_name = urlencoding::encode(dest_name); + Ok( + Redirect::to(&format!( + "/orgs/{org}/destinations/detail?name={encoded_name}" + )) + .into_response(), + ) +} + // ─── Members ──────────────────────────────────────────────────────── async fn members_page( @@ -760,7 +2975,7 @@ async fn members_page( .platform_client .list_members(&session.access_token, ¤t_org.organisation_id) .await - .unwrap_or_default(); + .map_err(|e| internal_error(&state, "list_members", &e))?; let is_admin = current_org.role == "owner" || current_org.role == "admin"; @@ -787,13 +3002,7 @@ async fn members_page( }, ) .map_err(|e| { - tracing::error!("template error: {e:#}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "template error", &e) })?; Ok(Html(html).into_response()) @@ -835,13 +3044,7 @@ async fn add_member_submit( ) .await .map_err(|e| { - tracing::error!("failed to add member: {e}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "failed to add member", &e) })?; Ok(Redirect::to(&format!("/orgs/{org}/settings/members")).into_response()) @@ -882,13 +3085,7 @@ async fn update_member_role_submit( ) .await .map_err(|e| { - tracing::error!("failed to update member role: {e}"); - error_page( - 
&state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "failed to update member role", &e) })?; Ok(Redirect::to(&format!("/orgs/{org}/settings/members")).into_response()) @@ -927,14 +3124,1332 @@ async fn remove_member_submit( ) .await .map_err(|e| { - tracing::error!("failed to remove member: {e}"); - error_page( - &state, - StatusCode::INTERNAL_SERVER_ERROR, - "Something went wrong", - "Please try again.", - ) + internal_error(&state, "failed to remove member", &e) })?; Ok(Redirect::to(&format!("/orgs/{org}/settings/members")).into_response()) } + +// ─── Auto-Release Policies ────────────────────────────────────────── + +// ─── Triggers (auto-release triggers) ─────────────────────────────── + +async fn triggers_page( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + + if !validate_slug(&project) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let (triggers, environments, destinations, pipelines) = tokio::join!( + state + .platform_client + .list_triggers(&session.access_token, &org, &project), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .list_destinations(&session.access_token, &org), + state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + ); + let triggers = triggers.map_err(|e| internal_error(&state, "list_triggers", &e))?; + let environments = warn_default("list_environments", environments); + let destinations = warn_default("list_destinations", destinations); + let pipelines = warn_default("list_release_pipelines", pipelines); + + let is_admin = current_org.role == "owner" || current_org.role == "admin"; + + let trigger_items: Vec = triggers + .iter() + 
.map(|t| { + context! { + id => t.id, + name => t.name, + enabled => t.enabled, + branch_pattern => t.branch_pattern, + title_pattern => t.title_pattern, + author_pattern => t.author_pattern, + commit_message_pattern => t.commit_message_pattern, + source_type_pattern => t.source_type_pattern, + target_environments => &t.target_environments, + target_destinations => &t.target_destinations, + force_release => t.force_release, + use_pipeline => t.use_pipeline, + created_at => t.created_at, + updated_at => t.updated_at, + } + }) + .collect(); + + let env_options: Vec = environments + .iter() + .map(|e| context! { name => e.name }) + .collect(); + + let dest_options: Vec = destinations + .iter() + .map(|d| context! { name => d.name, environment => d.environment }) + .collect(); + + let pipeline_options: Vec = pipelines + .iter() + .filter(|p| p.enabled) + .map(|p| context! { name => p.name }) + .collect(); + + let projects = warn_default( + "list_projects", + state.platform_client.list_projects(&session.access_token, &org).await, + ); + + let html = state + .templates + .render( + "pages/triggers.html.jinja", + context! { + page_title => format!("Triggers · {} · {}", project, org), + user => context! 
{ + username => session.user.username, + }, + csrf_token => session.csrf_token, + orgs => orgs_context(orgs), + current_org => org, + current_project => project, + projects => projects, + triggers => trigger_items, + environments => env_options, + destinations => dest_options, + pipelines => pipeline_options, + is_admin => is_admin, + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +#[derive(Deserialize)] +struct CreateTriggerForm { + csrf_token: String, + #[serde(default)] + name: String, + #[serde(default)] + branch_pattern: String, + #[serde(default)] + title_pattern: String, + #[serde(default)] + author_pattern: String, + #[serde(default)] + commit_message_pattern: String, + #[serde(default)] + source_type_pattern: String, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + target_environments: Vec, + #[serde(default)] + force_release: Option, + #[serde(default)] + use_pipeline: Option, +} + +async fn create_trigger_submit( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + if form.name.trim().is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Trigger name is required.", + )); + } + + // At least one filter pattern is required + let has_pattern = non_empty(&form.branch_pattern).is_some() + || non_empty(&form.title_pattern).is_some() + || non_empty(&form.author_pattern).is_some() + || non_empty(&form.commit_message_pattern).is_some() + || non_empty(&form.source_type_pattern).is_some(); + if !has_pattern { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "At least one filter pattern is required.", + )); + } + + let environments: Vec = form + .target_environments + .iter() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + if environments.is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "At least one target environment is required.", + )); + } + + // Auto-generate name from first pattern if not provided + let name = if form.name.trim().is_empty() { + let pattern = non_empty(&form.branch_pattern) + .or_else(|| non_empty(&form.title_pattern)) + .or_else(|| non_empty(&form.author_pattern)) + .or_else(|| non_empty(&form.commit_message_pattern)) + .or_else(|| non_empty(&form.source_type_pattern)) + .unwrap_or_default(); + let envs = environments.join("-"); + format!("{}-to-{}", pattern, envs) + } else { + form.name.trim().to_string() + }; + + let input = CreateTriggerInput { + name, + branch_pattern: non_empty(&form.branch_pattern), + title_pattern: non_empty(&form.title_pattern), + author_pattern: non_empty(&form.author_pattern), + commit_message_pattern: non_empty(&form.commit_message_pattern), + source_type_pattern: non_empty(&form.source_type_pattern), + target_environments: environments, + target_destinations: vec![], + force_release: form.force_release.as_deref() == Some("true"), + use_pipeline: form.use_pipeline.as_deref() == Some("true"), + }; 
+ + state + .platform_client + .create_trigger(&session.access_token, &org, &project, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to create trigger", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/triggers")).into_response()) +} + +#[derive(Deserialize)] +struct ToggleTriggerForm { + csrf_token: String, + #[serde(default)] + enabled: Option, +} + +async fn toggle_trigger( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. Please try again.", + )); + } + + let input = UpdateTriggerInput { + enabled: Some(form.enabled.is_some()), + branch_pattern: None, + title_pattern: None, + author_pattern: None, + commit_message_pattern: None, + source_type_pattern: None, + target_environments: vec![], + target_destinations: vec![], + force_release: None, + use_pipeline: None, + }; + + state + .platform_client + .update_trigger(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to toggle trigger", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/triggers")).into_response()) +} + +#[derive(Deserialize)] +struct DeleteTriggerForm { + csrf_token: String, +} + +async fn delete_trigger( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid 
request", + "CSRF validation failed. Please try again.", + )); + } + + state + .platform_client + .delete_trigger(&session.access_token, &org, &project, &name) + .await + .map_err(|e| { + internal_error(&state, "failed to delete trigger", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/triggers")).into_response()) +} + +async fn edit_trigger_page( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !validate_slug(&project) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let (triggers, environments, pipelines) = tokio::join!( + state + .platform_client + .list_triggers(&session.access_token, &org, &project), + state + .platform_client + .list_environments(&session.access_token, &org), + state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + ); + let triggers = triggers.map_err(|e| internal_error(&state, "list_triggers", &e))?; + let environments = warn_default("list_environments", environments); + let pipelines = warn_default("list_release_pipelines", pipelines); + + let trigger = triggers + .iter() + .find(|t| t.name == name) + .ok_or_else(|| { + error_page( + &state, + StatusCode::NOT_FOUND, + "Not found", + "Trigger not found.", + ) + })?; + + let trigger_ctx = context! 
{ + name => trigger.name, + enabled => trigger.enabled, + branch_pattern => trigger.branch_pattern, + title_pattern => trigger.title_pattern, + author_pattern => trigger.author_pattern, + commit_message_pattern => trigger.commit_message_pattern, + source_type_pattern => trigger.source_type_pattern, + target_environments => &trigger.target_environments, + target_destinations => &trigger.target_destinations, + force_release => trigger.force_release, + use_pipeline => trigger.use_pipeline, + }; + + let env_options: Vec = environments + .iter() + .map(|e| context! { name => e.name }) + .collect(); + + let pipeline_options: Vec = pipelines + .iter() + .filter(|p| p.enabled) + .map(|p| context! { name => p.name }) + .collect(); + + let projects = warn_default( + "list_projects", + state + .platform_client + .list_projects(&session.access_token, &org) + .await, + ); + + let html = state + .templates + .render( + "pages/trigger_edit.html.jinja", + context! { + page_title => format!("Edit Trigger · {} · {}", name, org), + user => context! 
{ + username => session.user.username, + }, + csrf_token => session.csrf_token, + orgs => orgs_context(orgs), + current_org => org, + current_project => project, + projects => projects, + trigger => trigger_ctx, + environments => env_options, + pipelines => pipeline_options, + }, + ) + .map_err(|e| internal_error(&state, "template error", &e))?; + + Ok(Html(html).into_response()) +} + +#[derive(Deserialize)] +struct EditTriggerForm { + csrf_token: String, + #[serde(default)] + branch_pattern: String, + #[serde(default)] + title_pattern: String, + #[serde(default)] + author_pattern: String, + #[serde(default)] + commit_message_pattern: String, + #[serde(default)] + source_type_pattern: String, + #[serde(default, deserialize_with = "deserialize_string_or_seq")] + target_environments: Vec, + #[serde(default)] + force_release: Option, + #[serde(default)] + use_pipeline: Option, +} + +async fn edit_trigger_submit( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + let has_pattern = non_empty(&form.branch_pattern).is_some() + || non_empty(&form.title_pattern).is_some() + || non_empty(&form.author_pattern).is_some() + || non_empty(&form.commit_message_pattern).is_some() + || non_empty(&form.source_type_pattern).is_some(); + if !has_pattern { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "At least one filter pattern is required.", + )); + } + + let environments: Vec = form + .target_environments + .iter() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(); + if environments.is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "At least one target environment is required.", + )); + } + + let input = UpdateTriggerInput { + enabled: None, + branch_pattern: non_empty(&form.branch_pattern), + title_pattern: non_empty(&form.title_pattern), + author_pattern: non_empty(&form.author_pattern), + commit_message_pattern: non_empty(&form.commit_message_pattern), + source_type_pattern: non_empty(&form.source_type_pattern), + target_environments: environments, + target_destinations: vec![], + force_release: Some(form.force_release.as_deref() == Some("true")), + use_pipeline: Some(form.use_pipeline.as_deref() == Some("true")), + }; + + state + .platform_client + .update_trigger(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| internal_error(&state, "failed to update trigger", &e))?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/triggers")).into_response()) +} + +// ─── Policies (deployment gating) ────────────────────────────────── + +async fn policies_page( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + + if !validate_slug(&project) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + 
"Invalid request", + "Invalid project name.", + )); + } + + let (policies, environments) = tokio::join!( + state + .platform_client + .list_policies(&session.access_token, &org, &project), + state + .platform_client + .list_environments(&session.access_token, &org), + ); + let policies = policies.map_err(|e| internal_error(&state, "list_policies", &e))?; + let environments = warn_default("list_environments", environments); + + let is_admin = current_org.role == "owner" || current_org.role == "admin"; + + let policy_items: Vec = policies + .iter() + .map(|p| { + let (policy_type, config_detail) = match &p.config { + PolicyConfig::SoakTime { + source_environment, + target_environment, + duration_seconds, + } => ( + "soak_time", + context! { + source_environment => source_environment, + target_environment => target_environment, + duration_seconds => duration_seconds, + duration_human => format_duration(*duration_seconds), + }, + ), + PolicyConfig::BranchRestriction { + target_environment, + branch_pattern, + } => ( + "branch_restriction", + context! { + target_environment => target_environment, + branch_pattern => branch_pattern, + }, + ), + }; + context! { + id => p.id, + name => p.name, + enabled => p.enabled, + policy_type => policy_type, + config => config_detail, + created_at => p.created_at, + updated_at => p.updated_at, + } + }) + .collect(); + + let env_options: Vec = environments + .iter() + .map(|e| context! { name => e.name }) + .collect(); + + let projects = warn_default( + "list_projects", + state.platform_client.list_projects(&session.access_token, &org).await, + ); + + let html = state + .templates + .render( + "pages/policies.html.jinja", + context! { + page_title => format!("Policies · {} · {}", project, org), + user => context! 
{ + username => session.user.username, + }, + csrf_token => session.csrf_token, + orgs => orgs_context(orgs), + current_org => org, + current_project => project, + projects => projects, + policies => policy_items, + environments => env_options, + is_admin => is_admin, + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +/// Map environment name to a swimlane bar color (matches ENV_COLORS in swim-lanes.js). +fn env_lane_color(name: &str) -> &'static str { + let lower = name.to_lowercase(); + if lower.contains("prod") && !lower.contains("preprod") && !lower.contains("pre-prod") { + "#ec4899" + } else if lower.contains("preprod") || lower.contains("pre-prod") { + "#f97316" + } else if lower.contains("stag") { + "#eab308" + } else if lower.contains("dev") { + "#8b5cf6" + } else if lower.contains("test") { + "#06b6d4" + } else { + "#6b7280" + } +} + +fn format_duration(seconds: i64) -> String { + if seconds >= 3600 { + let hours = seconds / 3600; + let mins = (seconds % 3600) / 60; + if mins > 0 { + format!("{}h {}m", hours, mins) + } else { + format!("{}h", hours) + } + } else if seconds >= 60 { + format!("{}m", seconds / 60) + } else { + format!("{}s", seconds) + } +} + +#[derive(Deserialize)] +struct CreatePolicyForm { + csrf_token: String, + #[serde(default)] + name: String, + #[serde(default)] + policy_type: String, + // SoakTime fields + #[serde(default)] + source_environment: String, + #[serde(default)] + target_environment: String, + #[serde(default)] + duration_seconds: Option, + // BranchRestriction fields + #[serde(default)] + branch_pattern: String, +} + +async fn create_policy_submit( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + 
return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. Please try again.", + )); + } + + if form.name.trim().is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Policy name is required.", + )); + } + + let config = match form.policy_type.as_str() { + "soak_time" => { + let source = form.source_environment.trim(); + let target = form.target_environment.trim(); + let duration = form.duration_seconds.unwrap_or(0); + if source.is_empty() || target.is_empty() || duration <= 0 { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Soak time requires source environment, target environment, and a positive duration.", + )); + } + PolicyConfig::SoakTime { + source_environment: source.to_string(), + target_environment: target.to_string(), + duration_seconds: duration, + } + } + "branch_restriction" => { + let target = form.target_environment.trim(); + let pattern = form.branch_pattern.trim(); + if target.is_empty() || pattern.is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Branch restriction requires a target environment and branch pattern.", + )); + } + PolicyConfig::BranchRestriction { + target_environment: target.to_string(), + branch_pattern: pattern.to_string(), + } + } + _ => { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid policy type. 
Must be 'soak_time' or 'branch_restriction'.", + )); + } + }; + + let input = CreatePolicyInput { + name: form.name.trim().to_string(), + config, + }; + + state + .platform_client + .create_policy(&session.access_token, &org, &project, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to create policy", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/policies")).into_response()) +} + +#[derive(Deserialize)] +struct TogglePolicyForm { + csrf_token: String, + #[serde(default)] + enabled: Option, +} + +async fn toggle_policy( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. Please try again.", + )); + } + + let input = UpdatePolicyInput { + enabled: Some(form.enabled.is_some()), + config: None, + }; + + state + .platform_client + .update_policy(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to toggle policy", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/policies")).into_response()) +} + +#[derive(Deserialize)] +struct DeletePolicyForm { + csrf_token: String, +} + +async fn delete_policy( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + state + .platform_client + .delete_policy(&session.access_token, &org, &project, &name) + .await + .map_err(|e| { + internal_error(&state, "failed to delete policy", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/policies")).into_response()) +} + +async fn edit_policy_page( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !validate_slug(&project) { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let (policies, environments) = tokio::join!( + state + .platform_client + .list_policies(&session.access_token, &org, &project), + state + .platform_client + .list_environments(&session.access_token, &org), + ); + let policies = policies.map_err(|e| internal_error(&state, "list_policies", &e))?; + let environments = warn_default("list_environments", environments); + + let policy = policies + .iter() + .find(|p| p.name == name) + .ok_or_else(|| { + error_page( + &state, + StatusCode::NOT_FOUND, + "Not found", + "Policy not found.", + ) + })?; + + let (policy_type, config_ctx) = match &policy.config { + PolicyConfig::SoakTime { + source_environment, + target_environment, + duration_seconds, + } => ( + "soak_time", + context! { + source_environment => source_environment, + target_environment => target_environment, + duration_seconds => duration_seconds, + }, + ), + PolicyConfig::BranchRestriction { + target_environment, + branch_pattern, + } => ( + "branch_restriction", + context! { + target_environment => target_environment, + branch_pattern => branch_pattern, + }, + ), + }; + + let policy_ctx = context! 
{ + name => policy.name, + enabled => policy.enabled, + policy_type => policy_type, + config => config_ctx, + }; + + let env_options: Vec = environments + .iter() + .map(|e| context! { name => e.name }) + .collect(); + + let projects = warn_default( + "list_projects", + state + .platform_client + .list_projects(&session.access_token, &org) + .await, + ); + + let html = state + .templates + .render( + "pages/policy_edit.html.jinja", + context! { + page_title => format!("Edit Policy · {} · {}", name, org), + user => context! { + username => session.user.username, + }, + csrf_token => session.csrf_token, + orgs => orgs_context(orgs), + current_org => org, + current_project => project, + projects => projects, + policy => policy_ctx, + environments => env_options, + }, + ) + .map_err(|e| internal_error(&state, "template error", &e))?; + + Ok(Html(html).into_response()) +} + +#[derive(Deserialize)] +struct EditPolicyForm { + csrf_token: String, + #[serde(default)] + policy_type: String, + #[serde(default)] + source_environment: String, + #[serde(default)] + target_environment: String, + #[serde(default)] + duration_seconds: Option, + #[serde(default)] + branch_pattern: String, +} + +async fn edit_policy_submit( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if form.csrf_token != session.csrf_token { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + let config = match form.policy_type.as_str() { + "soak_time" => { + let source = form.source_environment.trim(); + let target = form.target_environment.trim(); + let duration = form.duration_seconds.unwrap_or(0); + if source.is_empty() || target.is_empty() || duration <= 0 { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Soak time requires source environment, target environment, and a positive duration.", + )); + } + PolicyConfig::SoakTime { + source_environment: source.to_string(), + target_environment: target.to_string(), + duration_seconds: duration, + } + } + "branch_restriction" => { + let target = form.target_environment.trim(); + let pattern = form.branch_pattern.trim(); + if target.is_empty() || pattern.is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Branch restriction requires a target environment and branch pattern.", + )); + } + PolicyConfig::BranchRestriction { + target_environment: target.to_string(), + branch_pattern: pattern.to_string(), + } + } + _ => { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid policy type.", + )); + } + }; + + let input = UpdatePolicyInput { + enabled: None, + config: Some(config), + }; + + state + .platform_client + .update_policy(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| internal_error(&state, "failed to update policy", &e))?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/policies")).into_response()) +} + +// ─── Release Pipelines ────────────────────────────────────────────── + +#[tracing::instrument(skip(state, session), fields(org, project))] +async fn pipelines_page( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + + if !validate_slug(&project) { + 
return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Invalid project name.", + )); + } + + let (pipelines, projects) = tokio::join!( + state + .platform_client + .list_release_pipelines(&session.access_token, &org, &project), + state + .platform_client + .list_projects(&session.access_token, &org), + ); + let pipelines = pipelines.map_err(|e| internal_error(&state, "list_pipelines", &e))?; + let projects = warn_default("list_projects", projects); + + let is_admin = current_org.role == "owner" || current_org.role == "admin"; + + let pipeline_items: Vec = pipelines + .iter() + .map(|p| { + let stage_count = p.stages.len(); + context! { + id => p.id, + name => p.name, + enabled => p.enabled, + stages_json => serde_json::to_string(&p.stages).unwrap_or_default(), + stage_count => stage_count, + created_at => p.created_at, + updated_at => p.updated_at, + } + }) + .collect(); + + let html = state + .templates + .render( + "pages/pipelines.html.jinja", + context! { + page_title => format!("Pipelines · {} · {}", project, org), + user => context! 
{ + username => session.user.username, + }, + csrf_token => session.csrf_token, + orgs => orgs_context(orgs), + current_org => org, + current_project => project, + projects => projects, + pipelines => pipeline_items, + is_admin => is_admin, + }, + ) + .map_err(|e| { + internal_error(&state, "template error", &e) + })?; + + Ok(Html(html).into_response()) +} + +#[derive(Deserialize)] +struct CreatePipelineForm { + _csrf: String, + name: String, + #[serde(default)] + stages_json: String, +} + +#[tracing::instrument(skip(state, session, form), fields(org, project))] +async fn create_pipeline_submit( + State(state): State, + session: Session, + Path((org, project)): Path<(String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. Please try again.", + )); + } + + if form.name.trim().is_empty() { + return Err(error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "Pipeline name is required.", + )); + } + + let stages: Vec = if form.stages_json.trim().is_empty() { + Vec::new() + } else { + serde_json::from_str(&form.stages_json).map_err(|_| { + error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "stages_json is not valid JSON.", + ) + })? 
+ }; + + let input = CreateReleasePipelineInput { + name: form.name.trim().to_string(), + stages, + }; + + state + .platform_client + .create_release_pipeline(&session.access_token, &org, &project, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to create pipeline", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/pipelines")).into_response()) +} + +#[derive(Deserialize)] +struct TogglePipelineForm { + _csrf: String, + #[serde(default)] + enabled: Option, +} + +#[tracing::instrument(skip(state, session, form), fields(org, project, name))] +async fn toggle_pipeline( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + let input = UpdateReleasePipelineInput { + enabled: Some(form.enabled.is_some()), + stages: None, + }; + + state + .platform_client + .update_release_pipeline(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to toggle pipeline", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/pipelines")).into_response()) +} + +#[derive(Deserialize)] +struct UpdatePipelineForm { + _csrf: String, + #[serde(default)] + stages_json: String, +} + +#[tracing::instrument(skip(state, session, form), fields(org, project, name))] +async fn update_pipeline_submit( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. Please try again.", + )); + } + + let stages: Vec = if form.stages_json.trim().is_empty() { + Vec::new() + } else { + serde_json::from_str(&form.stages_json).map_err(|_| { + error_page( + &state, + StatusCode::BAD_REQUEST, + "Invalid request", + "stages_json is not valid JSON.", + ) + })? 
+ }; + + let input = UpdateReleasePipelineInput { + enabled: None, + stages: Some(stages), + }; + + state + .platform_client + .update_release_pipeline(&session.access_token, &org, &project, &name, &input) + .await + .map_err(|e| { + internal_error(&state, "failed to update pipeline", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/pipelines")).into_response()) +} + +#[derive(Deserialize)] +struct DeletePipelineForm { + _csrf: String, +} + +#[tracing::instrument(skip(state, session, form), fields(org, project, name))] +async fn delete_pipeline( + State(state): State, + session: Session, + Path((org, project, name)): Path<(String, String, String)>, + Form(form): Form, +) -> Result { + let orgs = &session.user.orgs; + let current_org = require_org_membership(&state, orgs, &org)?; + require_admin(&state, current_org)?; + + if !auth::validate_csrf(&session, &form._csrf) { + return Err(error_page( + &state, + StatusCode::FORBIDDEN, + "Invalid request", + "CSRF validation failed. 
Please try again.", + )); + } + + state + .platform_client + .delete_release_pipeline(&session.access_token, &org, &project, &name) + .await + .map_err(|e| { + internal_error(&state, "failed to delete pipeline", &e) + })?; + + Ok(Redirect::to(&format!("/orgs/{org}/projects/{project}/pipelines")).into_response()) +} + +fn non_empty(s: &str) -> Option { + let trimmed = s.trim(); + if trimmed.is_empty() { + None + } else { + Some(trimmed.to_string()) + } +} + diff --git a/crates/forage-server/src/state.rs b/crates/forage-server/src/state.rs index 4c6ccfd..e7b40e6 100644 --- a/crates/forage-server/src/state.rs +++ b/crates/forage-server/src/state.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use crate::forest_client::GrpcForestClient; use crate::templates::TemplateEngine; use forage_core::auth::ForestAuth; use forage_core::platform::ForestPlatform; @@ -11,6 +12,7 @@ pub struct AppState { pub forest_client: Arc, pub platform_client: Arc, pub sessions: Arc, + pub grpc_client: Option>, } impl AppState { @@ -25,6 +27,12 @@ impl AppState { forest_client, platform_client, sessions, + grpc_client: None, } } + + pub fn with_grpc_client(mut self, client: Arc) -> Self { + self.grpc_client = Some(client); + self + } } diff --git a/crates/forage-server/src/templates.rs b/crates/forage-server/src/templates.rs index 15c4145..a3856c8 100644 --- a/crates/forage-server/src/templates.rs +++ b/crates/forage-server/src/templates.rs @@ -40,6 +40,44 @@ fn timeago(value: &str) -> String { } } +/// Format a future ISO 8601 / RFC 3339 timestamp as a relative countdown. 
+fn timeuntil(value: &str) -> String { + let Ok(dt) = chrono::DateTime::parse_from_rfc3339(value) + .or_else(|_| chrono::DateTime::parse_from_rfc3339(&format!("{value}Z"))) + else { + return value.to_string(); + }; + + let now = chrono::Utc::now(); + let diff = dt.signed_duration_since(now); + + if diff.num_seconds() <= 0 { + "now".into() + } else if diff.num_seconds() < 60 { + let s = diff.num_seconds(); + format!("in {s}s") + } else if diff.num_minutes() < 60 { + let m = diff.num_minutes(); + let s = diff.num_seconds() % 60; + if s > 0 { + format!("in {m}m {s}s") + } else { + format!("in {m}m") + } + } else if diff.num_hours() < 24 { + let h = diff.num_hours(); + let m = diff.num_minutes() % 60; + if m > 0 { + format!("in {h}h {m}m") + } else { + format!("in {h}h") + } + } else { + let d = diff.num_days(); + format!("in {d}d") + } +} + /// Format an ISO 8601 / RFC 3339 timestamp as a full human-readable datetime. fn datetime(value: &str) -> String { let Ok(dt) = chrono::DateTime::parse_from_rfc3339(value) @@ -73,7 +111,11 @@ impl TemplateEngine { let mut env = Environment::new(); env.set_loader(minijinja::path_loader(path)); env.add_filter("timeago", |v: String| -> String { timeago(&v) }); + env.add_filter("timeuntil", |v: String| -> String { timeuntil(&v) }); env.add_filter("datetime", |v: String| -> String { datetime(&v) }); + env.add_filter("urlencode", |v: String| -> String { + urlencoding::encode(&v).into_owned() + }); Ok(Self { env }) } diff --git a/crates/forage-server/src/test_support.rs b/crates/forage-server/src/test_support.rs index ff6d2a4..79db492 100644 --- a/crates/forage-server/src/test_support.rs +++ b/crates/forage-server/src/test_support.rs @@ -4,7 +4,9 @@ use axum::Router; use chrono::Utc; use forage_core::auth::*; use forage_core::platform::{ - Artifact, ArtifactContext, Destination, ForestPlatform, Organisation, OrgMember, PlatformError, + Artifact, ArtifactContext, CreatePolicyInput, CreateReleasePipelineInput, CreateTriggerInput, + 
Destination, Environment, ForestPlatform, Organisation, OrgMember, PlatformError, Policy, + ReleasePipeline, Trigger, UpdatePolicyInput, UpdateReleasePipelineInput, UpdateTriggerInput, }; use forage_core::session::{ CachedOrg, CachedUser, InMemorySessionStore, SessionData, SessionStore, @@ -41,7 +43,16 @@ pub(crate) struct MockPlatformBehavior { pub remove_member_result: Option>, pub update_member_role_result: Option>, pub get_artifact_by_slug_result: Option>, + pub list_environments_result: Option, PlatformError>>, pub list_destinations_result: Option, PlatformError>>, + pub list_triggers_result: Option, PlatformError>>, + pub create_trigger_result: Option>, + pub update_trigger_result: Option>, + pub delete_trigger_result: Option>, + pub list_release_pipelines_result: Option, PlatformError>>, + pub create_release_pipeline_result: Option>, + pub update_release_pipeline_result: Option>, + pub delete_release_pipeline_result: Option>, } pub(crate) fn ok_tokens() -> AuthTokens { @@ -214,6 +225,18 @@ impl ForestAuth for MockForestClient { })) } + async fn get_user_by_username( + &self, + _access_token: &str, + username: &str, + ) -> Result { + Ok(UserProfile { + user_id: "user-123".into(), + username: username.into(), + created_at: Some("2025-01-15T10:00:00Z".into()), + }) + } + async fn remove_email( &self, _access_token: &str, @@ -386,6 +409,15 @@ impl ForestPlatform for MockPlatformClient { })) } + async fn list_environments( + &self, + _access_token: &str, + _organisation: &str, + ) -> Result, PlatformError> { + let b = self.behavior.lock().unwrap(); + b.list_environments_result.clone().unwrap_or(Ok(vec![])) + } + async fn list_destinations( &self, _access_token: &str, @@ -394,6 +426,255 @@ impl ForestPlatform for MockPlatformClient { let b = self.behavior.lock().unwrap(); b.list_destinations_result.clone().unwrap_or(Ok(vec![])) } + + async fn create_environment( + &self, + _access_token: &str, + organisation: &str, + name: &str, + description: Option<&str>, + 
sort_order: i32, + ) -> Result { + Ok(Environment { + id: format!("env-{name}"), + organisation: organisation.into(), + name: name.into(), + description: description.map(|s| s.to_string()), + sort_order, + created_at: "2026-03-08T00:00:00Z".into(), + }) + } + + async fn create_destination( + &self, + _access_token: &str, + _organisation: &str, + _name: &str, + _environment: &str, + _metadata: &std::collections::HashMap, + _dest_type: Option<&forage_core::platform::DestinationType>, + ) -> Result<(), PlatformError> { + Ok(()) + } + + async fn update_destination( + &self, + _access_token: &str, + _name: &str, + _metadata: &std::collections::HashMap, + ) -> Result<(), PlatformError> { + Ok(()) + } + + async fn get_destination_states( + &self, + _access_token: &str, + _organisation: &str, + _project: Option<&str>, + ) -> Result { + Ok(forage_core::platform::DeploymentStates { + destinations: vec![], + }) + } + + async fn get_release_intent_states( + &self, + _access_token: &str, + _organisation: &str, + _project: Option<&str>, + _include_completed: bool, + ) -> Result, PlatformError> { + Ok(vec![]) + } + + async fn release_artifact( + &self, + _access_token: &str, + _artifact_id: &str, + _destinations: &[String], + _environments: &[String], + _use_pipeline: bool, + ) -> Result<(), PlatformError> { + Ok(()) + } + + async fn list_triggers( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + ) -> Result, PlatformError> { + let b = self.behavior.lock().unwrap(); + b.list_triggers_result.clone().unwrap_or(Ok(vec![])) + } + + async fn create_trigger( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + input: &CreateTriggerInput, + ) -> Result { + let b = self.behavior.lock().unwrap(); + b.create_trigger_result.clone().unwrap_or(Ok(Trigger { + id: "trigger-1".into(), + name: input.name.clone(), + enabled: true, + branch_pattern: input.branch_pattern.clone(), + title_pattern: input.title_pattern.clone(), + author_pattern: 
input.author_pattern.clone(), + commit_message_pattern: input.commit_message_pattern.clone(), + source_type_pattern: input.source_type_pattern.clone(), + target_environments: input.target_environments.clone(), + target_destinations: input.target_destinations.clone(), + force_release: input.force_release, + use_pipeline: input.use_pipeline, + created_at: "2026-03-08T00:00:00Z".into(), + updated_at: "2026-03-08T00:00:00Z".into(), + })) + } + + async fn update_trigger( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + name: &str, + input: &UpdateTriggerInput, + ) -> Result { + let b = self.behavior.lock().unwrap(); + b.update_trigger_result.clone().unwrap_or(Ok(Trigger { + id: "trigger-1".into(), + name: name.into(), + enabled: input.enabled.unwrap_or(true), + branch_pattern: input.branch_pattern.clone(), + title_pattern: input.title_pattern.clone(), + author_pattern: input.author_pattern.clone(), + commit_message_pattern: input.commit_message_pattern.clone(), + source_type_pattern: input.source_type_pattern.clone(), + target_environments: input.target_environments.clone(), + target_destinations: input.target_destinations.clone(), + force_release: input.force_release.unwrap_or(false), + use_pipeline: input.use_pipeline.unwrap_or(false), + created_at: "2026-03-08T00:00:00Z".into(), + updated_at: "2026-03-08T00:00:00Z".into(), + })) + } + + async fn delete_trigger( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + _name: &str, + ) -> Result<(), PlatformError> { + let b = self.behavior.lock().unwrap(); + b.delete_trigger_result.clone().unwrap_or(Ok(())) + } + + async fn list_policies( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + ) -> Result, PlatformError> { + Ok(vec![]) + } + + async fn create_policy( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + _input: &CreatePolicyInput, + ) -> Result { + Err(PlatformError::Other("not implemented in mock".into())) + } + + 
async fn update_policy( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + _name: &str, + _input: &UpdatePolicyInput, + ) -> Result { + Err(PlatformError::Other("not implemented in mock".into())) + } + + async fn delete_policy( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + _name: &str, + ) -> Result<(), PlatformError> { + Ok(()) + } + + async fn list_release_pipelines( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + ) -> Result, PlatformError> { + let b = self.behavior.lock().unwrap(); + b.list_release_pipelines_result + .clone() + .unwrap_or(Ok(vec![])) + } + + async fn create_release_pipeline( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + input: &CreateReleasePipelineInput, + ) -> Result { + let b = self.behavior.lock().unwrap(); + b.create_release_pipeline_result + .clone() + .unwrap_or(Ok(ReleasePipeline { + id: "pipeline-1".into(), + name: input.name.clone(), + enabled: true, + stages: input.stages.clone(), + created_at: "2026-03-08T00:00:00Z".into(), + updated_at: "2026-03-08T00:00:00Z".into(), + })) + } + + async fn update_release_pipeline( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + name: &str, + input: &UpdateReleasePipelineInput, + ) -> Result { + let b = self.behavior.lock().unwrap(); + b.update_release_pipeline_result + .clone() + .unwrap_or(Ok(ReleasePipeline { + id: "pipeline-1".into(), + name: name.into(), + enabled: input.enabled.unwrap_or(true), + stages: input.stages.clone().unwrap_or_default(), + created_at: "2026-03-08T00:00:00Z".into(), + updated_at: "2026-03-08T00:00:00Z".into(), + })) + } + + async fn delete_release_pipeline( + &self, + _access_token: &str, + _organisation: &str, + _project: &str, + _name: &str, + ) -> Result<(), PlatformError> { + let b = self.behavior.lock().unwrap(); + b.delete_release_pipeline_result.clone().unwrap_or(Ok(())) + } } pub(crate) fn make_templates() -> TemplateEngine { 
diff --git a/crates/forage-server/src/tests/platform_tests.rs b/crates/forage-server/src/tests/platform_tests.rs index 613fd46..d879d96 100644 --- a/crates/forage-server/src/tests/platform_tests.rs +++ b/crates/forage-server/src/tests/platform_tests.rs @@ -582,7 +582,7 @@ async fn projects_list_non_member_returns_403() { } #[tokio::test] -async fn projects_list_platform_unavailable_degrades_gracefully() { +async fn projects_list_platform_unavailable_returns_500() { let platform = MockPlatformClient::with_behavior(MockPlatformBehavior { list_projects_result: Some(Err(PlatformError::Unavailable( "connection refused".into(), @@ -603,12 +603,13 @@ async fn projects_list_platform_unavailable_degrades_gracefully() { ) .await .unwrap(); - assert_eq!(response.status(), StatusCode::OK); + assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR); let body = axum::body::to_bytes(response.into_body(), usize::MAX) .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("No projects yet")); + assert!(html.contains("Something went wrong")); + assert!(html.contains("connection refused")); } // ─── Project detail ───────────────────────────────────────────────── @@ -634,9 +635,10 @@ async fn project_detail_returns_200_with_artifacts() { .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("my-api")); - assert!(html.contains("Deploy v1.0")); - assert!(html.contains("my-api-abc123")); + // The timeline is now rendered by a Svelte web component + assert!(html.contains("release-timeline")); + assert!(html.contains("org=\"testorg\"")); + assert!(html.contains("project=\"my-api\"")); } #[tokio::test] @@ -664,7 +666,9 @@ async fn project_detail_empty_artifacts_shows_empty_state() { .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("No releases yet")); + // Empty state is now rendered client-side by the Svelte component + 
assert!(html.contains("release-timeline")); + assert!(html.contains("project=\"my-api\"")); } #[tokio::test] @@ -698,6 +702,7 @@ async fn project_detail_shows_enriched_artifact_data() { type_organisation: None, type_name: None, type_version: None, + status: None, }], created_at: "2026-03-07T12:00:00Z".into(), }])), @@ -722,10 +727,79 @@ async fn project_detail_shows_enriched_artifact_data() { .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("v2.0.0")); - assert!(html.contains("main")); - assert!(html.contains("abc1234")); - assert!(html.contains("production")); + // Enriched data is now rendered client-side by the Svelte component + assert!(html.contains("release-timeline")); + assert!(html.contains("project=\"my-api\"")); +} + +#[tokio::test] +async fn timeline_api_returns_json_with_artifacts() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/api/orgs/testorg/projects/my-api/timeline") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert!(json["timeline"].is_array()); + assert!(json["lanes"].is_array()); + // Should have at least one timeline item from the mock data + assert!(!json["timeline"].as_array().unwrap().is_empty()); +} + +#[tokio::test] +async fn org_timeline_api_returns_json() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/api/orgs/testorg/timeline") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + 
assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert!(json["timeline"].is_array()); + assert!(json["lanes"].is_array()); +} + +#[tokio::test] +async fn timeline_api_requires_auth() { + let (state, _sessions) = test_state(); + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/api/orgs/testorg/projects/my-api/timeline") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + // Should redirect to login (302) when not authenticated + assert_eq!(response.status(), StatusCode::SEE_OTHER); } // ─── Artifact detail ──────────────────────────────────────────────── @@ -787,6 +861,7 @@ async fn artifact_detail_shows_enriched_data() { type_organisation: None, type_name: None, type_version: None, + status: None, }, ArtifactDestination { name: "staging".into(), @@ -794,6 +869,7 @@ async fn artifact_detail_shows_enriched_data() { type_organisation: None, type_name: None, type_version: None, + status: None, }, ], created_at: "2026-03-07T12:00:00Z".into(), @@ -1081,7 +1157,7 @@ async fn destinations_page_shows_empty_state() { .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("No destinations yet")); + assert!(html.contains("No environments yet")); } // ─── Releases ──────────────────────────────────────────────────────── @@ -1169,5 +1245,288 @@ async fn releases_page_shows_empty_state() { .await .unwrap(); let html = String::from_utf8(body.to_vec()).unwrap(); - assert!(html.contains("No releases yet")); + // Empty state is now rendered client-side by the Svelte component + assert!(html.contains("release-timeline")); + assert!(html.contains("org=\"testorg\"")); +} + +// ─── User profile ────────────────────────────────────────────────── + +#[tokio::test] +async fn user_profile_shows_username() { + let (state, sessions) = 
test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/users/testuser") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let html = String::from_utf8(body.to_vec()).unwrap(); + assert!(html.contains("testuser")); + assert!(html.contains("Member since")); +} + +// ─── Triggers (auto-release) ──────────────────────────────────────── + +#[tokio::test] +async fn triggers_page_returns_200() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/orgs/testorg/projects/my-api/triggers") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let html = String::from_utf8(body.to_vec()).unwrap(); + assert!(html.contains("Triggers")); +} + +#[tokio::test] +async fn triggers_page_shows_existing_triggers() { + use forage_core::platform::Trigger; + + let platform = MockPlatformClient::with_behavior(MockPlatformBehavior { + list_triggers_result: Some(Ok(vec![Trigger { + id: "t1".into(), + name: "deploy-main".into(), + enabled: true, + branch_pattern: Some("main".into()), + title_pattern: None, + author_pattern: None, + commit_message_pattern: None, + source_type_pattern: None, + target_environments: vec!["staging".into()], + target_destinations: vec![], + force_release: false, + use_pipeline: false, + created_at: "2026-03-08T00:00:00Z".into(), + updated_at: "2026-03-08T00:00:00Z".into(), + }])), + ..Default::default() + }); + let (state, sessions) = 
test_state_with(MockForestClient::new(), platform); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .uri("/orgs/testorg/projects/my-api/triggers") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let html = String::from_utf8(body.to_vec()).unwrap(); + assert!(html.contains("deploy-main")); + assert!(html.contains("staging")); +} + +#[tokio::test] +async fn create_trigger_requires_admin() { + let (state, sessions) = test_state(); + let cookie = create_test_session_member(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/triggers") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=test-csrf&name=test-trigger")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn create_trigger_requires_csrf() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/triggers") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=wrong-token&name=test-trigger")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn create_trigger_success_redirects() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + 
.method("POST") + .uri("/orgs/testorg/projects/my-api/triggers") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=test-csrf&name=deploy-main&branch_pattern=main&target_environments=staging") + ) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::SEE_OTHER); + assert_eq!( + response.headers().get("location").unwrap(), + "/orgs/testorg/projects/my-api/triggers" + ); +} + +#[tokio::test] +async fn toggle_trigger_requires_admin() { + let (state, sessions) = test_state(); + let cookie = create_test_session_member(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/triggers/deploy-main/toggle") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=test-csrf")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn delete_trigger_success_redirects() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/triggers/deploy-main/delete") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=test-csrf")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::SEE_OTHER); + assert_eq!( + response.headers().get("location").unwrap(), + "/orgs/testorg/projects/my-api/triggers" + ); +} + +// ─── Deployment Policies ──────────────────────────────────────────── + +#[tokio::test] +async fn policies_page_returns_200() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response 
= app + .oneshot( + Request::builder() + .uri("/orgs/testorg/projects/my-api/policies") + .header("cookie", &cookie) + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = axum::body::to_bytes(response.into_body(), usize::MAX) + .await + .unwrap(); + let html = String::from_utf8(body.to_vec()).unwrap(); + assert!(html.contains("Deployment Policies")); +} + +#[tokio::test] +async fn create_policy_requires_admin() { + let (state, sessions) = test_state(); + let cookie = create_test_session_member(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/policies") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=test-csrf&name=test-policy&policy_type=soak_time")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); +} + +#[tokio::test] +async fn create_policy_requires_csrf() { + let (state, sessions) = test_state(); + let cookie = create_test_session(&sessions).await; + let app = build_router(state); + + let response = app + .oneshot( + Request::builder() + .method("POST") + .uri("/orgs/testorg/projects/my-api/policies") + .header("cookie", &cookie) + .header("content-type", "application/x-www-form-urlencoded") + .body(Body::from("csrf_token=wrong-token&name=test-policy&policy_type=soak_time")) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::FORBIDDEN); } diff --git a/destinations-page.png b/destinations-page.png deleted file mode 100644 index 479f426..0000000 Binary files a/destinations-page.png and /dev/null differ diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..fc2ed00 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,1405 @@ +{ + "name": "forage-frontend", + "lockfileVersion": 3, 
+ "requires": true, + "packages": { + "": { + "name": "forage-frontend", + "devDependencies": { + "@sveltejs/vite-plugin-svelte": "^5.0.0", + "svelte": "^5.0.0", + "vite": "^6.0.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", + "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", + "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", + "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", + "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", + "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", + "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", + "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", + "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", + "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } 
+ }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", + "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", + "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", + "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", + "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", + "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "cpu": [ + "ppc64" + ], + "dev": true, 
+ "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", + "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", + "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", + "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", + "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", + "integrity": 
"sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", + "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", + "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", + "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", + "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": 
"0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", + "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", + "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", + "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": 
"^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": 
"4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "cpu": [ + "loong64" + ], + 
"dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": 
"sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.59.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sveltejs/acorn-typescript": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.9.tgz", + "integrity": "sha512-lVJX6qEgs/4DOcRTpo56tmKzVPtoWAaVbL4hfO7t7NVwl9AAXzQR6cihesW1BmNMPl+bK6dreu2sOKBP2Q9CIA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^8.9.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-5.1.1.tgz", + "integrity": "sha512-Y1Cs7hhTc+a5E9Va/xwKlAJoariQyHY+5zBgCZg4PFWNYQ1nMN9sjK1zhw1gK69DuqVP++sht/1GZg1aRwmAXQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^4.0.1", + "debug": "^4.4.1", + "deepmerge": "^4.3.1", + "kleur": "^4.1.5", + "magic-string": "^0.30.17", + "vitefu": "^1.0.6" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22" + }, + "peerDependencies": { + "svelte": "^5.0.0", + "vite": "^6.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-4.0.1.tgz", + "integrity": "sha512-J/Nmb2Q2y7mck2hyCX4ckVHcR5tu2J+MtBEQqpDrrgELZ2uvraQcK/ioCV61AqkdXFgriksOKIceDcQmqnGhVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.7" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^5.0.0", + "svelte": "^5.0.0", + "vite": "^6.0.0" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "dev": true, + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "dev": true, + "license": 
"MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/aria-query": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.1.tgz", + "integrity": "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/devalue": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.3.tgz", + "integrity": 
"sha512-nc7XjUU/2Lb+SvEFVGcWLiKkzfw8+qHI7zn8WYXKkLMgfGSHbgCEaR6bJpev8Cm6Rmrb19Gfd/tZvGqx9is3wg==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.25.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", + "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.12", + "@esbuild/android-arm": "0.25.12", + "@esbuild/android-arm64": "0.25.12", + "@esbuild/android-x64": "0.25.12", + "@esbuild/darwin-arm64": "0.25.12", + "@esbuild/darwin-x64": "0.25.12", + "@esbuild/freebsd-arm64": "0.25.12", + "@esbuild/freebsd-x64": "0.25.12", + "@esbuild/linux-arm": "0.25.12", + "@esbuild/linux-arm64": "0.25.12", + "@esbuild/linux-ia32": "0.25.12", + "@esbuild/linux-loong64": "0.25.12", + "@esbuild/linux-mips64el": "0.25.12", + "@esbuild/linux-ppc64": "0.25.12", + "@esbuild/linux-riscv64": "0.25.12", + "@esbuild/linux-s390x": "0.25.12", + "@esbuild/linux-x64": "0.25.12", + "@esbuild/netbsd-arm64": "0.25.12", + "@esbuild/netbsd-x64": "0.25.12", + "@esbuild/openbsd-arm64": "0.25.12", + "@esbuild/openbsd-x64": "0.25.12", + "@esbuild/openharmony-arm64": "0.25.12", + "@esbuild/sunos-x64": "0.25.12", + "@esbuild/win32-arm64": "0.25.12", + "@esbuild/win32-ia32": "0.25.12", + "@esbuild/win32-x64": "0.25.12" + } + }, + "node_modules/esm-env": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esrap": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.2.3.tgz", + "integrity": 
"sha512-8fOS+GIGCQZl/ZIlhl59htOlms6U8NvX6ZYgYHpRU/b6tVSh3uHkOHZikl3D4cMbYM0JlpBe+p/BkZEi8J9XIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/is-reference": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "dev": true, + "license": 
"MIT" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": 
"sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rollup": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + 
"@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/svelte": { + "version": "5.53.7", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.53.7.tgz", + "integrity": "sha512-uxck1KI7JWtlfP3H6HOWi/94soAl23jsGJkBzN2BAWcQng0+lTrRNhxActFqORgnO9BHVd1hKJhG+ljRuIUWfQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "@types/trusted-types": "^2.0.7", + "acorn": "^8.12.1", + "aria-query": "5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "devalue": "^5.6.3", + "esm-env": "^1.2.1", + "esrap": "^2.2.2", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/vite": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": 
"sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", + "postcss": "^8.5.3", + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": ">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vitefu": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.2.tgz", + "integrity": "sha512-zpKATdUbzbsycPFBN71nS2uzBUQiVnFoOrr2rvqv34S1lcAgMKKkjWleLGeiJlZ8lwCXvtWaRn7R3ZC16SYRuw==", + "dev": true, + "license": "MIT", + "workspaces": [ + "tests/deps/*", + "tests/projects/*", + "tests/projects/workspace/packages/*" + ], + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/zimmerframe": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.4.tgz", + "integrity": 
"sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..097d23f --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,14 @@ +{ + "name": "forage-frontend", + "private": true, + "type": "module", + "scripts": { + "build": "vite build", + "dev": "vite build --watch" + }, + "devDependencies": { + "@sveltejs/vite-plugin-svelte": "^5.0.0", + "svelte": "^5.0.0", + "vite": "^6.0.0" + } +} diff --git a/frontend/src/ReleaseLogs.svelte b/frontend/src/ReleaseLogs.svelte new file mode 100644 index 0000000..220c52e --- /dev/null +++ b/frontend/src/ReleaseLogs.svelte @@ -0,0 +1,469 @@ + + + + +
+ {#if destNames.length === 0 && !done} +
+ {#if connected} + Waiting for logs… + {:else} + No logs available + {/if} +
+ {:else if destNames.length === 0 && done} +
No logs recorded for this release.
+ {:else} + +
+
+ {#each destNames as dest} + + {/each} +
+
+ {#if connected && !done} + + Live + + {/if} + + +
+
+ + +
+ {#each activeLines as entry, i} +
+ {#if showTimestamps} + {formatElapsed(entry.timestamp, activeBaseTime)} + {/if} + {entry.line} +
+ {/each} +
+ + {#if !autoScroll} + + {/if} + {/if} +
+ + diff --git a/frontend/src/ReleaseTimeline.svelte b/frontend/src/ReleaseTimeline.svelte new file mode 100644 index 0000000..746c4b1 --- /dev/null +++ b/frontend/src/ReleaseTimeline.svelte @@ -0,0 +1,670 @@ + + + + + + +{#if initialLoading} +
+ +

Loading releases...

+
+{:else if error} +
+

{error}

+ +
+{:else if timeline.length === 0} +
+

No releases yet.

+

Create a release with forest release create

+
+{:else} +
+ +
+ {#each lanes as lane (lane.name)} + {@const bar = laneBarData[lane.name]} + {@const [barColor, lightColor] = bar?.color || [lane.color, "#e5e7eb"]} +
+ {#if bar} + {#if bar.hasHatch} +
+ {/if} + {#if bar.solidH > 0} +
+ {/if} + {#each bar.dots as dotY, di (di)} +
+ {/each} + {/if} +
+ {/each} +
+ + +
+ {#each timeline as item (itemKey(item))} + {#if item.kind === "release" && item.release} + {@const release = item.release} +
+
+ +
+ {#if release.branch} + + + {release.branch} + + {/if} + {#if release.commit_sha} + {release.commit_sha.slice(0, 7)} + {/if} + + {#if release.source_user} + + + {release.source_user} + + {/if} + {#if release.project_name && release.project_name !== project} + {release.project_name} + {/if} +
+
+ + +
+ + {#if release.has_pipeline && !pipelineSummary(release.pipeline_stages)} + + {@const envAllDone = release.env_groups && release.env_groups.length > 0 && release.env_groups.every(g => g.status === "SUCCEEDED")} + + {#if envAllDone} + + Deployed + {:else} + + Queued + {/if} + {:else if release.has_pipeline && pipelineSummary(release.pipeline_stages)} + {@const summary = pipelineSummary(release.pipeline_stages)} + + {#if summary.icon === "pulse"} + + {:else if summary.icon === "check-circle"} + + {:else if summary.icon === "x-circle"} + + {:else if summary.icon === "clock"} + + {:else} + + {/if} + {summary.label} + + {#each release.pipeline_stages as stage (stage.id || stage.environment || stage.stage_type)} + {#if stage.stage_type === "deploy" && summaryShowsStage(summary, stage.status)} + {@const badge = envBadgeClasses(stage.environment || "")} + {@const dot = statusDotColor(stage.status) || badge.dot} + + {stage.environment} + + + {/if} + {/each} + + {summary.done}/{summary.total} + + {:else if release.env_groups && release.env_groups.length > 0} + {@const allSucceeded = release.env_groups.every(g => g.status === "SUCCEEDED")} + {#if allSucceeded} + + Deployed + {:else} + {#each release.env_groups as group, gi (gi)} + {#if group.status !== "SUCCEEDED"} + {@const cfg = STATUS_CONFIG[group.status] || STATUS_CONFIG.SUCCEEDED} + {#if cfg.icon === "pulse"} + + {:else if cfg.icon === "check-circle"} + + {:else} + + {/if} + {cfg.label} + {#each group.envs as env (env)} + {@const badge = envBadgeClasses(env)} + + {env} + + + {/each} + {/if} + {/each} + {/if} + {:else} + + Pending + {/if} + + + + + +
+ {#if release.description} +

{release.description}

+ {/if} +
+ {release.slug} + {#if release.version} + {release.version} + {/if} +
+
+ + + {#if release.has_pipeline} +
+ {#each release.pipeline_stages as stage, i (stage.id || `${stage.stage_type}-${stage.environment}-${i}`)} +
+ {#if stage.status === "SUCCEEDED"} + + {:else if stage.status === "RUNNING"} + + {:else if stage.status === "QUEUED"} + + {:else if stage.status === "FAILED"} + + {:else} + + {/if} + + {#if stage.stage_type === "deploy"} + + {deployStageLabel(stage.status)} + + {@const badge = envBadgeClasses(stage.environment || "")} + + {stage.environment} + + + {:else if stage.stage_type === "wait"} + + {waitStageLabel(stage.status)} {stage.duration_seconds}s + + {/if} + + {#if stage.started_at && (stage.status === "RUNNING" || stage.status === "QUEUED" || stage.completed_at)} + {elapsedStr(stage.started_at, stage.completed_at, stage.status)} + {/if} + + + + pipeline + +
+ {/each} +
+ {/if} + + + {#each release.destinations as dest, i (dest.name)} + {@const destBadge = envBadgeClasses(dest.environment || "")} +
+ {#if dest.status === "SUCCEEDED"} + + {:else if dest.status === "RUNNING" || dest.status === "ASSIGNED"} + + {:else if dest.status === "QUEUED"} + + {:else if dest.status === "FAILED"} + + {:else} + + {/if} + + {dest.environment} + + + {dest.name} + {#if dest.status === "SUCCEEDED"} + Deployed + {:else if dest.status === "RUNNING"} + Deploying + {:else if dest.status === "QUEUED"} + Queued{dest.queue_position ? ` #${dest.queue_position}` : ""} + {:else if dest.status === "FAILED"} + Failed + {/if} + {#if dest.completed_at} + + {/if} +
+ {/each} +
+
+ + {:else if item.kind === "hidden"} +
+ + + {item.count} hidden commit{item.count !== 1 ? "s" : ""} + · + Show commit{item.count !== 1 ? "s" : ""} + + +
+ {#each item.releases || [] as release (release.slug)} +
+
+ +
+ {#if release.commit_sha} + {release.commit_sha.slice(0, 7)} + {/if} + +
+
+
+ {/each} +
+
+ {/if} + {/each} +
+ + +
+ {#each lanes as lane (lane.name)} +
+ {lane.name} +
+ {/each} +
+
+{/if} + + diff --git a/frontend/src/lib/api.js b/frontend/src/lib/api.js new file mode 100644 index 0000000..8569495 --- /dev/null +++ b/frontend/src/lib/api.js @@ -0,0 +1,96 @@ +/** + * Fetch timeline data from the JSON API. + * @param {string} org + * @param {string} project + * @returns {Promise<{timeline: Array, lanes: Array}>} + */ +export async function fetchTimeline(org, project) { + const url = project + ? `/api/orgs/${org}/projects/${project}/timeline` + : `/api/orgs/${org}/timeline`; + const res = await fetch(url, { + credentials: "same-origin", + }); + if (!res.ok) throw new Error(`Timeline fetch failed: ${res.status}`); + return res.json(); +} + +/** + * Connect to SSE endpoint for live updates. + * Returns a disconnect function. + * @param {string} org + * @param {string} project + * @param {(type: string, data: object) => void} onEvent + * @returns {() => void} disconnect + */ +export function connectSSE(org, project, onEvent) { + const url = project + ? `/orgs/${org}/projects/${project}/events` + : `/orgs/${org}/events`; + let retryDelay = 1000; + let es = null; + let stopped = false; + + function connect() { + if (stopped) return; + es = new EventSource(url); + + es.addEventListener("open", () => { + retryDelay = 1000; + }); + + for (const type of ["destination", "release", "artifact", "pipeline"]) { + es.addEventListener(type, (e) => { + try { + const data = JSON.parse(e.data); + onEvent(type, data); + } catch (err) { + console.warn(`[release-timeline] bad ${type} event:`, err); + } + }); + } + + es.addEventListener("error", () => { + es.close(); + if (!stopped) { + setTimeout(connect, retryDelay); + retryDelay = Math.min(retryDelay * 2, 30000); + } + }); + } + + connect(); + + return () => { + stopped = true; + if (es) es.close(); + }; +} + +/** + * Format elapsed time from seconds. 
+ */ +export function formatElapsed(seconds) { + if (seconds < 0) seconds = 0; + if (seconds < 60) return `${seconds}s`; + const m = Math.floor(seconds / 60); + const s = seconds % 60; + if (m < 60) return `${m}m ${s}s`; + const h = Math.floor(m / 60); + return `${h}h ${m % 60}m`; +} + +/** + * Format a relative timestamp. + */ +export function timeAgo(dateStr) { + if (!dateStr) return ""; + const date = new Date(dateStr); + const now = Date.now(); + const diff = Math.floor((now - date.getTime()) / 1000); + if (diff < 10) return "just now"; + if (diff < 60) return `${diff}s ago`; + if (diff < 3600) return `${Math.floor(diff / 60)}m ago`; + if (diff < 86400) return `${Math.floor(diff / 3600)}h ago`; + return `${Math.floor(diff / 86400)}d ago`; +} diff --git a/frontend/src/lib/colors.js b/frontend/src/lib/colors.js new file mode 100644 index 0000000..48ba11a --- /dev/null +++ b/frontend/src/lib/colors.js @@ -0,0 +1,53 @@ +/** Environment-to-color mapping (matches swim-lanes.js + platform.rs) */ +const ENV_COLORS = { + prod: ["#ec4899", "#fce7f3"], + production: ["#ec4899", "#fce7f3"], + preprod: ["#f97316", "#ffedd5"], + "pre-prod": ["#f97316", "#ffedd5"], + staging: ["#eab308", "#fef9c3"], + stage: ["#eab308", "#fef9c3"], + dev: ["#8b5cf6", "#ede9fe"], + development: ["#8b5cf6", "#ede9fe"], + test: ["#06b6d4", "#cffafe"], +}; + +const DEFAULT_COLORS = ["#6b7280", "#e5e7eb"]; + +export function envColors(name) { + const lower = name.toLowerCase(); + if (ENV_COLORS[lower]) return ENV_COLORS[lower]; + for (const [key, colors] of Object.entries(ENV_COLORS)) { + if (lower.includes(key)) return colors; + } + return DEFAULT_COLORS; +} + +export function envLaneColor(name) { + return envColors(name)[0]; +} + +export function envBadgeClasses(env) { + const lower = env.toLowerCase(); + if (lower.includes("prod") && !lower.includes("preprod") && !lower.includes("pre-prod")) { + return { bg: "bg-pink-100 text-pink-800", dot: "bg-pink-500" }; + } + if (lower.includes("preprod") 
|| lower.includes("pre-prod")) { + return { bg: "bg-orange-100 text-orange-800", dot: "bg-orange-500" }; + } + if (lower.includes("stag")) { + return { bg: "bg-yellow-100 text-yellow-800", dot: "bg-yellow-500" }; + } + if (lower.includes("dev")) { + return { bg: "bg-violet-100 text-violet-800", dot: "bg-violet-500" }; + } + return { bg: "bg-gray-100 text-gray-700", dot: "bg-gray-400" }; +} + +export function statusDotColor(status) { + switch (status) { + case "SUCCEEDED": return "bg-green-500"; + case "RUNNING": return "bg-yellow-500"; + case "FAILED": return "bg-red-500"; + default: return null; + } +} diff --git a/frontend/src/lib/status.js b/frontend/src/lib/status.js new file mode 100644 index 0000000..b32298f --- /dev/null +++ b/frontend/src/lib/status.js @@ -0,0 +1,63 @@ +/** Status display configuration — matches live-events.js */ +export const STATUS_CONFIG = { + SUCCEEDED: { label: "Deployed to", stageLabel: "Deployed to", color: "text-green-600", icon: "check-circle", iconColor: "text-green-500" }, + RUNNING: { label: "Deploying to", stageLabel: "Deploying to", color: "text-yellow-700", icon: "pulse", iconColor: "text-yellow-500" }, + ASSIGNED: { label: "Deploying to", stageLabel: "Deploying to", color: "text-yellow-700", icon: "pulse", iconColor: "text-yellow-500" }, + QUEUED: { label: "Queued for", stageLabel: "Queued for", color: "text-blue-600", icon: "clock", iconColor: "text-blue-400" }, + FAILED: { label: "Failed on", stageLabel: "Failed on", color: "text-red-600", icon: "x-circle", iconColor: "text-red-500" }, + TIMED_OUT: { label: "Timed out on", stageLabel: "Timed out on", color: "text-orange-600", icon: "clock", iconColor: "text-orange-500" }, + CANCELLED: { label: "Cancelled", stageLabel: "Cancelled", color: "text-gray-500", icon: "ban", iconColor: "text-gray-400" }, +}; + +export function pipelineSummary(stages) { + if (!stages || stages.length === 0) return null; + let allDone = true, anyFailed = false, anyRunning = false, anyWaiting = 
false, anyQueued = false; + let done = 0; + const total = stages.length; + + for (const s of stages) { + if (s.status === "SUCCEEDED") done++; + if (s.status !== "SUCCEEDED") allDone = false; + if (s.status === "FAILED") anyFailed = true; + if (s.status === "RUNNING") anyRunning = true; + if (s.status === "QUEUED") anyQueued = true; + if (s.stage_type === "wait" && s.status === "RUNNING") anyWaiting = true; + } + + if (allDone) return { label: "Pipeline complete", color: "text-gray-600", icon: "check-circle", iconColor: "text-green-500", done, total }; + if (anyFailed) return { label: "Pipeline failed", color: "text-red-600", icon: "x-circle", iconColor: "text-red-500", done, total }; + if (anyWaiting) return { label: "Waiting for time window", color: "text-yellow-700", icon: "clock", iconColor: "text-yellow-500", done, total }; + if (anyRunning) return { label: "Deploying to", color: "text-yellow-700", icon: "pulse", iconColor: "text-yellow-500", done, total }; + if (anyQueued) return { label: "Queued", color: "text-blue-600", icon: "clock", iconColor: "text-blue-400", done, total }; + return { label: "Pipeline pending", color: "text-gray-400", icon: "pending", iconColor: "text-gray-300", done, total }; +} + +export function envGroupSummary(envGroups) { + if (!envGroups || envGroups.length === 0) return null; + return envGroups.map(g => ({ + ...g, + config: STATUS_CONFIG[g.status] || STATUS_CONFIG.SUCCEEDED, + })); +} + +export function waitStageLabel(status) { + switch (status) { + case "SUCCEEDED": return "Waited"; + case "RUNNING": return "Waiting"; + case "FAILED": return "Wait failed"; + case "CANCELLED": return "Wait cancelled"; + default: return "Wait"; + } +} + +export function deployStageLabel(status) { + switch (status) { + case "SUCCEEDED": return "Deployed to"; + case "RUNNING": return "Deploying to"; + case "QUEUED": return "Queued for"; + case "FAILED": return "Failed on"; + case "TIMED_OUT": return "Timed out on"; + case "CANCELLED": return 
"Cancelled"; + default: return "Deploy to"; + } +} diff --git a/frontend/src/main.js b/frontend/src/main.js new file mode 100644 index 0000000..0121a8f --- /dev/null +++ b/frontend/src/main.js @@ -0,0 +1,3 @@ +// Register all Svelte web components +import "./ReleaseTimeline.svelte"; +import "./ReleaseLogs.svelte"; diff --git a/frontend/vite.config.js b/frontend/vite.config.js new file mode 100644 index 0000000..dd90946 --- /dev/null +++ b/frontend/vite.config.js @@ -0,0 +1,27 @@ +import { defineConfig } from "vite"; +import { svelte } from "@sveltejs/vite-plugin-svelte"; + +export default defineConfig({ + plugins: [ + svelte({ + compilerOptions: { + customElement: true, + }, + }), + ], + build: { + lib: { + entry: "src/main.js", + formats: ["iife"], + name: "ForageComponents", + fileName: () => "forage-components.js", + }, + outDir: "../static/js/components", + emptyOutDir: true, + rollupOptions: { + output: { + inlineDynamicImports: true, + }, + }, + }, +}); diff --git a/interface/proto/forest/v1/auto_release_policies.proto b/interface/proto/forest/v1/auto_release_policies.proto new file mode 100644 index 0000000..8a1bce9 --- /dev/null +++ b/interface/proto/forest/v1/auto_release_policies.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; + +package forest.v1; + +import "forest/v1/releases.proto"; + +message AutoReleasePolicy { + string id = 1; + string name = 2; + bool enabled = 3; + optional string branch_pattern = 4; + optional string title_pattern = 5; + optional string author_pattern = 6; + optional string commit_message_pattern = 7; + optional string source_type_pattern = 8; + repeated string target_environments = 9; + repeated string target_destinations = 10; + bool force_release = 11; + string created_at = 12; + string updated_at = 13; + // When true, trigger the project's release pipeline instead of + // deploying directly to target destinations/environments. 
+ bool use_pipeline = 14; +} + +message CreateAutoReleasePolicyRequest { + Project project = 1; + string name = 2; + optional string branch_pattern = 3; + optional string title_pattern = 4; + optional string author_pattern = 5; + optional string commit_message_pattern = 6; + optional string source_type_pattern = 7; + repeated string target_environments = 8; + repeated string target_destinations = 9; + bool force_release = 10; + bool use_pipeline = 11; +} +message CreateAutoReleasePolicyResponse { + AutoReleasePolicy policy = 1; +} + +message UpdateAutoReleasePolicyRequest { + Project project = 1; + string name = 2; + optional bool enabled = 3; + optional string branch_pattern = 4; + optional string title_pattern = 5; + optional string author_pattern = 6; + optional string commit_message_pattern = 7; + optional string source_type_pattern = 8; + repeated string target_environments = 9; + repeated string target_destinations = 10; + optional bool force_release = 11; + optional bool use_pipeline = 12; +} +message UpdateAutoReleasePolicyResponse { + AutoReleasePolicy policy = 1; +} + +message DeleteAutoReleasePolicyRequest { + Project project = 1; + string name = 2; +} +message DeleteAutoReleasePolicyResponse {} + +message ListAutoReleasePoliciesRequest { + Project project = 1; +} +message ListAutoReleasePoliciesResponse { + repeated AutoReleasePolicy policies = 1; +} + +service AutoReleasePolicyService { + rpc CreateAutoReleasePolicy(CreateAutoReleasePolicyRequest) returns (CreateAutoReleasePolicyResponse); + rpc UpdateAutoReleasePolicy(UpdateAutoReleasePolicyRequest) returns (UpdateAutoReleasePolicyResponse); + rpc DeleteAutoReleasePolicy(DeleteAutoReleasePolicyRequest) returns (DeleteAutoReleasePolicyResponse); + rpc ListAutoReleasePolicies(ListAutoReleasePoliciesRequest) returns (ListAutoReleasePoliciesResponse); +} diff --git a/interface/proto/forest/v1/destinations.proto b/interface/proto/forest/v1/destinations.proto new file mode 100644 index 0000000..e32c6e9 --- 
/dev/null +++ b/interface/proto/forest/v1/destinations.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; + +package forest.v1; + +message CreateDestinationRequest { + string name = 1; + string environment = 2; + map<string, string> metadata = 3; + DestinationType type = 4; + string organisation = 5; +} +message CreateDestinationResponse {} + +message UpdateDestinationRequest { + string name = 1; + map<string, string> metadata = 2; +} +message UpdateDestinationResponse {} + +message DeleteDestinationRequest { + string name = 1; +} +message DeleteDestinationResponse {} + +message GetDestinationsRequest { + string organisation = 1; +} +message GetDestinationsResponse { + repeated Destination destinations = 1; +} + +message ListDestinationTypesRequest {} +message ListDestinationTypesResponse { + repeated DestinationType types = 1; +} + +service DestinationService { + rpc CreateDestination(CreateDestinationRequest) returns (CreateDestinationResponse) {} + rpc UpdateDestination(UpdateDestinationRequest) returns (UpdateDestinationResponse) {} + rpc DeleteDestination(DeleteDestinationRequest) returns (DeleteDestinationResponse) {} + rpc GetDestinations(GetDestinationsRequest) returns (GetDestinationsResponse); + rpc ListDestinationTypes(ListDestinationTypesRequest) returns (ListDestinationTypesResponse); +} + +message Destination { + string name = 1; + string environment = 2; + map<string, string> metadata = 3; + DestinationType type = 4; + string organisation = 5; +} + +message DestinationType { + string organisation = 1; + string name = 2; + uint64 version = 3; +} diff --git a/interface/proto/forest/v1/environments.proto b/interface/proto/forest/v1/environments.proto new file mode 100644 index 0000000..d5b3f13 --- /dev/null +++ b/interface/proto/forest/v1/environments.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; + +package forest.v1; + +message Environment { + string id = 1; + string organisation = 2; + string name = 3; + optional string description = 4; + int32 sort_order = 5; + string created_at = 6; +} + +message 
CreateEnvironmentRequest { + string organisation = 1; + string name = 2; + optional string description = 3; + int32 sort_order = 4; +} +message CreateEnvironmentResponse { + Environment environment = 1; +} + +message GetEnvironmentRequest { + oneof identifier { + string id = 1; + EnvironmentLookup lookup = 2; + } +} + +message EnvironmentLookup { + string organisation = 1; + string name = 2; +} + +message GetEnvironmentResponse { + Environment environment = 1; +} + +message ListEnvironmentsRequest { + string organisation = 1; +} +message ListEnvironmentsResponse { + repeated Environment environments = 1; +} + +message UpdateEnvironmentRequest { + string id = 1; + optional string description = 2; + optional int32 sort_order = 3; +} +message UpdateEnvironmentResponse { + Environment environment = 1; +} + +message DeleteEnvironmentRequest { + string id = 1; +} +message DeleteEnvironmentResponse {} + +service EnvironmentService { + rpc CreateEnvironment(CreateEnvironmentRequest) returns (CreateEnvironmentResponse); + rpc GetEnvironment(GetEnvironmentRequest) returns (GetEnvironmentResponse); + rpc ListEnvironments(ListEnvironmentsRequest) returns (ListEnvironmentsResponse); + rpc UpdateEnvironment(UpdateEnvironmentRequest) returns (UpdateEnvironmentResponse); + rpc DeleteEnvironment(DeleteEnvironmentRequest) returns (DeleteEnvironmentResponse); +} diff --git a/interface/proto/forest/v1/release_pipelines.proto b/interface/proto/forest/v1/release_pipelines.proto new file mode 100644 index 0000000..edacc03 --- /dev/null +++ b/interface/proto/forest/v1/release_pipelines.proto @@ -0,0 +1,100 @@ +syntax = "proto3"; + +package forest.v1; + +import "forest/v1/releases.proto"; + +// ── Stage type enum (useful for UI dropdowns / filtering) ──────────── + +enum StageType { + STAGE_TYPE_UNSPECIFIED = 0; + STAGE_TYPE_DEPLOY = 1; + STAGE_TYPE_WAIT = 2; +} + +// ── Per-type config messages ───────────────────────────────────────── + +message DeployStageConfig { + string environment = 
1; +} + +message WaitStageConfig { + int64 duration_seconds = 1; +} + +// ── A single pipeline stage ────────────────────────────────────────── + +message PipelineStage { + string id = 1; + repeated string depends_on = 2; + + oneof config { + DeployStageConfig deploy = 10; + WaitStageConfig wait = 11; + } +} + +// ── Runtime stage status (for observing pipeline progress) ─────────── + +enum PipelineStageStatus { + PIPELINE_STAGE_STATUS_UNSPECIFIED = 0; + PIPELINE_STAGE_STATUS_PENDING = 1; + PIPELINE_STAGE_STATUS_ACTIVE = 2; + PIPELINE_STAGE_STATUS_SUCCEEDED = 3; + PIPELINE_STAGE_STATUS_FAILED = 4; + PIPELINE_STAGE_STATUS_CANCELLED = 5; +} + +// ── Pipeline resource ──────────────────────────────────────────────── + +message ReleasePipeline { + string id = 1; + string name = 2; + bool enabled = 3; + repeated PipelineStage stages = 4; + string created_at = 5; + string updated_at = 6; +} + +// ── CRUD messages ──────────────────────────────────────────────────── + +message CreateReleasePipelineRequest { + Project project = 1; + string name = 2; + repeated PipelineStage stages = 3; +} +message CreateReleasePipelineResponse { + ReleasePipeline pipeline = 1; +} + +message UpdateReleasePipelineRequest { + Project project = 1; + string name = 2; + optional bool enabled = 3; + // When set, replaces all stages. When absent, stages are unchanged. 
+ repeated PipelineStage stages = 4; + bool update_stages = 5; +} +message UpdateReleasePipelineResponse { + ReleasePipeline pipeline = 1; +} + +message DeleteReleasePipelineRequest { + Project project = 1; + string name = 2; +} +message DeleteReleasePipelineResponse {} + +message ListReleasePipelinesRequest { + Project project = 1; +} +message ListReleasePipelinesResponse { + repeated ReleasePipeline pipelines = 1; +} + +service ReleasePipelineService { + rpc CreateReleasePipeline(CreateReleasePipelineRequest) returns (CreateReleasePipelineResponse); + rpc UpdateReleasePipeline(UpdateReleasePipelineRequest) returns (UpdateReleasePipelineResponse); + rpc DeleteReleasePipeline(DeleteReleasePipelineRequest) returns (DeleteReleasePipelineResponse); + rpc ListReleasePipelines(ListReleasePipelinesRequest) returns (ListReleasePipelinesResponse); +} diff --git a/interface/proto/forest/v1/releases.proto b/interface/proto/forest/v1/releases.proto index 6595f82..31c80c6 100644 --- a/interface/proto/forest/v1/releases.proto +++ b/interface/proto/forest/v1/releases.proto @@ -31,6 +31,10 @@ message ReleaseRequest { string artifact_id = 1; repeated string destinations = 2; repeated string environments = 3; + bool force = 4; + // When true, use the project's release pipeline (DAG) instead of + // deploying directly to the specified destinations/environments. 
+ bool use_pipeline = 5; } message ReleaseResponse { // List of release intents created (one per destination) @@ -88,6 +92,54 @@ message GetProjectsResponse { +message GetReleasesByActorRequest { + string actor_id = 1; // user_id or app_id + string actor_type = 2; // "user" or "app" + int32 page_size = 3; + string page_token = 4; +} + +message GetReleasesByActorResponse { + repeated ReleaseIntentSummary releases = 1; + string next_page_token = 2; +} + +message ReleaseIntentSummary { + string release_intent_id = 1; + string artifact_id = 2; + Project project = 3; + repeated ReleaseDestinationStatus destinations = 4; + string created_at = 5; +} + +message ReleaseDestinationStatus { + string destination = 1; + string environment = 2; + string status = 3; +} + +message GetDestinationStatesRequest { + string organisation = 1; + optional string project = 2; +} + +message GetDestinationStatesResponse { + repeated DestinationState destinations = 1; +} + +message DestinationState { + string destination_id = 1; + string destination_name = 2; + string environment = 3; + optional string release_id = 4; + optional string artifact_id = 5; + optional string status = 6; + optional string error_message = 7; + optional string queued_at = 8; + optional string completed_at = 9; + optional int32 queue_position = 10; +} + service ReleaseService { rpc AnnotateRelease(AnnotateReleaseRequest) returns (AnnotateReleaseResponse); rpc Release(ReleaseRequest) returns (ReleaseResponse); @@ -95,8 +147,10 @@ service ReleaseService { rpc GetArtifactBySlug(GetArtifactBySlugRequest) returns (GetArtifactBySlugResponse); rpc GetArtifactsByProject(GetArtifactsByProjectRequest) returns (GetArtifactsByProjectResponse); + rpc GetReleasesByActor(GetReleasesByActorRequest) returns (GetReleasesByActorResponse); rpc GetOrganisations(GetOrganisationsRequest) returns (GetOrganisationsResponse); rpc GetProjects(GetProjectsRequest) returns (GetProjectsResponse); + rpc 
GetDestinationStates(GetDestinationStatesRequest) returns (GetDestinationStatesResponse); } message Source { @@ -131,6 +185,7 @@ message ArtifactDestination { string type_organisation = 3; string type_name = 4; uint64 type_version = 5; + string status = 6; } message Project { diff --git a/interface/proto/forest/v1/users.proto b/interface/proto/forest/v1/users.proto index 527165d..a348e2f 100644 --- a/interface/proto/forest/v1/users.proto +++ b/interface/proto/forest/v1/users.proto @@ -18,6 +18,9 @@ service UsersService { rpc DeleteUser(DeleteUserRequest) returns (DeleteUserResponse); rpc ListUsers(ListUsersRequest) returns (ListUsersResponse); + // Stats + rpc GetUserStats(GetUserStatsRequest) returns (GetUserStatsResponse); + // Password management rpc ChangePassword(ChangePasswordRequest) returns (ChangePasswordResponse); @@ -280,6 +283,28 @@ message DeletePersonalAccessTokenRequest { message DeletePersonalAccessTokenResponse {} +// ─── Stats ────────────────────────────────────────────────────────── + +message GetUserStatsRequest { + oneof identifier { + string user_id = 1; + string username = 2; + } +} + +message GetUserStatsResponse { + UserStats stats = 1; +} + +message UserStats { + int64 total_releases = 1; + int64 successful_releases = 2; + int64 failed_releases = 3; + int64 in_progress_releases = 4; + int64 total_annotations = 5; + int64 total_uploads = 6; +} + // ─── MFA ───────────────────────────────────────────────────────────── enum MfaType { diff --git a/mise.toml b/mise.toml index 7b544a1..cf5a06b 100644 --- a/mise.toml +++ b/mise.toml @@ -1,11 +1,15 @@ [tools] rust = "latest" +[env] +_.file = ".env" + # ─── Core Development ────────────────────────────────────────────── [tasks.develop] alias = ["d", "dev"] description = "Start the forage development server" +depends = ["tailwind:build"] run = "cargo run -p forage-server" [tasks.build] @@ -97,6 +101,8 @@ run = "cargo sqlx prepare --workspace" [tasks."tailwind:build"] description = "Build tailwind 
CSS" +sources = ["templates/**/*.jinja", "static/css/input.css", "static/js/**/*.js"] +outputs = ["static/css/style.css"] run = "npx @tailwindcss/cli -i static/css/input.css -o static/css/style.css --minify" [tasks."tailwind:watch"] diff --git a/nav-dashboard-final.png b/nav-dashboard-final.png deleted file mode 100644 index be3eb66..0000000 Binary files a/nav-dashboard-final.png and /dev/null differ diff --git a/nav-dashboard.png b/nav-dashboard.png deleted file mode 100644 index 9a4d8d3..0000000 Binary files a/nav-dashboard.png and /dev/null differ diff --git a/nav-final-account.png b/nav-final-account.png deleted file mode 100644 index 860b6ea..0000000 Binary files a/nav-final-account.png and /dev/null differ diff --git a/nav-final-padded.png b/nav-final-padded.png deleted file mode 100644 index be3eb66..0000000 Binary files a/nav-final-padded.png and /dev/null differ diff --git a/nav-members.png b/nav-members.png deleted file mode 100644 index a7545bb..0000000 Binary files a/nav-members.png and /dev/null differ diff --git a/nav-projects.png b/nav-projects.png deleted file mode 100644 index 1921f00..0000000 Binary files a/nav-projects.png and /dev/null differ diff --git a/nav-pt3.png b/nav-pt3.png deleted file mode 100644 index 3541757..0000000 Binary files a/nav-pt3.png and /dev/null differ diff --git a/nav-pt5.png b/nav-pt5.png deleted file mode 100644 index 2dd7415..0000000 Binary files a/nav-pt5.png and /dev/null differ diff --git a/nav-spacing-fixed.png b/nav-spacing-fixed.png deleted file mode 100644 index e66a7c6..0000000 Binary files a/nav-spacing-fixed.png and /dev/null differ diff --git a/nav-usage.png b/nav-usage.png deleted file mode 100644 index f0847fc..0000000 Binary files a/nav-usage.png and /dev/null differ diff --git a/nav-with-tailwind.png b/nav-with-tailwind.png deleted file mode 100644 index be3eb66..0000000 Binary files a/nav-with-tailwind.png and /dev/null differ diff --git a/releases-collapsed.png b/releases-collapsed.png deleted file 
mode 100644 index d5af6c7..0000000 Binary files a/releases-collapsed.png and /dev/null differ diff --git a/releases-current.png b/releases-current.png deleted file mode 100644 index 79d2bdd..0000000 Binary files a/releases-current.png and /dev/null differ diff --git a/releases-v10-avatar-dots.png b/releases-v10-avatar-dots.png deleted file mode 100644 index 2462b7a..0000000 Binary files a/releases-v10-avatar-dots.png and /dev/null differ diff --git a/releases-v2.png b/releases-v2.png deleted file mode 100644 index c38bd6e..0000000 Binary files a/releases-v2.png and /dev/null differ diff --git a/releases-v3.png b/releases-v3.png deleted file mode 100644 index 57415e0..0000000 Binary files a/releases-v3.png and /dev/null differ diff --git a/releases-v4.png b/releases-v4.png deleted file mode 100644 index a11b5f8..0000000 Binary files a/releases-v4.png and /dev/null differ diff --git a/releases-v5.png b/releases-v5.png deleted file mode 100644 index 04729ba..0000000 Binary files a/releases-v5.png and /dev/null differ diff --git a/releases-v6-dots.png b/releases-v6-dots.png deleted file mode 100644 index f0d2f84..0000000 Binary files a/releases-v6-dots.png and /dev/null differ diff --git a/releases-v7-hidden.png b/releases-v7-hidden.png deleted file mode 100644 index 82b92e3..0000000 Binary files a/releases-v7-hidden.png and /dev/null differ diff --git a/releases-v8-expanded.png b/releases-v8-expanded.png deleted file mode 100644 index 80518cd..0000000 Binary files a/releases-v8-expanded.png and /dev/null differ diff --git a/releases-v9-toggle.png b/releases-v9-toggle.png deleted file mode 100644 index f09cedc..0000000 Binary files a/releases-v9-toggle.png and /dev/null differ diff --git a/static/css/input.css b/static/css/input.css index f1d8c73..b1cdc61 100644 --- a/static/css/input.css +++ b/static/css/input.css @@ -1 +1,70 @@ @import "tailwindcss"; + +/* ── Dark mode (system preference) ──────────────────────────────────────── */ +/* Remap Tailwind's color 
variables so all existing utilities adapt automatically. */ +@media (prefers-color-scheme: dark) { + :root, :host { + /* Neutrals — invert the gray scale */ + --color-white: oklch(14.5% 0.015 260); + --color-black: oklch(98% 0.002 248); + --color-gray-50: oklch(17.5% 0.02 260); + --color-gray-100: oklch(21% 0.024 265); + --color-gray-200: oklch(27.8% 0.025 257); + --color-gray-300: oklch(37.3% 0.025 260); + --color-gray-400: oklch(55.1% 0.02 264); + --color-gray-500: oklch(60% 0.02 264); + --color-gray-600: oklch(70.7% 0.017 261); + --color-gray-700: oklch(80% 0.012 258); + --color-gray-800: oklch(87.2% 0.008 258); + --color-gray-900: oklch(93% 0.005 265); + --color-gray-950: oklch(96.7% 0.003 265); + + /* Green — darken light tints, lighten dark shades */ + --color-green-50: oklch(20% 0.04 155); + --color-green-100: oklch(25% 0.06 155); + --color-green-200: oklch(30% 0.08 155); + --color-green-300: oklch(42% 0.12 154); + --color-green-700: oklch(75% 0.15 150); + --color-green-800: oklch(80% 0.12 150); + + /* Red */ + --color-red-50: oklch(22% 0.04 17); + --color-red-200: oklch(32% 0.06 18); + --color-red-600: oklch(65% 0.2 27); + --color-red-700: oklch(72% 0.18 27); + --color-red-800: oklch(77% 0.15 27); + + /* Blue */ + --color-blue-100: oklch(22% 0.04 255); + --color-blue-600: oklch(62% 0.2 263); + --color-blue-700: oklch(72% 0.17 264); + --color-blue-800: oklch(77% 0.15 265); + + /* Orange */ + --color-orange-100: oklch(25% 0.05 75); + --color-orange-800: oklch(78% 0.13 37); + + /* Yellow */ + --color-yellow-100: oklch(25% 0.06 103); + --color-yellow-700: oklch(72% 0.12 66); + --color-yellow-800: oklch(77% 0.1 62); + + /* Violet */ + --color-violet-100: oklch(22% 0.04 295); + --color-violet-200: oklch(28% 0.06 294); + --color-violet-400: oklch(45% 0.14 293); + --color-violet-600: oklch(60% 0.2 293); + --color-violet-800: oklch(75% 0.18 293); + + /* Purple */ + --color-purple-100: oklch(22% 0.04 307); + --color-purple-800: oklch(75% 0.17 304); + + /* Pink */ + 
--color-pink-100: oklch(22% 0.04 342); + --color-pink-800: oklch(75% 0.15 4); + + /* Amber */ + --color-amber-400: oklch(80% 0.17 84); + } +} diff --git a/static/css/style.css b/static/css/style.css index 4b12f05..3d14425 100644 --- a/static/css/style.css +++ b/static/css/style.css @@ -1,1332 +1,2 @@ /*! tailwindcss v4.2.1 | MIT License | https://tailwindcss.com */ -@layer properties; -@layer theme, base, components, utilities; -@layer theme { - :root, :host { - --font-sans: ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", - "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; - --font-mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", - "Courier New", monospace; - --color-red-50: oklch(97.1% 0.013 17.38); - --color-red-200: oklch(88.5% 0.062 18.334); - --color-red-600: oklch(57.7% 0.245 27.325); - --color-red-700: oklch(50.5% 0.213 27.518); - --color-red-800: oklch(44.4% 0.177 26.899); - --color-orange-100: oklch(95.4% 0.038 75.164); - --color-orange-500: oklch(70.5% 0.213 47.604); - --color-orange-800: oklch(47% 0.157 37.304); - --color-amber-400: oklch(82.8% 0.189 84.429); - --color-yellow-100: oklch(97.3% 0.071 103.193); - --color-yellow-500: oklch(79.5% 0.184 86.047); - --color-yellow-700: oklch(55.4% 0.135 66.442); - --color-yellow-800: oklch(47.6% 0.114 61.907); - --color-green-50: oklch(98.2% 0.018 155.826); - --color-green-100: oklch(96.2% 0.044 156.743); - --color-green-200: oklch(92.5% 0.084 155.995); - --color-green-300: oklch(87.1% 0.15 154.449); - --color-green-400: oklch(79.2% 0.209 151.711); - --color-green-500: oklch(72.3% 0.219 149.579); - --color-green-700: oklch(52.7% 0.154 150.069); - --color-green-800: oklch(44.8% 0.119 151.328); - --color-blue-50: oklch(97% 0.014 254.604); - --color-blue-100: oklch(93.2% 0.032 255.585); - --color-blue-400: oklch(70.7% 0.165 254.624); - --color-blue-600: oklch(54.6% 0.245 262.881); - --color-blue-700: oklch(48.8% 0.243 264.376); - --color-blue-800: oklch(42.4% 0.199 
265.638); - --color-violet-100: oklch(94.3% 0.029 294.588); - --color-violet-500: oklch(60.6% 0.25 292.717); - --color-violet-800: oklch(43.2% 0.232 292.759); - --color-purple-100: oklch(94.6% 0.033 307.174); - --color-purple-400: oklch(71.4% 0.203 305.504); - --color-purple-500: oklch(62.7% 0.265 303.9); - --color-purple-800: oklch(43.8% 0.218 303.724); - --color-pink-100: oklch(94.8% 0.028 342.258); - --color-pink-500: oklch(65.6% 0.241 354.308); - --color-pink-800: oklch(45.9% 0.187 3.815); - --color-gray-50: oklch(98.5% 0.002 247.839); - --color-gray-100: oklch(96.7% 0.003 264.542); - --color-gray-200: oklch(92.8% 0.006 264.531); - --color-gray-300: oklch(87.2% 0.01 258.338); - --color-gray-400: oklch(70.7% 0.022 261.325); - --color-gray-500: oklch(55.1% 0.027 264.364); - --color-gray-600: oklch(44.6% 0.03 256.802); - --color-gray-700: oklch(37.3% 0.034 259.733); - --color-gray-800: oklch(27.8% 0.033 256.848); - --color-gray-900: oklch(21% 0.034 264.665); - --color-gray-950: oklch(13% 0.028 261.692); - --color-black: #000; - --color-white: #fff; - --spacing: 0.25rem; - --container-md: 28rem; - --container-lg: 32rem; - --container-2xl: 42rem; - --container-4xl: 56rem; - --container-5xl: 64rem; - --container-6xl: 72rem; - --text-xs: 0.75rem; - --text-xs--line-height: calc(1 / 0.75); - --text-sm: 0.875rem; - --text-sm--line-height: calc(1.25 / 0.875); - --text-base: 1rem; - --text-base--line-height: calc(1.5 / 1); - --text-lg: 1.125rem; - --text-lg--line-height: calc(1.75 / 1.125); - --text-xl: 1.25rem; - --text-xl--line-height: calc(1.75 / 1.25); - --text-2xl: 1.5rem; - --text-2xl--line-height: calc(2 / 1.5); - --text-3xl: 1.875rem; - --text-3xl--line-height: calc(2.25 / 1.875); - --text-4xl: 2.25rem; - --text-4xl--line-height: calc(2.5 / 2.25); - --text-5xl: 3rem; - --text-5xl--line-height: 1; - --text-6xl: 3.75rem; - --text-6xl--line-height: 1; - --font-weight-normal: 400; - --font-weight-medium: 500; - --font-weight-semibold: 600; - --font-weight-bold: 700; - 
--tracking-tight: -0.025em; - --tracking-wide: 0.025em; - --leading-tight: 1.25; - --radius-md: 0.375rem; - --radius-lg: 0.5rem; - --default-transition-duration: 150ms; - --default-transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); - --default-font-family: var(--font-sans); - --default-mono-font-family: var(--font-mono); - } -} -@layer base { - *, ::after, ::before, ::backdrop, ::file-selector-button { - box-sizing: border-box; - margin: 0; - padding: 0; - border: 0 solid; - } - html, :host { - line-height: 1.5; - -webkit-text-size-adjust: 100%; - tab-size: 4; - font-family: var(--default-font-family, ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"); - font-feature-settings: var(--default-font-feature-settings, normal); - font-variation-settings: var(--default-font-variation-settings, normal); - -webkit-tap-highlight-color: transparent; - } - hr { - height: 0; - color: inherit; - border-top-width: 1px; - } - abbr:where([title]) { - -webkit-text-decoration: underline dotted; - text-decoration: underline dotted; - } - h1, h2, h3, h4, h5, h6 { - font-size: inherit; - font-weight: inherit; - } - a { - color: inherit; - -webkit-text-decoration: inherit; - text-decoration: inherit; - } - b, strong { - font-weight: bolder; - } - code, kbd, samp, pre { - font-family: var(--default-mono-font-family, ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace); - font-feature-settings: var(--default-mono-font-feature-settings, normal); - font-variation-settings: var(--default-mono-font-variation-settings, normal); - font-size: 1em; - } - small { - font-size: 80%; - } - sub, sup { - font-size: 75%; - line-height: 0; - position: relative; - vertical-align: baseline; - } - sub { - bottom: -0.25em; - } - sup { - top: -0.5em; - } - table { - text-indent: 0; - border-color: inherit; - border-collapse: collapse; - } - :-moz-focusring { - outline: auto; - } - progress { - 
vertical-align: baseline; - } - summary { - display: list-item; - } - ol, ul, menu { - list-style: none; - } - img, svg, video, canvas, audio, iframe, embed, object { - display: block; - vertical-align: middle; - } - img, video { - max-width: 100%; - height: auto; - } - button, input, select, optgroup, textarea, ::file-selector-button { - font: inherit; - font-feature-settings: inherit; - font-variation-settings: inherit; - letter-spacing: inherit; - color: inherit; - border-radius: 0; - background-color: transparent; - opacity: 1; - } - :where(select:is([multiple], [size])) optgroup { - font-weight: bolder; - } - :where(select:is([multiple], [size])) optgroup option { - padding-inline-start: 20px; - } - ::file-selector-button { - margin-inline-end: 4px; - } - ::placeholder { - opacity: 1; - } - @supports (not (-webkit-appearance: -apple-pay-button)) or (contain-intrinsic-size: 1px) { - ::placeholder { - color: currentcolor; - @supports (color: color-mix(in lab, red, red)) { - color: color-mix(in oklab, currentcolor 50%, transparent); - } - } - } - textarea { - resize: vertical; - } - ::-webkit-search-decoration { - -webkit-appearance: none; - } - ::-webkit-date-and-time-value { - min-height: 1lh; - text-align: inherit; - } - ::-webkit-datetime-edit { - display: inline-flex; - } - ::-webkit-datetime-edit-fields-wrapper { - padding: 0; - } - ::-webkit-datetime-edit, ::-webkit-datetime-edit-year-field, ::-webkit-datetime-edit-month-field, ::-webkit-datetime-edit-day-field, ::-webkit-datetime-edit-hour-field, ::-webkit-datetime-edit-minute-field, ::-webkit-datetime-edit-second-field, ::-webkit-datetime-edit-millisecond-field, ::-webkit-datetime-edit-meridiem-field { - padding-block: 0; - } - ::-webkit-calendar-picker-indicator { - line-height: 1; - } - :-moz-ui-invalid { - box-shadow: none; - } - button, input:where([type="button"], [type="reset"], [type="submit"]), ::file-selector-button { - appearance: button; - } - ::-webkit-inner-spin-button, 
::-webkit-outer-spin-button { - height: auto; - } - [hidden]:where(:not([hidden="until-found"])) { - display: none !important; - } -} -@layer utilities { - .visible { - visibility: visible; - } - .absolute { - position: absolute; - } - .fixed { - position: fixed; - } - .relative { - position: relative; - } - .static { - position: static; - } - .-top-3 { - top: calc(var(--spacing) * -3); - } - .left-0 { - left: calc(var(--spacing) * 0); - } - .left-4 { - left: calc(var(--spacing) * 4); - } - .z-20 { - z-index: 20; - } - .container { - width: 100%; - @media (width >= 40rem) { - max-width: 40rem; - } - @media (width >= 48rem) { - max-width: 48rem; - } - @media (width >= 64rem) { - max-width: 64rem; - } - @media (width >= 80rem) { - max-width: 80rem; - } - @media (width >= 96rem) { - max-width: 96rem; - } - } - .mx-auto { - margin-inline: auto; - } - .mt-0\.5 { - margin-top: calc(var(--spacing) * 0.5); - } - .mt-1 { - margin-top: calc(var(--spacing) * 1); - } - .mt-2 { - margin-top: calc(var(--spacing) * 2); - } - .mt-4 { - margin-top: calc(var(--spacing) * 4); - } - .mt-6 { - margin-top: calc(var(--spacing) * 6); - } - .mt-8 { - margin-top: calc(var(--spacing) * 8); - } - .mt-10 { - margin-top: calc(var(--spacing) * 10); - } - .mt-12 { - margin-top: calc(var(--spacing) * 12); - } - .mt-24 { - margin-top: calc(var(--spacing) * 24); - } - .-mb-px { - margin-bottom: -1px; - } - .mb-1 { - margin-bottom: calc(var(--spacing) * 1); - } - .mb-2 { - margin-bottom: calc(var(--spacing) * 2); - } - .mb-3 { - margin-bottom: calc(var(--spacing) * 3); - } - .mb-4 { - margin-bottom: calc(var(--spacing) * 4); - } - .mb-6 { - margin-bottom: calc(var(--spacing) * 6); - } - .mb-8 { - margin-bottom: calc(var(--spacing) * 8); - } - .mb-12 { - margin-bottom: calc(var(--spacing) * 12); - } - .ml-0\.5 { - margin-left: calc(var(--spacing) * 0.5); - } - .ml-2 { - margin-left: calc(var(--spacing) * 2); - } - .ml-4 { - margin-left: calc(var(--spacing) * 4); - } - .ml-auto { - margin-left: auto; - 
} - .block { - display: block; - } - .flex { - display: flex; - } - .grid { - display: grid; - } - .hidden { - display: none; - } - .inline { - display: inline; - } - .inline-block { - display: inline-block; - } - .inline-flex { - display: inline-flex; - } - .table { - display: table; - } - .h-1\.5 { - height: calc(var(--spacing) * 1.5); - } - .h-2 { - height: calc(var(--spacing) * 2); - } - .h-3 { - height: calc(var(--spacing) * 3); - } - .h-3\.5 { - height: calc(var(--spacing) * 3.5); - } - .h-4 { - height: calc(var(--spacing) * 4); - } - .h-6 { - height: calc(var(--spacing) * 6); - } - .h-12 { - height: calc(var(--spacing) * 12); - } - .w-1\.5 { - width: calc(var(--spacing) * 1.5); - } - .w-2 { - width: calc(var(--spacing) * 2); - } - .w-3 { - width: calc(var(--spacing) * 3); - } - .w-3\.5 { - width: calc(var(--spacing) * 3.5); - } - .w-4 { - width: calc(var(--spacing) * 4); - } - .w-6 { - width: calc(var(--spacing) * 6); - } - .w-12 { - width: calc(var(--spacing) * 12); - } - .w-48 { - width: calc(var(--spacing) * 48); - } - .w-full { - width: 100%; - } - .max-w-2xl { - max-width: var(--container-2xl); - } - .max-w-4xl { - max-width: var(--container-4xl); - } - .max-w-5xl { - max-width: var(--container-5xl); - } - .max-w-6xl { - max-width: var(--container-6xl); - } - .max-w-lg { - max-width: var(--container-lg); - } - .max-w-md { - max-width: var(--container-md); - } - .min-w-0 { - min-width: calc(var(--spacing) * 0); - } - .min-w-\[140px\] { - min-width: 140px; - } - .flex-1 { - flex: 1; - } - .shrink-0 { - flex-shrink: 0; - } - .transform { - transform: var(--tw-rotate-x,) var(--tw-rotate-y,) var(--tw-rotate-z,) var(--tw-skew-x,) var(--tw-skew-y,); - } - .cursor-not-allowed { - cursor: not-allowed; - } - .cursor-pointer { - cursor: pointer; - } - .list-none { - list-style-type: none; - } - .grid-cols-1 { - grid-template-columns: repeat(1, minmax(0, 1fr)); - } - .grid-cols-2 { - grid-template-columns: repeat(2, minmax(0, 1fr)); - } - .flex-wrap { - flex-wrap: 
wrap; - } - .items-center { - align-items: center; - } - .items-end { - align-items: flex-end; - } - .justify-between { - justify-content: space-between; - } - .justify-center { - justify-content: center; - } - .justify-end { - justify-content: flex-end; - } - .gap-1 { - gap: calc(var(--spacing) * 1); - } - .gap-1\.5 { - gap: calc(var(--spacing) * 1.5); - } - .gap-2 { - gap: calc(var(--spacing) * 2); - } - .gap-3 { - gap: calc(var(--spacing) * 3); - } - .gap-4 { - gap: calc(var(--spacing) * 4); - } - .gap-6 { - gap: calc(var(--spacing) * 6); - } - .gap-8 { - gap: calc(var(--spacing) * 8); - } - .space-y-2 { - :where(& > :not(:last-child)) { - --tw-space-y-reverse: 0; - margin-block-start: calc(calc(var(--spacing) * 2) * var(--tw-space-y-reverse)); - margin-block-end: calc(calc(var(--spacing) * 2) * calc(1 - var(--tw-space-y-reverse))); - } - } - .space-y-3 { - :where(& > :not(:last-child)) { - --tw-space-y-reverse: 0; - margin-block-start: calc(calc(var(--spacing) * 3) * var(--tw-space-y-reverse)); - margin-block-end: calc(calc(var(--spacing) * 3) * calc(1 - var(--tw-space-y-reverse))); - } - } - .space-y-4 { - :where(& > :not(:last-child)) { - --tw-space-y-reverse: 0; - margin-block-start: calc(calc(var(--spacing) * 4) * var(--tw-space-y-reverse)); - margin-block-end: calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-y-reverse))); - } - } - .divide-y { - :where(& > :not(:last-child)) { - --tw-divide-y-reverse: 0; - border-bottom-style: var(--tw-border-style); - border-top-style: var(--tw-border-style); - border-top-width: calc(1px * var(--tw-divide-y-reverse)); - border-bottom-width: calc(1px * calc(1 - var(--tw-divide-y-reverse))); - } - } - .divide-gray-100 { - :where(& > :not(:last-child)) { - border-color: var(--color-gray-100); - } - } - .divide-gray-200 { - :where(& > :not(:last-child)) { - border-color: var(--color-gray-200); - } - } - .truncate { - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; - } - .overflow-hidden { - 
overflow: hidden; - } - .overflow-x-auto { - overflow-x: auto; - } - .rounded { - border-radius: 0.25rem; - } - .rounded-full { - border-radius: calc(infinity * 1px); - } - .rounded-lg { - border-radius: var(--radius-lg); - } - .rounded-md { - border-radius: var(--radius-md); - } - .border { - border-style: var(--tw-border-style); - border-width: 1px; - } - .border-2 { - border-style: var(--tw-border-style); - border-width: 2px; - } - .border-t { - border-top-style: var(--tw-border-style); - border-top-width: 1px; - } - .border-b { - border-bottom-style: var(--tw-border-style); - border-bottom-width: 1px; - } - .border-b-2 { - border-bottom-style: var(--tw-border-style); - border-bottom-width: 2px; - } - .border-gray-50 { - border-color: var(--color-gray-50); - } - .border-gray-100 { - border-color: var(--color-gray-100); - } - .border-gray-200 { - border-color: var(--color-gray-200); - } - .border-gray-300 { - border-color: var(--color-gray-300); - } - .border-gray-900 { - border-color: var(--color-gray-900); - } - .border-green-200 { - border-color: var(--color-green-200); - } - .border-green-300 { - border-color: var(--color-green-300); - } - .border-red-200 { - border-color: var(--color-red-200); - } - .border-transparent { - border-color: transparent; - } - .bg-blue-50 { - background-color: var(--color-blue-50); - } - .bg-blue-100 { - background-color: var(--color-blue-100); - } - .bg-gray-50 { - background-color: var(--color-gray-50); - } - .bg-gray-100 { - background-color: var(--color-gray-100); - } - .bg-gray-200 { - background-color: var(--color-gray-200); - } - .bg-gray-400 { - background-color: var(--color-gray-400); - } - .bg-gray-900 { - background-color: var(--color-gray-900); - } - .bg-gray-950 { - background-color: var(--color-gray-950); - } - .bg-green-50 { - background-color: var(--color-green-50); - } - .bg-green-100 { - background-color: var(--color-green-100); - } - .bg-orange-100 { - background-color: var(--color-orange-100); - } - 
.bg-orange-500 { - background-color: var(--color-orange-500); - } - .bg-pink-100 { - background-color: var(--color-pink-100); - } - .bg-pink-500 { - background-color: var(--color-pink-500); - } - .bg-purple-100 { - background-color: var(--color-purple-100); - } - .bg-purple-500 { - background-color: var(--color-purple-500); - } - .bg-red-50 { - background-color: var(--color-red-50); - } - .bg-violet-100 { - background-color: var(--color-violet-100); - } - .bg-violet-500 { - background-color: var(--color-violet-500); - } - .bg-white { - background-color: var(--color-white); - } - .bg-yellow-100 { - background-color: var(--color-yellow-100); - } - .bg-yellow-500 { - background-color: var(--color-yellow-500); - } - .p-3 { - padding: calc(var(--spacing) * 3); - } - .p-4 { - padding: calc(var(--spacing) * 4); - } - .p-5 { - padding: calc(var(--spacing) * 5); - } - .p-6 { - padding: calc(var(--spacing) * 6); - } - .p-8 { - padding: calc(var(--spacing) * 8); - } - .p-12 { - padding: calc(var(--spacing) * 12); - } - .px-1 { - padding-inline: calc(var(--spacing) * 1); - } - .px-1\.5 { - padding-inline: calc(var(--spacing) * 1.5); - } - .px-2 { - padding-inline: calc(var(--spacing) * 2); - } - .px-2\.5 { - padding-inline: calc(var(--spacing) * 2.5); - } - .px-3 { - padding-inline: calc(var(--spacing) * 3); - } - .px-4 { - padding-inline: calc(var(--spacing) * 4); - } - .px-5 { - padding-inline: calc(var(--spacing) * 5); - } - .px-6 { - padding-inline: calc(var(--spacing) * 6); - } - .py-0\.5 { - padding-block: calc(var(--spacing) * 0.5); - } - .py-1 { - padding-block: calc(var(--spacing) * 1); - } - .py-1\.5 { - padding-block: calc(var(--spacing) * 1.5); - } - .py-2 { - padding-block: calc(var(--spacing) * 2); - } - .py-3 { - padding-block: calc(var(--spacing) * 3); - } - .py-4 { - padding-block: calc(var(--spacing) * 4); - } - .py-10 { - padding-block: calc(var(--spacing) * 10); - } - .py-12 { - padding-block: calc(var(--spacing) * 12); - } - .py-16 { - padding-block: 
calc(var(--spacing) * 16); - } - .pt-3 { - padding-top: calc(var(--spacing) * 3); - } - .pt-8 { - padding-top: calc(var(--spacing) * 8); - } - .pt-12 { - padding-top: calc(var(--spacing) * 12); - } - .pt-16 { - padding-top: calc(var(--spacing) * 16); - } - .pt-24 { - padding-top: calc(var(--spacing) * 24); - } - .pb-3 { - padding-bottom: calc(var(--spacing) * 3); - } - .pb-8 { - padding-bottom: calc(var(--spacing) * 8); - } - .pb-16 { - padding-bottom: calc(var(--spacing) * 16); - } - .text-center { - text-align: center; - } - .text-left { - text-align: left; - } - .text-right { - text-align: right; - } - .font-mono { - font-family: var(--font-mono); - } - .text-2xl { - font-size: var(--text-2xl); - line-height: var(--tw-leading, var(--text-2xl--line-height)); - } - .text-3xl { - font-size: var(--text-3xl); - line-height: var(--tw-leading, var(--text-3xl--line-height)); - } - .text-4xl { - font-size: var(--text-4xl); - line-height: var(--tw-leading, var(--text-4xl--line-height)); - } - .text-5xl { - font-size: var(--text-5xl); - line-height: var(--tw-leading, var(--text-5xl--line-height)); - } - .text-6xl { - font-size: var(--text-6xl); - line-height: var(--tw-leading, var(--text-6xl--line-height)); - } - .text-base { - font-size: var(--text-base); - line-height: var(--tw-leading, var(--text-base--line-height)); - } - .text-lg { - font-size: var(--text-lg); - line-height: var(--tw-leading, var(--text-lg--line-height)); - } - .text-sm { - font-size: var(--text-sm); - line-height: var(--tw-leading, var(--text-sm--line-height)); - } - .text-xl { - font-size: var(--text-xl); - line-height: var(--tw-leading, var(--text-xl--line-height)); - } - .text-xs { - font-size: var(--text-xs); - line-height: var(--tw-leading, var(--text-xs--line-height)); - } - .leading-tight { - --tw-leading: var(--leading-tight); - line-height: var(--leading-tight); - } - .font-bold { - --tw-font-weight: var(--font-weight-bold); - font-weight: var(--font-weight-bold); - } - .font-medium { - 
--tw-font-weight: var(--font-weight-medium); - font-weight: var(--font-weight-medium); - } - .font-normal { - --tw-font-weight: var(--font-weight-normal); - font-weight: var(--font-weight-normal); - } - .font-semibold { - --tw-font-weight: var(--font-weight-semibold); - font-weight: var(--font-weight-semibold); - } - .tracking-tight { - --tw-tracking: var(--tracking-tight); - letter-spacing: var(--tracking-tight); - } - .tracking-wide { - --tw-tracking: var(--tracking-wide); - letter-spacing: var(--tracking-wide); - } - .break-all { - word-break: break-all; - } - .whitespace-nowrap { - white-space: nowrap; - } - .text-amber-400 { - color: var(--color-amber-400); - } - .text-blue-400 { - color: var(--color-blue-400); - } - .text-blue-600 { - color: var(--color-blue-600); - } - .text-blue-700 { - color: var(--color-blue-700); - } - .text-blue-800 { - color: var(--color-blue-800); - } - .text-gray-300 { - color: var(--color-gray-300); - } - .text-gray-400 { - color: var(--color-gray-400); - } - .text-gray-500 { - color: var(--color-gray-500); - } - .text-gray-600 { - color: var(--color-gray-600); - } - .text-gray-700 { - color: var(--color-gray-700); - } - .text-gray-800 { - color: var(--color-gray-800); - } - .text-gray-900 { - color: var(--color-gray-900); - } - .text-green-400 { - color: var(--color-green-400); - } - .text-green-500 { - color: var(--color-green-500); - } - .text-green-700 { - color: var(--color-green-700); - } - .text-green-800 { - color: var(--color-green-800); - } - .text-orange-800 { - color: var(--color-orange-800); - } - .text-pink-800 { - color: var(--color-pink-800); - } - .text-purple-400 { - color: var(--color-purple-400); - } - .text-purple-800 { - color: var(--color-purple-800); - } - .text-red-600 { - color: var(--color-red-600); - } - .text-red-700 { - color: var(--color-red-700); - } - .text-violet-800 { - color: var(--color-violet-800); - } - .text-white { - color: var(--color-white); - } - .text-yellow-700 { - color: 
var(--color-yellow-700); - } - .text-yellow-800 { - color: var(--color-yellow-800); - } - .lowercase { - text-transform: lowercase; - } - .uppercase { - text-transform: uppercase; - } - .underline { - text-decoration-line: underline; - } - .antialiased { - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; - } - .opacity-75 { - opacity: 75%; - } - .shadow-lg { - --tw-shadow: 0 10px 15px -3px var(--tw-shadow-color, rgb(0 0 0 / 0.1)), 0 4px 6px -4px var(--tw-shadow-color, rgb(0 0 0 / 0.1)); - box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); - } - .filter { - filter: var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,); - } - .transition-colors { - transition-property: color, background-color, border-color, outline-color, text-decoration-color, fill, stroke, --tw-gradient-from, --tw-gradient-via, --tw-gradient-to; - transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); - transition-duration: var(--tw-duration, var(--default-transition-duration)); - } - .transition-transform { - transition-property: transform, translate, scale, rotate; - transition-timing-function: var(--tw-ease, var(--default-transition-timing-function)); - transition-duration: var(--tw-duration, var(--default-transition-duration)); - } - .group-open\:hidden { - &:is(:where(.group):is([open], :popover-open, :open) *) { - display: none; - } - } - .group-open\:inline { - &:is(:where(.group):is([open], :popover-open, :open) *) { - display: inline; - } - } - .group-open\:rotate-90 { - &:is(:where(.group):is([open], :popover-open, :open) *) { - rotate: 90deg; - } - } - .hover\:border-gray-300 { - &:hover { - @media (hover: hover) { - border-color: var(--color-gray-300); - } - } - } - .hover\:border-gray-400 { - &:hover { - @media (hover: 
hover) { - border-color: var(--color-gray-400); - } - } - } - .hover\:bg-gray-50 { - &:hover { - @media (hover: hover) { - background-color: var(--color-gray-50); - } - } - } - .hover\:bg-gray-800 { - &:hover { - @media (hover: hover) { - background-color: var(--color-gray-800); - } - } - } - .hover\:bg-red-50 { - &:hover { - @media (hover: hover) { - background-color: var(--color-red-50); - } - } - } - .hover\:text-black { - &:hover { - @media (hover: hover) { - color: var(--color-black); - } - } - } - .hover\:text-blue-600 { - &:hover { - @media (hover: hover) { - color: var(--color-blue-600); - } - } - } - .hover\:text-gray-600 { - &:hover { - @media (hover: hover) { - color: var(--color-gray-600); - } - } - } - .hover\:text-gray-700 { - &:hover { - @media (hover: hover) { - color: var(--color-gray-700); - } - } - } - .hover\:text-gray-900 { - &:hover { - @media (hover: hover) { - color: var(--color-gray-900); - } - } - } - .hover\:text-red-800 { - &:hover { - @media (hover: hover) { - color: var(--color-red-800); - } - } - } - .hover\:underline { - &:hover { - @media (hover: hover) { - text-decoration-line: underline; - } - } - } - .focus\:border-transparent { - &:focus { - border-color: transparent; - } - } - .focus\:ring-2 { - &:focus { - --tw-ring-shadow: var(--tw-ring-inset,) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color, currentcolor); - box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); - } - } - .focus\:ring-gray-900 { - &:focus { - --tw-ring-color: var(--color-gray-900); - } - } - .focus\:outline-none { - &:focus { - --tw-outline-style: none; - outline-style: none; - } - } - .md\:grid-cols-2 { - @media (width >= 48rem) { - grid-template-columns: repeat(2, minmax(0, 1fr)); - } - } - .md\:grid-cols-3 { - @media (width >= 48rem) { - grid-template-columns: repeat(3, minmax(0, 1fr)); - } - } - .md\:grid-cols-4 { - @media (width >= 48rem) { - 
grid-template-columns: repeat(4, minmax(0, 1fr)); - } - } -} -@property --tw-rotate-x { - syntax: "*"; - inherits: false; -} -@property --tw-rotate-y { - syntax: "*"; - inherits: false; -} -@property --tw-rotate-z { - syntax: "*"; - inherits: false; -} -@property --tw-skew-x { - syntax: "*"; - inherits: false; -} -@property --tw-skew-y { - syntax: "*"; - inherits: false; -} -@property --tw-space-y-reverse { - syntax: "*"; - inherits: false; - initial-value: 0; -} -@property --tw-divide-y-reverse { - syntax: "*"; - inherits: false; - initial-value: 0; -} -@property --tw-border-style { - syntax: "*"; - inherits: false; - initial-value: solid; -} -@property --tw-leading { - syntax: "*"; - inherits: false; -} -@property --tw-font-weight { - syntax: "*"; - inherits: false; -} -@property --tw-tracking { - syntax: "*"; - inherits: false; -} -@property --tw-shadow { - syntax: "*"; - inherits: false; - initial-value: 0 0 #0000; -} -@property --tw-shadow-color { - syntax: "*"; - inherits: false; -} -@property --tw-shadow-alpha { - syntax: ""; - inherits: false; - initial-value: 100%; -} -@property --tw-inset-shadow { - syntax: "*"; - inherits: false; - initial-value: 0 0 #0000; -} -@property --tw-inset-shadow-color { - syntax: "*"; - inherits: false; -} -@property --tw-inset-shadow-alpha { - syntax: ""; - inherits: false; - initial-value: 100%; -} -@property --tw-ring-color { - syntax: "*"; - inherits: false; -} -@property --tw-ring-shadow { - syntax: "*"; - inherits: false; - initial-value: 0 0 #0000; -} -@property --tw-inset-ring-color { - syntax: "*"; - inherits: false; -} -@property --tw-inset-ring-shadow { - syntax: "*"; - inherits: false; - initial-value: 0 0 #0000; -} -@property --tw-ring-inset { - syntax: "*"; - inherits: false; -} -@property --tw-ring-offset-width { - syntax: ""; - inherits: false; - initial-value: 0px; -} -@property --tw-ring-offset-color { - syntax: "*"; - inherits: false; - initial-value: #fff; -} -@property --tw-ring-offset-shadow { - syntax: 
"*"; - inherits: false; - initial-value: 0 0 #0000; -} -@property --tw-blur { - syntax: "*"; - inherits: false; -} -@property --tw-brightness { - syntax: "*"; - inherits: false; -} -@property --tw-contrast { - syntax: "*"; - inherits: false; -} -@property --tw-grayscale { - syntax: "*"; - inherits: false; -} -@property --tw-hue-rotate { - syntax: "*"; - inherits: false; -} -@property --tw-invert { - syntax: "*"; - inherits: false; -} -@property --tw-opacity { - syntax: "*"; - inherits: false; -} -@property --tw-saturate { - syntax: "*"; - inherits: false; -} -@property --tw-sepia { - syntax: "*"; - inherits: false; -} -@property --tw-drop-shadow { - syntax: "*"; - inherits: false; -} -@property --tw-drop-shadow-color { - syntax: "*"; - inherits: false; -} -@property --tw-drop-shadow-alpha { - syntax: ""; - inherits: false; - initial-value: 100%; -} -@property --tw-drop-shadow-size { - syntax: "*"; - inherits: false; -} -@layer properties { - @supports ((-webkit-hyphens: none) and (not (margin-trim: inline))) or ((-moz-orient: inline) and (not (color:rgb(from red r g b)))) { - *, ::before, ::after, ::backdrop { - --tw-rotate-x: initial; - --tw-rotate-y: initial; - --tw-rotate-z: initial; - --tw-skew-x: initial; - --tw-skew-y: initial; - --tw-space-y-reverse: 0; - --tw-divide-y-reverse: 0; - --tw-border-style: solid; - --tw-leading: initial; - --tw-font-weight: initial; - --tw-tracking: initial; - --tw-shadow: 0 0 #0000; - --tw-shadow-color: initial; - --tw-shadow-alpha: 100%; - --tw-inset-shadow: 0 0 #0000; - --tw-inset-shadow-color: initial; - --tw-inset-shadow-alpha: 100%; - --tw-ring-color: initial; - --tw-ring-shadow: 0 0 #0000; - --tw-inset-ring-color: initial; - --tw-inset-ring-shadow: 0 0 #0000; - --tw-ring-inset: initial; - --tw-ring-offset-width: 0px; - --tw-ring-offset-color: #fff; - --tw-ring-offset-shadow: 0 0 #0000; - --tw-blur: initial; - --tw-brightness: initial; - --tw-contrast: initial; - --tw-grayscale: initial; - --tw-hue-rotate: initial; - 
--tw-invert: initial; - --tw-opacity: initial; - --tw-saturate: initial; - --tw-sepia: initial; - --tw-drop-shadow: initial; - --tw-drop-shadow-color: initial; - --tw-drop-shadow-alpha: 100%; - --tw-drop-shadow-size: initial; - } - } -} +@layer properties{@supports (((-webkit-hyphens:none)) and (not (margin-trim:inline))) or ((-moz-orient:inline) and (not (color:rgb(from red r g b)))){*,:before,:after,::backdrop{--tw-rotate-x:initial;--tw-rotate-y:initial;--tw-rotate-z:initial;--tw-skew-x:initial;--tw-skew-y:initial;--tw-space-y-reverse:0;--tw-divide-y-reverse:0;--tw-border-style:solid;--tw-leading:initial;--tw-font-weight:initial;--tw-tracking:initial;--tw-ordinal:initial;--tw-slashed-zero:initial;--tw-numeric-figure:initial;--tw-numeric-spacing:initial;--tw-numeric-fraction:initial;--tw-shadow:0 0 #0000;--tw-shadow-color:initial;--tw-shadow-alpha:100%;--tw-inset-shadow:0 0 #0000;--tw-inset-shadow-color:initial;--tw-inset-shadow-alpha:100%;--tw-ring-color:initial;--tw-ring-shadow:0 0 #0000;--tw-inset-ring-color:initial;--tw-inset-ring-shadow:0 0 #0000;--tw-ring-inset:initial;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-offset-shadow:0 0 #0000;--tw-blur:initial;--tw-brightness:initial;--tw-contrast:initial;--tw-grayscale:initial;--tw-hue-rotate:initial;--tw-invert:initial;--tw-opacity:initial;--tw-saturate:initial;--tw-sepia:initial;--tw-drop-shadow:initial;--tw-drop-shadow-color:initial;--tw-drop-shadow-alpha:100%;--tw-drop-shadow-size:initial;--tw-ease:initial}}}@layer theme{:root,:host{--font-sans:ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";--font-mono:ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;--color-red-50:oklch(97.1% .013 17.38);--color-red-200:oklch(88.5% .062 18.334);--color-red-300:oklch(80.8% .114 19.571);--color-red-500:oklch(63.7% .237 25.331);--color-red-600:oklch(57.7% .245 
27.325);--color-red-700:oklch(50.5% .213 27.518);--color-red-800:oklch(44.4% .177 26.899);--color-orange-100:oklch(95.4% .038 75.164);--color-orange-400:oklch(75% .183 55.934);--color-orange-500:oklch(70.5% .213 47.604);--color-orange-600:oklch(64.6% .222 41.116);--color-orange-700:oklch(55.3% .195 38.402);--color-orange-800:oklch(47% .157 37.304);--color-amber-50:oklch(98.7% .022 95.277);--color-amber-100:oklch(96.2% .059 95.617);--color-amber-200:oklch(92.4% .12 95.746);--color-amber-400:oklch(82.8% .189 84.429);--color-amber-600:oklch(66.6% .179 58.318);--color-amber-700:oklch(55.5% .163 48.998);--color-yellow-100:oklch(97.3% .071 103.193);--color-yellow-400:oklch(85.2% .199 91.936);--color-yellow-500:oklch(79.5% .184 86.047);--color-yellow-600:oklch(68.1% .162 75.834);--color-yellow-700:oklch(55.4% .135 66.442);--color-yellow-800:oklch(47.6% .114 61.907);--color-green-50:oklch(98.2% .018 155.826);--color-green-100:oklch(96.2% .044 156.743);--color-green-200:oklch(92.5% .084 155.995);--color-green-300:oklch(87.1% .15 154.449);--color-green-400:oklch(79.2% .209 151.711);--color-green-500:oklch(72.3% .219 149.579);--color-green-600:oklch(62.7% .194 149.214);--color-green-700:oklch(52.7% .154 150.069);--color-green-800:oklch(44.8% .119 151.328);--color-blue-50:oklch(97% .014 254.604);--color-blue-100:oklch(93.2% .032 255.585);--color-blue-200:oklch(88.2% .059 254.128);--color-blue-400:oklch(70.7% .165 254.624);--color-blue-500:oklch(62.3% .214 259.815);--color-blue-600:oklch(54.6% .245 262.881);--color-blue-700:oklch(48.8% .243 264.376);--color-blue-800:oklch(42.4% .199 265.638);--color-indigo-100:oklch(93% .034 272.788);--color-indigo-700:oklch(45.7% .24 277.023);--color-violet-100:oklch(94.3% .029 294.588);--color-violet-500:oklch(60.6% .25 292.717);--color-violet-600:oklch(54.1% .281 293.009);--color-violet-700:oklch(49.1% .27 292.581);--color-violet-800:oklch(43.2% .232 292.759);--color-purple-50:oklch(97.7% .014 308.299);--color-purple-100:oklch(94.6% .033 
307.174);--color-purple-200:oklch(90.2% .063 306.703);--color-purple-400:oklch(71.4% .203 305.504);--color-purple-700:oklch(49.6% .265 301.924);--color-purple-800:oklch(43.8% .218 303.724);--color-pink-100:oklch(94.8% .028 342.258);--color-pink-500:oklch(65.6% .241 354.308);--color-pink-800:oklch(45.9% .187 3.815);--color-gray-50:oklch(98.5% .002 247.839);--color-gray-100:oklch(96.7% .003 264.542);--color-gray-200:oklch(92.8% .006 264.531);--color-gray-300:oklch(87.2% .01 258.338);--color-gray-400:oklch(70.7% .022 261.325);--color-gray-500:oklch(55.1% .027 264.364);--color-gray-600:oklch(44.6% .03 256.802);--color-gray-700:oklch(37.3% .034 259.733);--color-gray-800:oklch(27.8% .033 256.848);--color-gray-900:oklch(21% .034 264.665);--color-gray-950:oklch(13% .028 261.692);--color-black:#000;--color-white:#fff;--spacing:.25rem;--container-xs:20rem;--container-md:28rem;--container-lg:32rem;--container-2xl:42rem;--container-3xl:48rem;--container-4xl:56rem;--container-5xl:64rem;--container-6xl:72rem;--text-xs:.75rem;--text-xs--line-height:calc(1 / .75);--text-sm:.875rem;--text-sm--line-height:calc(1.25 / .875);--text-base:1rem;--text-base--line-height:calc(1.5 / 1);--text-lg:1.125rem;--text-lg--line-height:calc(1.75 / 1.125);--text-xl:1.25rem;--text-xl--line-height:calc(1.75 / 1.25);--text-2xl:1.5rem;--text-2xl--line-height:calc(2 / 1.5);--text-3xl:1.875rem;--text-3xl--line-height:calc(2.25 / 1.875);--text-4xl:2.25rem;--text-4xl--line-height:calc(2.5 / 2.25);--text-5xl:3rem;--text-5xl--line-height:1;--text-6xl:3.75rem;--text-6xl--line-height:1;--font-weight-normal:400;--font-weight-medium:500;--font-weight-semibold:600;--font-weight-bold:700;--tracking-tight:-.025em;--tracking-wide:.025em;--leading-tight:1.25;--radius-sm:.25rem;--radius-md:.375rem;--radius-lg:.5rem;--ease-in-out:cubic-bezier(.4, 0, .2, 1);--animate-spin:spin 1s linear infinite;--animate-pulse:pulse 2s cubic-bezier(.4, 0, .6, 1) 
infinite;--default-transition-duration:.15s;--default-transition-timing-function:cubic-bezier(.4, 0, .2, 1);--default-font-family:var(--font-sans);--default-mono-font-family:var(--font-mono)}}@layer base{*,:after,:before,::backdrop{box-sizing:border-box;border:0 solid;margin:0;padding:0}::file-selector-button{box-sizing:border-box;border:0 solid;margin:0;padding:0}html,:host{-webkit-text-size-adjust:100%;tab-size:4;line-height:1.5;font-family:var(--default-font-family,ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji");font-feature-settings:var(--default-font-feature-settings,normal);font-variation-settings:var(--default-font-variation-settings,normal);-webkit-tap-highlight-color:transparent}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;-webkit-text-decoration:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:var(--default-mono-font-family,ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", 
monospace);font-feature-settings:var(--default-mono-font-feature-settings,normal);font-variation-settings:var(--default-mono-font-variation-settings,normal);font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}:-moz-focusring{outline:auto}progress{vertical-align:baseline}summary{display:list-item}ol,ul,menu{list-style:none}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}button,input,select,optgroup,textarea{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}::file-selector-button{font:inherit;font-feature-settings:inherit;font-variation-settings:inherit;letter-spacing:inherit;color:inherit;opacity:1;background-color:#0000;border-radius:0}:where(select:is([multiple],[size])) optgroup{font-weight:bolder}:where(select:is([multiple],[size])) optgroup option{padding-inline-start:20px}::file-selector-button{margin-inline-end:4px}::placeholder{opacity:1}@supports (not ((-webkit-appearance:-apple-pay-button))) or (contain-intrinsic-size:1px){::placeholder{color:currentColor}@supports (color:color-mix(in lab, red, red)){::placeholder{color:color-mix(in oklab, currentcolor 50%, 
transparent)}}}textarea{resize:vertical}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-date-and-time-value{min-height:1lh;text-align:inherit}::-webkit-datetime-edit{display:inline-flex}::-webkit-datetime-edit-fields-wrapper{padding:0}::-webkit-datetime-edit{padding-block:0}::-webkit-datetime-edit-year-field{padding-block:0}::-webkit-datetime-edit-month-field{padding-block:0}::-webkit-datetime-edit-day-field{padding-block:0}::-webkit-datetime-edit-hour-field{padding-block:0}::-webkit-datetime-edit-minute-field{padding-block:0}::-webkit-datetime-edit-second-field{padding-block:0}::-webkit-datetime-edit-millisecond-field{padding-block:0}::-webkit-datetime-edit-meridiem-field{padding-block:0}::-webkit-calendar-picker-indicator{line-height:1}:-moz-ui-invalid{box-shadow:none}button,input:where([type=button],[type=reset],[type=submit]){appearance:button}::file-selector-button{appearance:button}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[hidden]:where(:not([hidden=until-found])){display:none!important}}@layer components;@layer utilities{.visible{visibility:visible}.absolute{position:absolute}.fixed{position:fixed}.relative{position:relative}.static{position:static}.start{inset-inline-start:var(--spacing)}.end{inset-inline-end:var(--spacing)}.end\!{inset-inline-end:var(--spacing)!important}.-top-3{top:calc(var(--spacing) * -3)}.left-0{left:calc(var(--spacing) * 0)}.left-4{left:calc(var(--spacing) * 4)}.z-20{z-index:20}.container{width:100%}@media (min-width:40rem){.container{max-width:40rem}}@media (min-width:48rem){.container{max-width:48rem}}@media (min-width:64rem){.container{max-width:64rem}}@media (min-width:80rem){.container{max-width:80rem}}@media (min-width:96rem){.container{max-width:96rem}}.mx-auto{margin-inline:auto}.mt-0\.5{margin-top:calc(var(--spacing) * .5)}.mt-1{margin-top:calc(var(--spacing) * 1)}.mt-1\.5{margin-top:calc(var(--spacing) * 1.5)}.mt-2{margin-top:calc(var(--spacing) * 
2)}.mt-3{margin-top:calc(var(--spacing) * 3)}.mt-4{margin-top:calc(var(--spacing) * 4)}.mt-6{margin-top:calc(var(--spacing) * 6)}.mt-8{margin-top:calc(var(--spacing) * 8)}.mt-10{margin-top:calc(var(--spacing) * 10)}.mt-12{margin-top:calc(var(--spacing) * 12)}.mt-auto{margin-top:auto}.mr-1\.5{margin-right:calc(var(--spacing) * 1.5)}.-mb-px{margin-bottom:-1px}.mb-1{margin-bottom:calc(var(--spacing) * 1)}.mb-2{margin-bottom:calc(var(--spacing) * 2)}.mb-3{margin-bottom:calc(var(--spacing) * 3)}.mb-4{margin-bottom:calc(var(--spacing) * 4)}.mb-6{margin-bottom:calc(var(--spacing) * 6)}.mb-8{margin-bottom:calc(var(--spacing) * 8)}.mb-12{margin-bottom:calc(var(--spacing) * 12)}.ml-0\.5{margin-left:calc(var(--spacing) * .5)}.ml-2{margin-left:calc(var(--spacing) * 2)}.ml-4{margin-left:calc(var(--spacing) * 4)}.ml-6{margin-left:calc(var(--spacing) * 6)}.ml-auto{margin-left:auto}.block{display:block}.flex{display:flex}.grid{display:grid}.hidden{display:none}.inline{display:inline}.inline-block{display:inline-block}.inline-flex{display:inline-flex}.table{display:table}.h-1\.5{height:calc(var(--spacing) * 1.5)}.h-2{height:calc(var(--spacing) * 2)}.h-2\.5{height:calc(var(--spacing) * 2.5)}.h-3{height:calc(var(--spacing) * 3)}.h-3\.5{height:calc(var(--spacing) * 3.5)}.h-4{height:calc(var(--spacing) * 4)}.h-5{height:calc(var(--spacing) * 5)}.h-6{height:calc(var(--spacing) * 6)}.h-8{height:calc(var(--spacing) * 8)}.h-12{height:calc(var(--spacing) * 12)}.h-20{height:calc(var(--spacing) * 20)}.min-h-screen{min-height:100vh}.w-1\.5{width:calc(var(--spacing) * 1.5)}.w-2{width:calc(var(--spacing) * 2)}.w-2\.5{width:calc(var(--spacing) * 2.5)}.w-3{width:calc(var(--spacing) * 3)}.w-3\.5{width:calc(var(--spacing) * 3.5)}.w-4{width:calc(var(--spacing) * 4)}.w-5{width:calc(var(--spacing) * 5)}.w-6{width:calc(var(--spacing) * 6)}.w-8{width:calc(var(--spacing) * 8)}.w-12{width:calc(var(--spacing) * 12)}.w-20{width:calc(var(--spacing) * 20)}.w-24{width:calc(var(--spacing) * 
24)}.w-32{width:calc(var(--spacing) * 32)}.w-48{width:calc(var(--spacing) * 48)}.w-full{width:100%}.max-w-2xl{max-width:var(--container-2xl)}.max-w-3xl{max-width:var(--container-3xl)}.max-w-4xl{max-width:var(--container-4xl)}.max-w-5xl{max-width:var(--container-5xl)}.max-w-6xl{max-width:var(--container-6xl)}.max-w-lg{max-width:var(--container-lg)}.max-w-md{max-width:var(--container-md)}.max-w-xs{max-width:var(--container-xs)}.min-w-0{min-width:calc(var(--spacing) * 0)}.min-w-\[140px\]{min-width:140px}.flex-1{flex:1}.flex-shrink{flex-shrink:1}.shrink-0{flex-shrink:0}.grow{flex-grow:1}.transform{transform:var(--tw-rotate-x,) var(--tw-rotate-y,) var(--tw-rotate-z,) var(--tw-skew-x,) var(--tw-skew-y,)}.animate-pulse{animation:var(--animate-pulse)}.animate-spin{animation:var(--animate-spin)}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.resize{resize:both}.resize-y{resize:vertical}.list-none{list-style-type:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-center{align-items:center}.items-end{align-items:flex-end}.items-start{align-items:flex-start}.justify-between{justify-content:space-between}.justify-center{justify-content:center}.justify-end{justify-content:flex-end}.gap-0\.5{gap:calc(var(--spacing) * .5)}.gap-1{gap:calc(var(--spacing) * 1)}.gap-1\.5{gap:calc(var(--spacing) * 1.5)}.gap-2{gap:calc(var(--spacing) * 2)}.gap-3{gap:calc(var(--spacing) * 3)}.gap-4{gap:calc(var(--spacing) * 4)}.gap-6{gap:calc(var(--spacing) * 6)}.gap-8{gap:calc(var(--spacing) * 8)}:where(.space-y-1\.5>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing) * 1.5) * var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing) * 1.5) * calc(1 - 
var(--tw-space-y-reverse)))}:where(.space-y-2>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing) * 2) * var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing) * 2) * calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-3>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing) * 3) * var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing) * 3) * calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-4>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing) * 4) * var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing) * 4) * calc(1 - var(--tw-space-y-reverse)))}:where(.space-y-6>:not(:last-child)){--tw-space-y-reverse:0;margin-block-start:calc(calc(var(--spacing) * 6) * var(--tw-space-y-reverse));margin-block-end:calc(calc(var(--spacing) * 6) * calc(1 - var(--tw-space-y-reverse)))}.gap-x-6{column-gap:calc(var(--spacing) * 6)}.gap-y-2{row-gap:calc(var(--spacing) * 2)}:where(.divide-y>:not(:last-child)){--tw-divide-y-reverse:0;border-bottom-style:var(--tw-border-style);border-top-style:var(--tw-border-style);border-top-width:calc(1px * var(--tw-divide-y-reverse));border-bottom-width:calc(1px * calc(1 - 
var(--tw-divide-y-reverse)))}:where(.divide-gray-100>:not(:last-child)){border-color:var(--color-gray-100)}:where(.divide-gray-200>:not(:last-child)){border-color:var(--color-gray-200)}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.rounded{border-radius:.25rem}.rounded-full{border-radius:3.40282e38px}.rounded-lg{border-radius:var(--radius-lg)}.rounded-md{border-radius:var(--radius-md)}.rounded-sm{border-radius:var(--radius-sm)}.border{border-style:var(--tw-border-style);border-width:1px}.border-2{border-style:var(--tw-border-style);border-width:2px}.border-t{border-top-style:var(--tw-border-style);border-top-width:1px}.border-b{border-bottom-style:var(--tw-border-style);border-bottom-width:1px}.border-b-2{border-bottom-style:var(--tw-border-style);border-bottom-width:2px}.border-dashed{--tw-border-style:dashed;border-style:dashed}.border-amber-200{border-color:var(--color-amber-200)}.border-blue-200{border-color:var(--color-blue-200)}.border-gray-50{border-color:var(--color-gray-50)}.border-gray-100{border-color:var(--color-gray-100)}.border-gray-200{border-color:var(--color-gray-200)}.border-gray-300{border-color:var(--color-gray-300)}.border-gray-900{border-color:var(--color-gray-900)}.border-green-200{border-color:var(--color-green-200)}.border-green-300{border-color:var(--color-green-300)}.border-purple-200{border-color:var(--color-purple-200)}.border-red-200{border-color:var(--color-red-200)}.border-red-300{border-color:var(--color-red-300)}.border-transparent{border-color:#0000}.border-t-gray-600{border-top-color:var(--color-gray-600)}.bg-amber-50{background-color:var(--color-amber-50)}.bg-amber-100{background-color:var(--color-amber-100)}.bg-blue-50{background-color:var(--color-blue-50)}.bg-blue-100{background-color:var(--color-blue-100)}.bg-blue-400{background-color:var(--color-blue-400)}.bg-gray-50{background-color:var(--color-gray-50)}.bg-gray-50\/50{background-color:#f
9fafb80}@supports (color:color-mix(in lab, red, red)){.bg-gray-50\/50{background-color:color-mix(in oklab, var(--color-gray-50) 50%, transparent)}}.bg-gray-100{background-color:var(--color-gray-100)}.bg-gray-200{background-color:var(--color-gray-200)}.bg-gray-300{background-color:var(--color-gray-300)}.bg-gray-400{background-color:var(--color-gray-400)}.bg-gray-900{background-color:var(--color-gray-900)}.bg-gray-950{background-color:var(--color-gray-950)}.bg-green-50{background-color:var(--color-green-50)}.bg-green-100{background-color:var(--color-green-100)}.bg-green-500{background-color:var(--color-green-500)}.bg-green-600{background-color:var(--color-green-600)}.bg-indigo-100{background-color:var(--color-indigo-100)}.bg-orange-100{background-color:var(--color-orange-100)}.bg-orange-400{background-color:var(--color-orange-400)}.bg-orange-500{background-color:var(--color-orange-500)}.bg-pink-100{background-color:var(--color-pink-100)}.bg-pink-500{background-color:var(--color-pink-500)}.bg-purple-50{background-color:var(--color-purple-50)}.bg-purple-100{background-color:var(--color-purple-100)}.bg-red-50{background-color:var(--color-red-50)}.bg-red-500{background-color:var(--color-red-500)}.bg-violet-100{background-color:var(--color-violet-100)}.bg-violet-500{background-color:var(--color-violet-500)}.bg-white{background-color:var(--color-white)}.bg-yellow-100{background-color:var(--color-yellow-100)}.bg-yellow-400{background-color:var(--color-yellow-400)}.bg-yellow-500{background-color:var(--color-yellow-500)}.p-1{padding:calc(var(--spacing) * 1)}.p-3{padding:calc(var(--spacing) * 3)}.p-4{padding:calc(var(--spacing) * 4)}.p-6{padding:calc(var(--spacing) * 6)}.p-8{padding:calc(var(--spacing) * 8)}.p-12{padding:calc(var(--spacing) * 12)}.px-1{padding-inline:calc(var(--spacing) * 1)}.px-1\.5{padding-inline:calc(var(--spacing) * 1.5)}.px-2{padding-inline:calc(var(--spacing) * 2)}.px-2\.5{padding-inline:calc(var(--spacing) * 2.5)}.px-3{padding-inline:calc(var(--spacing) 
* 3)}.px-4{padding-inline:calc(var(--spacing) * 4)}.px-5{padding-inline:calc(var(--spacing) * 5)}.px-6{padding-inline:calc(var(--spacing) * 6)}.py-0\.5{padding-block:calc(var(--spacing) * .5)}.py-1{padding-block:calc(var(--spacing) * 1)}.py-1\.5{padding-block:calc(var(--spacing) * 1.5)}.py-2{padding-block:calc(var(--spacing) * 2)}.py-2\.5{padding-block:calc(var(--spacing) * 2.5)}.py-3{padding-block:calc(var(--spacing) * 3)}.py-4{padding-block:calc(var(--spacing) * 4)}.py-10{padding-block:calc(var(--spacing) * 10)}.py-12{padding-block:calc(var(--spacing) * 12)}.py-16{padding-block:calc(var(--spacing) * 16)}.pt-0{padding-top:calc(var(--spacing) * 0)}.pt-1{padding-top:calc(var(--spacing) * 1)}.pt-2{padding-top:calc(var(--spacing) * 2)}.pt-3{padding-top:calc(var(--spacing) * 3)}.pt-6{padding-top:calc(var(--spacing) * 6)}.pt-8{padding-top:calc(var(--spacing) * 8)}.pt-12{padding-top:calc(var(--spacing) * 12)}.pt-16{padding-top:calc(var(--spacing) * 16)}.pt-24{padding-top:calc(var(--spacing) * 24)}.pr-1{padding-right:calc(var(--spacing) * 1)}.pb-2{padding-bottom:calc(var(--spacing) * 2)}.pb-3{padding-bottom:calc(var(--spacing) * 3)}.pb-8{padding-bottom:calc(var(--spacing) * 8)}.pb-12{padding-bottom:calc(var(--spacing) * 12)}.pb-16{padding-bottom:calc(var(--spacing) * 
16)}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.font-mono{font-family:var(--font-mono)}.text-2xl{font-size:var(--text-2xl);line-height:var(--tw-leading,var(--text-2xl--line-height))}.text-3xl{font-size:var(--text-3xl);line-height:var(--tw-leading,var(--text-3xl--line-height))}.text-4xl{font-size:var(--text-4xl);line-height:var(--tw-leading,var(--text-4xl--line-height))}.text-5xl{font-size:var(--text-5xl);line-height:var(--tw-leading,var(--text-5xl--line-height))}.text-6xl{font-size:var(--text-6xl);line-height:var(--tw-leading,var(--text-6xl--line-height))}.text-base{font-size:var(--text-base);line-height:var(--tw-leading,var(--text-base--line-height))}.text-lg{font-size:var(--text-lg);line-height:var(--tw-leading,var(--text-lg--line-height))}.text-sm{font-size:var(--text-sm);line-height:var(--tw-leading,var(--text-sm--line-height))}.text-xl{font-size:var(--text-xl);line-height:var(--tw-leading,var(--text-xl--line-height))}.text-xs{font-size:var(--text-xs);line-height:var(--tw-leading,var(--text-xs--line-height))}.leading-tight{--tw-leading:var(--leading-tight);line-height:var(--leading-tight)}.font-bold{--tw-font-weight:var(--font-weight-bold);font-weight:var(--font-weight-bold)}.font-medium{--tw-font-weight:var(--font-weight-medium);font-weight:var(--font-weight-medium)}.font-normal{--tw-font-weight:var(--font-weight-normal);font-weight:var(--font-weight-normal)}.font-semibold{--tw-font-weight:var(--font-weight-semibold);font-weight:var(--font-weight-semibold)}.tracking-tight{--tw-tracking:var(--tracking-tight);letter-spacing:var(--tracking-tight)}.tracking-wide{--tw-tracking:var(--tracking-wide);letter-spacing:var(--tracking-wide)}.break-words{overflow-wrap:break-word}.break-all{word-break:break-all}.whitespace-nowrap{white-space:nowrap}.whitespace-pre-wrap{white-space:pre-wrap}.text-amber-400{color:var(--color-amber-400)}.text-amber-600{color:var(--color-amber-600)}.text-amber-700{color:var(--color-amber-700)}.text-blue
-400{color:var(--color-blue-400)}.text-blue-600{color:var(--color-blue-600)}.text-blue-700{color:var(--color-blue-700)}.text-blue-800{color:var(--color-blue-800)}.text-gray-300{color:var(--color-gray-300)}.text-gray-400{color:var(--color-gray-400)}.text-gray-500{color:var(--color-gray-500)}.text-gray-600{color:var(--color-gray-600)}.text-gray-700{color:var(--color-gray-700)}.text-gray-800{color:var(--color-gray-800)}.text-gray-900{color:var(--color-gray-900)}.text-green-400{color:var(--color-green-400)}.text-green-500{color:var(--color-green-500)}.text-green-600{color:var(--color-green-600)}.text-green-700{color:var(--color-green-700)}.text-green-800{color:var(--color-green-800)}.text-indigo-700{color:var(--color-indigo-700)}.text-orange-500{color:var(--color-orange-500)}.text-orange-600{color:var(--color-orange-600)}.text-orange-700{color:var(--color-orange-700)}.text-orange-800{color:var(--color-orange-800)}.text-pink-800{color:var(--color-pink-800)}.text-purple-400{color:var(--color-purple-400)}.text-purple-700{color:var(--color-purple-700)}.text-purple-800{color:var(--color-purple-800)}.text-red-500{color:var(--color-red-500)}.text-red-600{color:var(--color-red-600)}.text-red-700{color:var(--color-red-700)}.text-violet-600{color:var(--color-violet-600)}.text-violet-700{color:var(--color-violet-700)}.text-violet-800{color:var(--color-violet-800)}.text-white{color:var(--color-white)}.text-yellow-500{color:var(--color-yellow-500)}.text-yellow-600{color:var(--color-yellow-600)}.text-yellow-700{color:var(--color-yellow-700)}.text-yellow-800{color:var(--color-yellow-800)}.lowercase{text-transform:lowercase}.uppercase{text-transform:uppercase}.italic{font-style:italic}.tabular-nums{--tw-numeric-spacing:tabular-nums;font-variant-numeric:var(--tw-ordinal,) var(--tw-slashed-zero,) var(--tw-numeric-figure,) var(--tw-numeric-spacing,) 
var(--tw-numeric-fraction,)}.underline{text-decoration-line:underline}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.opacity-50{opacity:.5}.opacity-75{opacity:.75}.shadow{--tw-shadow:0 1px 3px 0 var(--tw-shadow-color,#0000001a), 0 1px 2px -1px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px var(--tw-shadow-color,#0000001a), 0 4px 6px -4px var(--tw-shadow-color,#0000001a);box-shadow:var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow)}.filter{filter:var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,)}.transition{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to,opacity,box-shadow,transform,translate,scale,rotate,filter,-webkit-backdrop-filter,backdrop-filter,display,content-visibility,overlay,pointer-events;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-colors{transition-property:color,background-color,border-color,outline-color,text-decoration-color,fill,stroke,--tw-gradient-from,--tw-gradient-via,--tw-gradient-to;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.transition-transform{transition-property:transform,translate,scale,rotate;transition-timing-function:var(--tw-ease,var(--default-transition-timing-function));transition-duration:var(--tw-duration,var(--default-transition-duration))}.ease-in-out{--tw-ease:var(--ease-in-out);transition-timin
g-function:var(--ease-in-out)}.select-none{-webkit-user-select:none;user-select:none}.group-open\:hidden:is(:where(.group):is([open],:popover-open,:open) *){display:none}.group-open\:inline:is(:where(.group):is([open],:popover-open,:open) *){display:inline}.group-open\:rotate-90:is(:where(.group):is([open],:popover-open,:open) *){rotate:90deg}@media (hover:hover){.group-hover\:text-gray-500:is(:where(.group):hover *){color:var(--color-gray-500)}.group-hover\:text-violet-700:is(:where(.group):hover *){color:var(--color-violet-700)}.hover\:border-gray-300:hover{border-color:var(--color-gray-300)}.hover\:border-gray-400:hover{border-color:var(--color-gray-400)}.hover\:bg-amber-100:hover{background-color:var(--color-amber-100)}.hover\:bg-gray-50:hover{background-color:var(--color-gray-50)}.hover\:bg-gray-800:hover{background-color:var(--color-gray-800)}.hover\:bg-green-50:hover{background-color:var(--color-green-50)}.hover\:bg-green-700:hover{background-color:var(--color-green-700)}.hover\:bg-red-50:hover{background-color:var(--color-red-50)}.hover\:text-black:hover{color:var(--color-black)}.hover\:text-gray-600:hover{color:var(--color-gray-600)}.hover\:text-gray-700:hover{color:var(--color-gray-700)}.hover\:text-gray-900:hover{color:var(--color-gray-900)}.hover\:text-green-700:hover{color:var(--color-green-700)}.hover\:text-red-500:hover{color:var(--color-red-500)}.hover\:text-red-800:hover{color:var(--color-red-800)}.hover\:underline:hover{text-decoration-line:underline}}.focus\:border-transparent:focus{border-color:#0000}.focus\:ring-1:focus{--tw-ring-shadow:var(--tw-ring-inset,) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow)}.focus\:ring-2:focus{--tw-ring-shadow:var(--tw-ring-inset,) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color,currentcolor);box-shadow:var(--tw-inset-shadow), 
var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow)}.focus\:ring-gray-400:focus{--tw-ring-color:var(--color-gray-400)}.focus\:ring-gray-900:focus{--tw-ring-color:var(--color-gray-900)}.focus\:ring-green-500:focus{--tw-ring-color:var(--color-green-500)}.focus\:outline-none:focus{--tw-outline-style:none;outline-style:none}.has-\[\:checked\]\:border-blue-500:has(:checked){border-color:var(--color-blue-500)}.has-\[\:checked\]\:bg-blue-50:has(:checked){background-color:var(--color-blue-50)}@media (min-width:40rem){.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:flex-row{flex-direction:row}.sm\:items-end{align-items:flex-end}}@media (min-width:48rem){.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.md\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}}}@media (prefers-color-scheme:dark){:root,:host{--color-white:oklch(14.5% .015 260);--color-black:oklch(98% .002 248);--color-gray-50:oklch(17.5% .02 260);--color-gray-100:oklch(21% .024 265);--color-gray-200:oklch(27.8% .025 257);--color-gray-300:oklch(37.3% .025 260);--color-gray-400:oklch(55.1% .02 264);--color-gray-500:oklch(60% .02 264);--color-gray-600:oklch(70.7% .017 261);--color-gray-700:oklch(80% .012 258);--color-gray-800:oklch(87.2% .008 258);--color-gray-900:oklch(93% .005 265);--color-gray-950:oklch(96.7% .003 265);--color-green-50:oklch(20% .04 155);--color-green-100:oklch(25% .06 155);--color-green-200:oklch(30% .08 155);--color-green-300:oklch(42% .12 154);--color-green-700:oklch(75% .15 150);--color-green-800:oklch(80% .12 150);--color-red-50:oklch(22% .04 17);--color-red-200:oklch(32% .06 18);--color-red-600:oklch(65% .2 27);--color-red-700:oklch(72% .18 27);--color-red-800:oklch(77% .15 27);--color-blue-100:oklch(22% .04 255);--color-blue-600:oklch(62% .2 263);--color-blue-700:oklch(72% .17 264);--color-blue-800:oklch(77% .15 
265);--color-orange-100:oklch(25% .05 75);--color-orange-800:oklch(78% .13 37);--color-yellow-100:oklch(25% .06 103);--color-yellow-700:oklch(72% .12 66);--color-yellow-800:oklch(77% .1 62);--color-violet-100:oklch(22% .04 295);--color-violet-200:oklch(28% .06 294);--color-violet-400:oklch(45% .14 293);--color-violet-600:oklch(60% .2 293);--color-violet-800:oklch(75% .18 293);--color-purple-100:oklch(22% .04 307);--color-purple-800:oklch(75% .17 304);--color-pink-100:oklch(22% .04 342);--color-pink-800:oklch(75% .15 4);--color-amber-400:oklch(80% .17 84)}}@property --tw-rotate-x{syntax:"*";inherits:false}@property --tw-rotate-y{syntax:"*";inherits:false}@property --tw-rotate-z{syntax:"*";inherits:false}@property --tw-skew-x{syntax:"*";inherits:false}@property --tw-skew-y{syntax:"*";inherits:false}@property --tw-space-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-divide-y-reverse{syntax:"*";inherits:false;initial-value:0}@property --tw-border-style{syntax:"*";inherits:false;initial-value:solid}@property --tw-leading{syntax:"*";inherits:false}@property --tw-font-weight{syntax:"*";inherits:false}@property --tw-tracking{syntax:"*";inherits:false}@property --tw-ordinal{syntax:"*";inherits:false}@property --tw-slashed-zero{syntax:"*";inherits:false}@property --tw-numeric-figure{syntax:"*";inherits:false}@property --tw-numeric-spacing{syntax:"*";inherits:false}@property --tw-numeric-fraction{syntax:"*";inherits:false}@property --tw-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-shadow-color{syntax:"*";inherits:false}@property --tw-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-inset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-inset-shadow-color{syntax:"*";inherits:false}@property --tw-inset-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-ring-color{syntax:"*";inherits:false}@property --tw-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 
#0000}@property --tw-inset-ring-color{syntax:"*";inherits:false}@property --tw-inset-ring-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-ring-inset{syntax:"*";inherits:false}@property --tw-ring-offset-width{syntax:"";inherits:false;initial-value:0}@property --tw-ring-offset-color{syntax:"*";inherits:false;initial-value:#fff}@property --tw-ring-offset-shadow{syntax:"*";inherits:false;initial-value:0 0 #0000}@property --tw-blur{syntax:"*";inherits:false}@property --tw-brightness{syntax:"*";inherits:false}@property --tw-contrast{syntax:"*";inherits:false}@property --tw-grayscale{syntax:"*";inherits:false}@property --tw-hue-rotate{syntax:"*";inherits:false}@property --tw-invert{syntax:"*";inherits:false}@property --tw-opacity{syntax:"*";inherits:false}@property --tw-saturate{syntax:"*";inherits:false}@property --tw-sepia{syntax:"*";inherits:false}@property --tw-drop-shadow{syntax:"*";inherits:false}@property --tw-drop-shadow-color{syntax:"*";inherits:false}@property --tw-drop-shadow-alpha{syntax:"";inherits:false;initial-value:100%}@property --tw-drop-shadow-size{syntax:"*";inherits:false}@property --tw-ease{syntax:"*";inherits:false}@keyframes spin{to{transform:rotate(360deg)}}@keyframes pulse{50%{opacity:.5}} \ No newline at end of file diff --git a/static/js/components/forage-components.js b/static/js/components/forage-components.js new file mode 100644 index 0000000..aca3572 --- /dev/null +++ b/static/js/components/forage-components.js @@ -0,0 +1,19 @@ +var Hf=Object.defineProperty;var Dl=de=>{throw TypeError(de)};var Gf=(de,fe,Te)=>fe in de?Hf(de,fe,{enumerable:!0,configurable:!0,writable:!0,value:Te}):de[fe]=Te;var we=(de,fe,Te)=>Gf(de,typeof fe!="symbol"?fe+"":fe,Te),Js=(de,fe,Te)=>fe.has(de)||Dl("Cannot "+Te);var c=(de,fe,Te)=>(Js(de,fe,"read from private field"),Te?Te.call(de):fe.get(de)),H=(de,fe,Te)=>fe.has(de)?Dl("Cannot add the same private member more than once"):fe instanceof 
WeakSet?fe.add(de):fe.set(de,Te),U=(de,fe,Te,Ln)=>(Js(de,fe,"write to private field"),Ln?Ln.call(de,Te):fe.set(de,Te),Te),ke=(de,fe,Te)=>(Js(de,fe,"access private method"),Te);(function(){"use strict";var wl,kl,pr,sn,Ur,ln,on,an,hr,zt,fn,et,Xs,Zs,ei,ti,it,Nn,Ht,jr,tt,Gt,lt,Rt,er,Fr,_r,un,cn,vn,tr,Xn,xe,Nl,Tl,Al,ri,ss,is,ni,yl,It,Wt,ot,Br,Tn,An,Zn,rr,bt;typeof window<"u"&&((wl=window.__svelte??(window.__svelte={})).v??(wl.v=new Set)).add("5");let fe=!1,Te=!1;function Ln(){fe=!0}Ln();const Ml=1,Rl=2,si=4,Il=8,Ll=16,ql=1,Ol=2,Ul=4,jl=8,Fl=16,ii=1,Bl=2,li="[",ls="[!",oi="[?",os="]",Yr={},qe=Symbol(),ai="http://www.w3.org/1999/xhtml",as=!1;var fi=Array.isArray,Pl=Array.prototype.indexOf,Qr=Array.prototype.includes,qn=Array.from,On=Object.keys,Un=Object.defineProperty,br=Object.getOwnPropertyDescriptor,ui=Object.getOwnPropertyDescriptors,zl=Object.prototype,Hl=Array.prototype,fs=Object.getPrototypeOf,ci=Object.isExtensible;const Gl=()=>{};function Wl(e){return e()}function us(e){for(var t=0;t{e=s,t=i});return{promise:r,resolve:e,reject:t}}const Oe=2,Vr=4,wr=8,cs=1<<24,ir=16,St=32,lr=64,vs=128,ct=512,Re=1024,Ue=2048,vt=4096,We=8192,Ut=16384,kr=32768,Kr=65536,di=1<<17,Yl=1<<18,yr=1<<19,pi=1<<20,jt=1<<25,Er=65536,ds=1<<21,ps=1<<22,or=1<<23,$r=Symbol("$state"),hi=Symbol("legacy props"),Ql=Symbol(""),Cr=new class extends Error{constructor(){super(...arguments);we(this,"name","StaleReactionError");we(this,"message","The reaction that called `getAbortSignal()` was re-run or destroyed")}},Vl=!!((kl=globalThis.document)!=null&&kl.contentType)&&globalThis.document.contentType.includes("xml"),gn=3,mn=8;function _i(e){throw new Error("https://svelte.dev/e/lifecycle_outside_component")}function Kl(){throw new Error("https://svelte.dev/e/async_derived_orphan")}function Jl(e,t,r){throw new Error("https://svelte.dev/e/each_key_duplicate")}function Xl(e){throw new Error("https://svelte.dev/e/effect_in_teardown")}function Zl(){throw new 
Error("https://svelte.dev/e/effect_in_unowned_derived")}function eo(e){throw new Error("https://svelte.dev/e/effect_orphan")}function to(){throw new Error("https://svelte.dev/e/effect_update_depth_exceeded")}function ro(){throw new Error("https://svelte.dev/e/hydration_failed")}function no(e){throw new Error("https://svelte.dev/e/props_invalid_value")}function so(){throw new Error("https://svelte.dev/e/state_descriptors_fixed")}function io(){throw new Error("https://svelte.dev/e/state_prototype_fixed")}function lo(){throw new Error("https://svelte.dev/e/state_unsafe_mutation")}function oo(){throw new Error("https://svelte.dev/e/svelte_boundary_reset_onerror")}function jn(e){console.warn("https://svelte.dev/e/hydration_mismatch")}function ao(){console.warn("https://svelte.dev/e/svelte_boundary_reset_noop")}let X=!1;function Jt(e){X=e}let j;function Ye(e){if(e===null)throw jn(),Yr;return j=e}function Fn(){return Ye(Bt(j))}function y(e){if(X){if(Bt(j)!==null)throw jn(),Yr;j=e}}function ar(e=1){if(X){for(var t=e,r=j;t--;)r=Bt(r);j=r}}function Bn(e=!0){for(var t=0,r=j;;){if(r.nodeType===mn){var s=r.data;if(s===os){if(t===0)return r;t-=1}else(s===li||s===ls||s[0]==="["&&!isNaN(Number(s.slice(1))))&&(t+=1)}var i=Bt(r);e&&r.remove(),r=i}}function gi(e){if(!e||e.nodeType!==mn)throw jn(),Yr;return e.data}function mi(e){return e===this.v}function fo(e,t){return e!=e?t==t:e!==t||e!==null&&typeof e=="object"||typeof e=="function"}function xi(e){return!fo(e,this.v)}let pe=null;function Jr(e){pe=e}function hs(e,t=!1,r){pe={p:pe,i:!1,c:null,e:null,s:e,x:null,l:fe&&!t?{s:null,u:null,$:[]}:null}}function _s(e){var t=pe,r=t.e;if(r!==null){t.e=null;for(var s of r)Fi(s)}return e!==void 0&&(t.x=e),t.i=!0,pe=t.p,e??{}}function xn(){return!fe||pe!==null&&pe.l===null}let Sr=[];function bi(){var e=Sr;Sr=[],us(e)}function Xt(e){if(Sr.length===0&&!bn){var t=Sr;queueMicrotask(()=>{t===Sr&&bi()})}Sr.push(e)}function uo(){for(;Sr.length>0;)bi()}function wi(e){var t=V;if(t===null)return 
G.f|=or,e;if((t.f&kr)===0&&(t.f&Vr)===0)throw e;fr(e,t)}function fr(e,t){for(;t!==null;){if((t.f&vs)!==0){if((t.f&kr)===0)throw e;try{t.b.error(e);return}catch(r){e=r}}t=t.parent}throw e}const co=-7169;function me(e,t){e.f=e.f&co|t}function gs(e){(e.f&ct)!==0||e.deps===null?me(e,Re):me(e,vt)}function ki(e){if(e!==null)for(const t of e)(t.f&Oe)===0||(t.f&Er)===0||(t.f^=Er,ki(t.deps))}function yi(e,t,r){(e.f&Ue)!==0?t.add(e):(e.f&vt)!==0&&r.add(e),ki(e.deps),me(e,Re)}const Pn=new Set;let Z=null,je=null,Je=[],zn=null,bn=!1,Xr=null,vo=1;const Fs=class Fs{constructor(){H(this,et);we(this,"id",vo++);we(this,"current",new Map);we(this,"previous",new Map);H(this,pr,new Set);H(this,sn,new Set);H(this,Ur,0);H(this,ln,0);H(this,on,null);H(this,an,new Set);H(this,hr,new Set);H(this,zt,new Map);we(this,"is_fork",!1);H(this,fn,!1)}skip_effect(t){c(this,zt).has(t)||c(this,zt).set(t,{d:[],m:[]})}unskip_effect(t){var r=c(this,zt).get(t);if(r){c(this,zt).delete(t);for(var s of r.d)me(s,Ue),Ft(s);for(s of r.m)me(s,vt),Ft(s)}}process(t){var i;Je=[],this.apply();var r=Xr=[],s=[];for(const l of t)ke(this,et,Zs).call(this,l,r,s);if(Xr=null,ke(this,et,Xs).call(this)){ke(this,et,ei).call(this,s),ke(this,et,ei).call(this,r);for(const[l,o]of c(this,zt))Di(l,o)}else{Z=null;for(const l of c(this,pr))l(this);c(this,pr).clear(),c(this,Ur)===0&&ke(this,et,ti).call(this),$i(s),$i(r),c(this,an).clear(),c(this,hr).clear(),(i=c(this,on))==null||i.resolve()}je=null}capture(t,r){r!==qe&&!this.previous.has(t)&&this.previous.set(t,r),(t.f&or)===0&&(this.current.set(t,t.v),je==null||je.set(t,t.v))}activate(){Z=this,this.apply()}deactivate(){Z===this&&(Z=null,je=null)}flush(){var t;if(Je.length>0)Z=this,Ei();else if(c(this,Ur)===0&&!this.is_fork){for(const r of c(this,pr))r(this);c(this,pr).clear(),ke(this,et,ti).call(this),(t=c(this,on))==null||t.resolve()}this.deactivate()}discard(){for(const t of 
c(this,sn))t(this);c(this,sn).clear()}increment(t){U(this,Ur,c(this,Ur)+1),t&&U(this,ln,c(this,ln)+1)}decrement(t){U(this,Ur,c(this,Ur)-1),t&&U(this,ln,c(this,ln)-1),!c(this,fn)&&(U(this,fn,!0),Xt(()=>{U(this,fn,!1),ke(this,et,Xs).call(this)?Je.length>0&&this.flush():this.revive()}))}revive(){for(const t of c(this,an))c(this,hr).delete(t),me(t,Ue),Ft(t);for(const t of c(this,hr))me(t,vt),Ft(t);this.flush()}oncommit(t){c(this,pr).add(t)}ondiscard(t){c(this,sn).add(t)}settled(){return(c(this,on)??U(this,on,vi())).promise}static ensure(){if(Z===null){const t=Z=new Fs;Pn.add(Z),bn||Xt(()=>{Z===t&&t.flush()})}return Z}apply(){}};pr=new WeakMap,sn=new WeakMap,Ur=new WeakMap,ln=new WeakMap,on=new WeakMap,an=new WeakMap,hr=new WeakMap,zt=new WeakMap,fn=new WeakMap,et=new WeakSet,Xs=function(){return this.is_fork||c(this,ln)>0},Zs=function(t,r,s){t.f^=Re;for(var i=t.first;i!==null;){var l=i.f,o=(l&(St|lr))!==0,a=o&&(l&Re)!==0,f=(l&We)!==0,u=a||c(this,zt).has(i);if(!u&&i.fn!==null){o?f||(i.f^=Re):(l&Vr)!==0?r.push(i):(l&(wr|cs))!==0&&f?s.push(i):en(i)&&(Ir(i),(l&ir)!==0&&(c(this,hr).add(i),f&&me(i,Ue)));var d=i.first;if(d!==null){i=d;continue}}for(;i!==null;){var x=i.next;if(x!==null){i=x;break}i=i.parent}}},ei=function(t){for(var r=0;r1){this.previous.clear();var t=Z,r=je,s=!0;for(const o of Pn){if(o===this){s=!1;continue}const a=[];for(const[u,d]of this.current){if(o.current.has(u))if(s&&d!==o.current.get(u))o.current.set(u,d);else continue;a.push(u)}if(a.length===0)continue;const f=[...o.current.keys()].filter(u=>!this.current.has(u));if(f.length>0){var i=Je;Je=[];const u=new Set,d=new Map;for(const x of a)Ci(x,f,u,d);if(Je.length>0){Z=o,o.apply();for(const x of Je)ke(l=o,et,Zs).call(l,x,[],[]);o.deactivate()}Je=i}}Z=t,je=r}c(this,zt).clear(),Pn.delete(this)};let Zt=Fs;function wn(e){var t=bn;bn=!0;try{for(var r;;){if(uo(),Je.length===0&&(Z==null||Z.flush(),Je.length===0))return zn=null,r;Ei()}}finally{bn=t}}function Ei(){var e=null;try{for(var t=0;Je.length>0;){var 
r=Zt.ensure();if(t++>1e3){var s,i;po()}r.process(Je),ur.clear()}}finally{Je=[],zn=null,Xr=null}}function po(){try{to()}catch(e){fr(e,zn)}}let Dt=null;function $i(e){var t=e.length;if(t!==0){for(var r=0;r0)){ur.clear();for(const i of Dt){if((i.f&(Ut|We))!==0)continue;const l=[i];let o=i.parent;for(;o!==null;)Dt.has(o)&&(Dt.delete(o),l.push(o)),o=o.parent;for(let a=l.length-1;a>=0;a--){const f=l[a];(f.f&(Ut|We))===0&&Ir(f)}}Dt.clear()}}Dt=null}}function Ci(e,t,r,s){if(!r.has(e)&&(r.add(e),e.reactions!==null))for(const i of e.reactions){const l=i.f;(l&Oe)!==0?Ci(i,t,r,s):(l&(ps|ir))!==0&&(l&Ue)===0&&Si(i,t,s)&&(me(i,Ue),Ft(i))}}function Si(e,t,r){const s=r.get(e);if(s!==void 0)return s;if(e.deps!==null)for(const i of e.deps){if(Qr.call(t,i))return!0;if((i.f&Oe)!==0&&Si(i,t,r))return r.set(i,!0),!0}return r.set(e,!1),!1}function Ft(e){var t=zn=e,r=t.b;if(r!=null&&r.is_pending&&(e.f&(Vr|wr|cs))!==0&&(e.f&kr)===0){r.defer_effect(e);return}for(;t.parent!==null;){t=t.parent;var s=t.f;if(Xr!==null&&t===V&&(e.f&wr)===0)return;if((s&(lr|St))!==0){if((s&Re)===0)return;t.f^=Re}}Je.push(t)}function Di(e,t){if(!((e.f&St)!==0&&(e.f&Re)!==0)){(e.f&Ue)!==0?t.d.push(e):(e.f&vt)!==0&&t.m.push(e),me(e,Re);for(var r=e.first;r!==null;)Di(r,t),r=r.next}}function ho(e){let t=0,r=Nr(0),s;return()=>{$s()&&(n(r),En(()=>(t===0&&(s=v(()=>e(()=>yn(r)))),t+=1,()=>{Xt(()=>{t-=1,t===0&&(s==null||s(),s=void 0,yn(r))})})))}}var _o=Kr|yr;function go(e,t,r,s){new mo(e,t,r,s)}class mo{constructor(t,r,s,i){H(this,xe);we(this,"parent");we(this,"is_pending",!1);we(this,"transform_error");H(this,it);H(this,Nn,X?j:null);H(this,Ht);H(this,jr);H(this,tt);H(this,Gt,null);H(this,lt,null);H(this,Rt,null);H(this,er,null);H(this,Fr,0);H(this,_r,0);H(this,un,!1);H(this,cn,new Set);H(this,vn,new Set);H(this,tr,null);H(this,Xn,ho(()=>(U(this,tr,Nr(c(this,Fr))),()=>{U(this,tr,null)})));var l;U(this,it,t),U(this,Ht,r),U(this,jr,o=>{var 
a=V;a.b=this,a.f|=vs,s(o)}),this.parent=V.b,this.transform_error=i??((l=this.parent)==null?void 0:l.transform_error)??(o=>o),U(this,tt,Ss(()=>{if(X){const o=c(this,Nn);Fn();const a=o.data===ls;if(o.data.startsWith(oi)){const u=JSON.parse(o.data.slice(oi.length));ke(this,xe,Tl).call(this,u)}else a?ke(this,xe,Al).call(this):ke(this,xe,Nl).call(this)}else ke(this,xe,ri).call(this)},_o)),X&&U(this,it,j)}defer_effect(t){yi(t,c(this,cn),c(this,vn))}is_rendered(){return!this.is_pending&&(!this.parent||this.parent.is_rendered())}has_pending_snippet(){return!!c(this,Ht).pending}update_pending_count(t){ke(this,xe,ni).call(this,t),U(this,Fr,c(this,Fr)+t),!(!c(this,tr)||c(this,un))&&(U(this,un,!0),Xt(()=>{U(this,un,!1),c(this,tr)&&Zr(c(this,tr),c(this,Fr))}))}get_effect_pending(){return c(this,Xn).call(this),n(c(this,tr))}error(t){var r=c(this,Ht).onerror;let s=c(this,Ht).failed;if(!r&&!s)throw t;c(this,Gt)&&(Ve(c(this,Gt)),U(this,Gt,null)),c(this,lt)&&(Ve(c(this,lt)),U(this,lt,null)),c(this,Rt)&&(Ve(c(this,Rt)),U(this,Rt,null)),X&&(Ye(c(this,Nn)),ar(),Ye(Bn()));var i=!1,l=!1;const o=()=>{if(i){ao();return}i=!0,l&&oo(),c(this,Rt)!==null&&Ar(c(this,Rt),()=>{U(this,Rt,null)}),ke(this,xe,is).call(this,()=>{Zt.ensure(),ke(this,xe,ri).call(this)})},a=f=>{try{l=!0,r==null||r(f,o),l=!1}catch(u){fr(u,c(this,tt)&&c(this,tt).parent)}s&&U(this,Rt,ke(this,xe,is).call(this,()=>{Zt.ensure();try{return ht(()=>{var u=V;u.b=this,u.f|=vs,s(c(this,it),()=>f,()=>o)})}catch(u){return fr(u,c(this,tt).parent),null}}))};Xt(()=>{var f;try{f=this.transform_error(t)}catch(u){fr(u,c(this,tt)&&c(this,tt).parent);return}f!==null&&typeof f=="object"&&typeof f.then=="function"?f.then(a,u=>fr(u,c(this,tt)&&c(this,tt).parent)):a(f)})}}it=new WeakMap,Nn=new WeakMap,Ht=new WeakMap,jr=new WeakMap,tt=new WeakMap,Gt=new WeakMap,lt=new WeakMap,Rt=new WeakMap,er=new WeakMap,Fr=new WeakMap,_r=new WeakMap,un=new WeakMap,cn=new WeakMap,vn=new WeakMap,tr=new WeakMap,Xn=new WeakMap,xe=new 
WeakSet,Nl=function(){try{U(this,Gt,ht(()=>c(this,jr).call(this,c(this,it))))}catch(t){this.error(t)}},Tl=function(t){const r=c(this,Ht).failed;r&&U(this,Rt,ht(()=>{r(c(this,it),()=>t,()=>()=>{})}))},Al=function(){const t=c(this,Ht).pending;t&&(this.is_pending=!0,U(this,lt,ht(()=>t(c(this,it)))),Xt(()=>{var r=U(this,er,document.createDocumentFragment()),s=Xe();r.append(s),U(this,Gt,ke(this,xe,is).call(this,()=>(Zt.ensure(),ht(()=>c(this,jr).call(this,s))))),c(this,_r)===0&&(c(this,it).before(r),U(this,er,null),Ar(c(this,lt),()=>{U(this,lt,null)}),ke(this,xe,ss).call(this))}))},ri=function(){try{if(this.is_pending=this.has_pending_snippet(),U(this,_r,0),U(this,Fr,0),U(this,Gt,ht(()=>{c(this,jr).call(this,c(this,it))})),c(this,_r)>0){var t=U(this,er,document.createDocumentFragment());Ts(c(this,Gt),t);const r=c(this,Ht).pending;U(this,lt,ht(()=>r(c(this,it))))}else ke(this,xe,ss).call(this)}catch(r){this.error(r)}},ss=function(){this.is_pending=!1;for(const t of c(this,cn))me(t,Ue),Ft(t);for(const t of c(this,vn))me(t,vt),Ft(t);c(this,cn).clear(),c(this,vn).clear()},is=function(t){var r=V,s=G,i=pe;Pt(c(this,tt)),_t(c(this,tt)),Jr(c(this,tt).ctx);try{return t()}catch(l){return wi(l),null}finally{Pt(r),_t(s),Jr(i)}},ni=function(t){var r;if(!this.has_pending_snippet()){this.parent&&ke(r=this.parent,xe,ni).call(r,t);return}U(this,_r,c(this,_r)+t),c(this,_r)===0&&(ke(this,xe,ss).call(this),c(this,lt)&&Ar(c(this,lt),()=>{U(this,lt,null)}),c(this,er)&&(c(this,it).before(c(this,er)),U(this,er,null)))};function xo(e,t,r,s){const i=xn()?kn:Qe;var l=e.filter(x=>!x.settled);if(r.length===0&&l.length===0){s(t.map(i));return}var o=V,a=bo(),f=l.length===1?l[0].promise:l.length>1?Promise.all(l.map(x=>x.promise)):null;function u(x){a();try{s(x)}catch(k){(o.f&Ut)===0&&fr(k,o)}ms()}if(r.length===0){f.then(()=>u(t.map(i)));return}function d(){a(),Promise.all(r.map(x=>ko(x))).then(x=>u([...t.map(i),...x])).catch(x=>fr(x,o))}f?f.then(d):d()}function bo(){var e=V,t=G,r=pe,s=Z;return 
function(l=!0){Pt(e),_t(t),Jr(r),l&&(s==null||s.activate())}}function ms(e=!0){Pt(null),_t(null),Jr(null),e&&(Z==null||Z.deactivate())}function wo(){var e=V.b,t=Z,r=e.is_rendered();return e.update_pending_count(1),t.increment(r),()=>{e.update_pending_count(-1),t.decrement(r)}}function kn(e){var t=Oe|Ue,r=G!==null&&(G.f&Oe)!==0?G:null;return V!==null&&(V.f|=yr),{ctx:pe,deps:null,effects:null,equals:mi,f:t,fn:e,reactions:null,rv:0,v:qe,wv:0,parent:r??V,ac:null}}function ko(e,t,r){V===null&&Kl();var i=void 0,l=Nr(qe),o=!G,a=new Map;return Mo(()=>{var k;var f=vi();i=f.promise;try{Promise.resolve(e()).then(f.resolve,f.reject).finally(ms)}catch(S){f.reject(S),ms()}var u=Z;if(o){var d=wo();(k=a.get(u))==null||k.reject(Cr),a.delete(u),a.set(u,f)}const x=(S,E=void 0)=>{if(u.activate(),E)E!==Cr&&(l.f|=or,Zr(l,E));else{(l.f&or)!==0&&(l.f^=or),Zr(l,S);for(const[F,b]of a){if(a.delete(F),F===u)break;b.reject(Cr)}}d&&d()};f.promise.then(x,S=>x(null,S||"unknown"))}),ji(()=>{for(const f of a.values())f.reject(Cr)}),new Promise(f=>{function u(d){function x(){d===i?f(l):u(i)}d.then(x,x)}u(i)})}function Dr(e){const t=kn(e);return Yi(t),t}function Qe(e){const t=kn(e);return t.equals=xi,t}function yo(e){var t=e.effects;if(t!==null){e.effects=null;for(var r=0;r0&&!Ai&&Co()}return t}function Co(){Ai=!1;for(const e of bs)(e.f&Re)!==0&&me(e,vt),en(e)&&Ir(e);bs.clear()}function yn(e){Q(e,e.v+1)}function Mi(e,t){var r=e.reactions;if(r!==null)for(var s=xn(),i=r.length,l=0;l{if(Rr===l)return a();var f=G,u=Rr;_t(null),Vi(l);var d=a();return _t(f),Vi(u),d};return s&&r.set("length",Pe(e.length)),new Proxy(e,{defineProperty(a,f,u){(!("value"in u)||u.configurable===!1||u.enumerable===!1||u.writable===!1)&&so();var d=r.get(f);return d===void 0?o(()=>{var x=Pe(u.value);return r.set(f,x),x}):Q(d,u.value,!0),!0},deleteProperty(a,f){var u=r.get(f);if(u===void 0){if(f in a){const d=o(()=>Pe(qe));r.set(f,d),yn(i)}}else Q(u,qe),yn(i);return!0},get(a,f,u){var S;if(f===$r)return e;var d=r.get(f),x=f in 
a;if(d===void 0&&(!x||(S=br(a,f))!=null&&S.writable)&&(d=o(()=>{var E=Tr(x?a[f]:qe),F=Pe(E);return F}),r.set(f,d)),d!==void 0){var k=n(d);return k===qe?void 0:k}return Reflect.get(a,f,u)},getOwnPropertyDescriptor(a,f){var u=Reflect.getOwnPropertyDescriptor(a,f);if(u&&"value"in u){var d=r.get(f);d&&(u.value=n(d))}else if(u===void 0){var x=r.get(f),k=x==null?void 0:x.v;if(x!==void 0&&k!==qe)return{enumerable:!0,configurable:!0,value:k,writable:!0}}return u},has(a,f){var k;if(f===$r)return!0;var u=r.get(f),d=u!==void 0&&u.v!==qe||Reflect.has(a,f);if(u!==void 0||V!==null&&(!d||(k=br(a,f))!=null&&k.writable)){u===void 0&&(u=o(()=>{var S=d?Tr(a[f]):qe,E=Pe(S);return E}),r.set(f,u));var x=n(u);if(x===qe)return!1}return d},set(a,f,u,d){var ee;var x=r.get(f),k=f in a;if(s&&f==="length")for(var S=u;SPe(qe)),r.set(S+"",E))}if(x===void 0)(!k||(ee=br(a,f))!=null&&ee.writable)&&(x=o(()=>Pe(void 0)),Q(x,Tr(u)),r.set(f,x));else{k=x.v!==qe;var F=o(()=>Tr(u));Q(x,F)}var b=Reflect.getOwnPropertyDescriptor(a,f);if(b!=null&&b.set&&b.set.call(d,u),!k){if(s&&typeof f=="string"){var A=r.get("length"),ve=Number(f);Number.isInteger(ve)&&ve>=A.v&&Q(A,ve+1)}yn(i)}return!0},ownKeys(a){n(i);var f=Reflect.ownKeys(a).filter(x=>{var k=r.get(x);return k===void 0||k.v!==qe});for(var[u,d]of r)d.v!==qe&&!(u in a)&&f.push(u);return f},setPrototypeOf(){io()}})}var ws,Ri,Ii,Li;function ks(){if(ws===void 0){ws=window,Ri=/Firefox/.test(navigator.userAgent);var e=Element.prototype,t=Node.prototype,r=Text.prototype;Ii=br(t,"firstChild").get,Li=br(t,"nextSibling").get,ci(e)&&(e.__click=void 0,e.__className=void 0,e.__attributes=null,e.__style=void 0,e.__e=void 0),ci(r)&&(r.__t=void 0)}}function Xe(e=""){return document.createTextNode(e)}function dt(e){return Ii.call(e)}function Bt(e){return Li.call(e)}function $(e,t){if(!X)return dt(e);var r=dt(j);if(r===null)r=j.appendChild(Xe());else if(t&&r.nodeType!==gn){var s=Xe();return r==null||r.before(s),Ye(s),s}return t&&Hn(r),Ye(r),r}function pt(e,t=!1){if(!X){var 
r=dt(e);return r instanceof Comment&&r.data===""?Bt(r):r}if(t){if((j==null?void 0:j.nodeType)!==gn){var s=Xe();return j==null||j.before(s),Ye(s),s}Hn(j)}return j}function T(e,t=1,r=!1){let s=X?j:e;for(var i;t--;)i=s,s=Bt(s);if(!X)return s;if(r){if((s==null?void 0:s.nodeType)!==gn){var l=Xe();return s===null?i==null||i.after(l):s.before(l),Ye(l),l}Hn(s)}return Ye(s),s}function qi(e){e.textContent=""}function Oi(){return!1}function ys(e,t,r){return document.createElementNS(ai,e,void 0)}function Hn(e){if(e.nodeValue.length<65536)return;let t=e.nextSibling;for(;t!==null&&t.nodeType===gn;)t.remove(),e.nodeValue+=t.nodeValue,t=e.nextSibling}function Es(e){var t=G,r=V;_t(null),Pt(null);try{return e()}finally{_t(t),Pt(r)}}function Ui(e){V===null&&(G===null&&eo(),Zl()),cr&&Xl()}function So(e,t){var r=t.last;r===null?t.last=t.first=e:(r.next=e,e.prev=r,t.last=e)}function Tt(e,t){var r=V;r!==null&&(r.f&We)!==0&&(e|=We);var s={ctx:pe,deps:null,nodes:null,f:e|Ue|ct,first:null,fn:t,last:null,next:null,parent:r,b:r&&r.b,prev:null,teardown:null,wv:0,ac:null},i=s;if((e&Vr)!==0)Xr!==null?Xr.push(s):Ft(s);else if(t!==null){try{Ir(s)}catch(o){throw Ve(s),o}i.deps===null&&i.teardown===null&&i.nodes===null&&i.first===i.last&&(i.f&yr)===0&&(i=i.first,(e&ir)!==0&&(e&Kr)!==0&&i!==null&&(i.f|=Kr))}if(i!==null&&(i.parent=r,r!==null&&So(i,r),G!==null&&(G.f&Oe)!==0&&(e&lr)===0)){var l=G;(l.effects??(l.effects=[])).push(i)}return s}function $s(){return G!==null&&!At}function ji(e){const t=Tt(wr,null);return me(t,Re),t.teardown=e,t}function Gn(e){Ui();var t=V.f,r=!G&&(t&St)!==0&&(t&kr)===0;if(r){var s=pe;(s.e??(s.e=[])).push(e)}else return Fi(e)}function Fi(e){return Tt(Vr|pi,e)}function Do(e){return Ui(),Tt(wr|pi,e)}function No(e){Zt.ensure();const t=Tt(lr|yr,e);return()=>{Ve(t)}}function To(e){Zt.ensure();const t=Tt(lr|yr,e);return(r={})=>new Promise(s=>{r.outro?Ar(t,()=>{Ve(t),s(void 0)}):(Ve(t),s(void 0))})}function Bi(e){return Tt(Vr,e)}function Cs(e,t){var 
r=pe,s={effect:null,ran:!1,deps:e};r.l.$.push(s),s.effect=En(()=>{e(),!s.ran&&(s.ran=!0,v(t))})}function Ao(){var e=pe;En(()=>{for(var t of e.l.$){t.deps();var r=t.effect;(r.f&Re)!==0&&r.deps!==null&&me(r,vt),en(r)&&Ir(r),t.ran=!1}})}function Mo(e){return Tt(ps|yr,e)}function En(e,t=0){return Tt(wr|t,e)}function P(e,t=[],r=[],s=[]){xo(s,t,r,i=>{Tt(wr,()=>e(...i.map(n)))})}function Ss(e,t=0){var r=Tt(ir|t,e);return r}function ht(e){return Tt(St|yr,e)}function Pi(e){var t=e.teardown;if(t!==null){const r=cr,s=G;Wi(!0),_t(null);try{t.call(null)}finally{Wi(r),_t(s)}}}function Ds(e,t=!1){var r=e.first;for(e.first=e.last=null;r!==null;){const i=r.ac;i!==null&&Es(()=>{i.abort(Cr)});var s=r.next;(r.f&lr)!==0?r.parent=null:Ve(r,t),r=s}}function Ro(e){for(var t=e.first;t!==null;){var r=t.next;(t.f&St)===0&&Ve(t),t=r}}function Ve(e,t=!0){var r=!1;(t||(e.f&Yl)!==0)&&e.nodes!==null&&e.nodes.end!==null&&(Io(e.nodes.start,e.nodes.end),r=!0),Ds(e,t&&!r),$n(e,0),me(e,Ut);var s=e.nodes&&e.nodes.t;if(s!==null)for(const l of s)l.stop();Pi(e);var i=e.parent;i!==null&&i.first!==null&&zi(e),e.next=e.prev=e.teardown=e.ctx=e.deps=e.fn=e.nodes=e.ac=null}function Io(e,t){for(;e!==null;){var r=e===t?null:Bt(e);e.remove(),e=r}}function zi(e){var t=e.parent,r=e.prev,s=e.next;r!==null&&(r.next=s),s!==null&&(s.prev=r),t!==null&&(t.first===e&&(t.first=s),t.last===e&&(t.last=r))}function Ar(e,t,r=!0){var s=[];Hi(e,s,!0);var i=()=>{r&&Ve(e),t&&t()},l=s.length;if(l>0){var o=()=>--l||i();for(var a of s)a.out(o)}else i()}function Hi(e,t,r){if((e.f&We)===0){e.f^=We;var s=e.nodes&&e.nodes.t;if(s!==null)for(const a of s)(a.is_global||r)&&t.push(a);for(var i=e.first;i!==null;){var l=i.next,o=(i.f&Kr)!==0||(i.f&St)!==0&&(e.f&ir)!==0;Hi(i,t,o?r:!1),i=l}}}function Ns(e){Gi(e,!0)}function Gi(e,t){if((e.f&We)!==0){e.f^=We;for(var r=e.first;r!==null;){var s=r.next,i=(r.f&Kr)!==0||(r.f&St)!==0;Gi(r,i?t:!1),r=s}var l=e.nodes&&e.nodes.t;if(l!==null)for(const o of l)(o.is_global||t)&&o.in()}}function 
Ts(e,t){if(e.nodes)for(var r=e.nodes.start,s=e.nodes.end;r!==null;){var i=r===s?null:Bt(r);t.append(r),r=i}}let Wn=!1,cr=!1;function Wi(e){cr=e}let G=null,At=!1;function _t(e){G=e}let V=null;function Pt(e){V=e}let gt=null;function Yi(e){G!==null&&(gt===null?gt=[e]:gt.push(e))}let Ze=null,st=0,mt=null;function Lo(e){mt=e}let Qi=1,Mr=0,Rr=Mr;function Vi(e){Rr=e}function Ki(){return++Qi}function en(e){var t=e.f;if((t&Ue)!==0)return!0;if(t&Oe&&(e.f&=~Er),(t&vt)!==0){for(var r=e.deps,s=r.length,i=0;ie.wv)return!0}(t&ct)!==0&&je===null&&me(e,Re)}return!1}function Ji(e,t,r=!0){var s=e.reactions;if(s!==null&&!(gt!==null&&Qr.call(gt,e)))for(var i=0;i{e.ac.abort(Cr)}),e.ac=null);try{e.f|=ds;var d=e.fn,x=d();e.f|=kr;var k=e.deps,S=Z==null?void 0:Z.is_fork;if(Ze!==null){var E;if(S||$n(e,st),k!==null&&st>0)for(k.length=st+Ze.length,E=0;Er==null?void 0:r.call(this,l))}return e.startsWith("pointer")||e.startsWith("touch")||e==="wheel"?Xt(()=>{t.addEventListener(e,i,s)}):t.addEventListener(e,i,s),i}function Cn(e,t,r,s,i){var l={capture:s,passive:i},o=Uo(e,t,r,l);(t===document.body||t===window||t===document||t instanceof HTMLMediaElement)&&ji(()=>{t.removeEventListener(e,o,l)})}function Yn(e,t,r){(t[Lr]??(t[Lr]={}))[e]=r}function jo(e){for(var t=0;t{throw ve});throw k}}finally{e[Lr]=t,delete e.currentTarget,_t(d),Pt(x)}}}const Is=((yl=globalThis==null?void 0:globalThis.window)==null?void 0:yl.trustedTypes)&&globalThis.window.trustedTypes.createPolicy("svelte-trusted-html",{createHTML:e=>e});function Fo(e){return(Is==null?void 0:Is.createHTML(e))??e}function nl(e){var t=ys("template");return t.innerHTML=Fo(e.replaceAll("","")),t.content}function Mt(e,t){var r=V;r.nodes===null&&(r.nodes={start:e,end:t,a:null,t:null})}function N(e,t){var r=(t&ii)!==0,s=(t&Bl)!==0,i,l=!e.startsWith("");return()=>{if(X)return Mt(j,null),j;i===void 0&&(i=nl(l?e:""+e),r||(i=dt(i)));var o=s||Ri?document.importNode(i,!0):i.cloneNode(!0);if(r){var a=dt(o),f=o.lastChild;Mt(a,f)}else Mt(o,o);return o}}function 
Bo(e,t,r="svg"){var s=!e.startsWith(""),i=(t&ii)!==0,l=`<${r}>${s?e:""+e}`,o;return()=>{if(X)return Mt(j,null),j;if(!o){var a=nl(l),f=dt(a);if(i)for(o=document.createDocumentFragment();dt(f);)o.appendChild(dt(f));else o=dt(f)}var u=o.cloneNode(!0);if(i){var d=dt(u),x=u.lastChild;Mt(d,x)}else Mt(u,u);return u}}function Fe(e,t){return Bo(e,t,"svg")}function Po(e=""){if(!X){var t=Xe(e+"");return Mt(t,t),t}var r=j;return r.nodeType!==gn?(r.before(r=Xe()),Ye(r)):Hn(r),Mt(r,r),r}function tn(){if(X)return Mt(j,null),j;var e=document.createDocumentFragment(),t=document.createComment(""),r=Xe();return e.append(t,r),Mt(t,r),e}function w(e,t){if(X){var r=V;((r.f&kr)===0||r.nodes.end===null)&&(r.nodes.end=j),Fn();return}e!==null&&e.before(t)}const zo=["touchstart","touchmove"];function Ho(e){return zo.includes(e)}function K(e,t){var r=t==null?"":typeof t=="object"?`${t}`:t;r!==(e.__t??(e.__t=e.nodeValue))&&(e.__t=r,e.nodeValue=`${r}`)}function sl(e,t){return il(e,t)}function Go(e,t){ks(),t.intro=t.intro??!1;const r=t.target,s=X,i=j;try{for(var l=dt(r);l&&(l.nodeType!==mn||l.data!==li);)l=Bt(l);if(!l)throw Yr;Jt(!0),Ye(l);const o=il(e,{...t,anchor:l});return Jt(!1),o}catch(o){if(o instanceof Error&&o.message.split(` +`).some(a=>a.startsWith("https://svelte.dev/e/")))throw o;return o!==Yr&&console.warn("Failed to hydrate: ",o),t.recover===!1&&ro(),ks(),qi(r),Jt(!1),sl(e,t)}finally{Jt(s),Ye(i)}}const Qn=new Map;function il(e,{target:t,anchor:r,props:s={},events:i,context:l,intro:o=!0,transformError:a}){ks();var f=void 0,u=To(()=>{var d=r??t.appendChild(Xe());go(d,{pending:()=>{}},S=>{hs({});var E=pe;if(l&&(E.c=l),i&&(s.$$events=i),X&&Mt(S,null),f=e(S,s)||{},X&&(V.nodes.end=j,j===null||j.nodeType!==mn||j.data!==os))throw jn(),Yr;_s()},a);var x=new Set,k=S=>{for(var E=0;E{var b;for(var S of x)for(const A of[t,document]){var 
E=Qn.get(A),F=E.get(S);--F==0?(A.removeEventListener(S,Rs),E.delete(S),E.size===0&&Qn.delete(A)):E.set(S,F)}Ms.delete(k),d!==r&&((b=d.parentNode)==null||b.removeChild(d))}});return Ls.set(f,u),f}let Ls=new WeakMap;function Wo(e,t){const r=Ls.get(e);return r?(Ls.delete(e),r(t)):Promise.resolve()}class Yo{constructor(t,r=!0){we(this,"anchor");H(this,It,new Map);H(this,Wt,new Map);H(this,ot,new Map);H(this,Br,new Set);H(this,Tn,!0);H(this,An,t=>{if(c(this,It).has(t)){var r=c(this,It).get(t),s=c(this,Wt).get(r);if(s)Ns(s),c(this,Br).delete(r);else{var i=c(this,ot).get(r);i&&(i.effect.f&We)===0&&(c(this,Wt).set(r,i.effect),c(this,ot).delete(r),i.fragment.lastChild.remove(),this.anchor.before(i.fragment),s=i.effect)}for(const[l,o]of c(this,It)){if(c(this,It).delete(l),l===t)break;const a=c(this,ot).get(o);a&&(Ve(a.effect),c(this,ot).delete(o))}for(const[l,o]of c(this,Wt)){if(l===r||c(this,Br).has(l)||(o.f&We)!==0)continue;const a=()=>{if(Array.from(c(this,It).values()).includes(l)){var u=document.createDocumentFragment();Ts(o,u),u.append(Xe()),c(this,ot).set(l,{effect:o,fragment:u})}else Ve(o);c(this,Br).delete(l),c(this,Wt).delete(l)};c(this,Tn)||!s?(c(this,Br).add(l),Ar(o,a,!1)):a()}}});H(this,Zn,t=>{c(this,It).delete(t);const r=Array.from(c(this,It).values());for(const[s,i]of c(this,ot))r.includes(s)||(Ve(i.effect),c(this,ot).delete(s))});this.anchor=t,U(this,Tn,r)}ensure(t,r){var s=Z,i=Oi();if(r&&!c(this,Wt).has(t)&&!c(this,ot).has(t))if(i){var l=document.createDocumentFragment(),o=Xe();l.append(o),c(this,ot).set(t,{effect:ht(()=>r(o)),fragment:l})}else c(this,Wt).set(t,ht(()=>r(this.anchor)));if(c(this,It).set(s,t),i){for(const[a,f]of c(this,Wt))a===t?s.unskip_effect(f):s.skip_effect(f);for(const[a,f]of c(this,ot))a===t?s.unskip_effect(f.effect):s.skip_effect(f.effect);s.oncommit(c(this,An)),s.ondiscard(c(this,Zn))}else X&&(this.anchor=j),c(this,An).call(this,s)}}It=new WeakMap,Wt=new WeakMap,ot=new WeakMap,Br=new WeakMap,Tn=new WeakMap,An=new WeakMap,Zn=new 
WeakMap;function ll(e){pe===null&&_i(),fe&&pe.l!==null?Vo(pe).m.push(e):Gn(()=>{const t=v(e);if(typeof t=="function")return t})}function Qo(e){pe===null&&_i(),ll(()=>()=>v(e))}function Vo(e){var t=e.l;return t.u??(t.u={a:[],b:[],m:[]})}function ne(e,t,r=!1){var s;X&&(s=j,Fn());var i=new Yo(e),l=r?Kr:0;function o(a,f){if(X){var u=gi(s);if(a!==parseInt(u.substring(1))){var d=Bn();Ye(d),i.anchor=d,Jt(!1),i.ensure(a,f),Jt(!0);return}}i.ensure(a,f)}Ss(()=>{var a=!1;t((f,u=0)=>{a=!0,o(u,f)}),a||o(-1,null)},l)}function Vn(e,t){return t}function Ko(e,t,r){for(var s=[],i=t.length,l,o=t.length,a=0;a{if(l){if(l.pending.delete(x),l.done.add(x),l.pending.size===0){var k=e.outrogroups;qs(e,qn(l.done)),k.delete(l),k.size===0&&(e.outrogroups=null)}}else o-=1},!1)}if(o===0){var f=s.length===0&&r!==null;if(f){var u=r,d=u.parentNode;qi(d),d.append(u),e.items.clear()}qs(e,t,!f)}else l={pending:new Set(t),done:new Set},(e.outrogroups??(e.outrogroups=new Set)).add(l)}function qs(e,t,r=!0){var s;if(e.pending.size>0){s=new Set;for(const o of e.pending.values())for(const a of o)s.add(e.items.get(a).e)}for(var i=0;i{var ee=r();return fi(ee)?ee:ee==null?[]:qn(ee)}),k,S=new Map,E=!0;function F(ee){(ve.effect.f&Ut)===0&&(ve.pending.delete(ee),ve.fallback=d,Jo(ve,k,o,t,s),d!==null&&(k.length===0?(d.f&jt)===0?Ns(d):(d.f^=jt,Dn(d,null,o)):Ar(d,()=>{d=null})))}function b(ee){ve.pending.delete(ee)}var A=Ss(()=>{k=n(x);var ee=k.length;let W=!1;if(X){var ze=gi(o)===ls;ze!==(ee===0)&&(o=Bn(),Ye(o),Jt(!1),W=!0)}for(var Ae=new Set,Ie=Z,Lt=Oi(),wt=0;wtl(o)):(d=ht(()=>l(ol??(ol=Xe()))),d.f|=jt)),ee>Ae.size&&Jl(),X&&ee>0&&Ye(Bn()),!E)if(S.set(Ie,Ae),Lt){for(const[ie,Y]of a)Ae.has(ie)||Ie.skip_effect(Y.e);Ie.oncommit(F),Ie.ondiscard(b)}else F(Ie);W&&Jt(!0),n(x)}),ve={effect:A,items:a,pending:S,outrogroups:null,fallback:d};E=!1,X&&(o=j)}function Sn(e){for(;e!==null&&(e.f&St)===0;)e=e.next;return e}function Jo(e,t,r,s,i){var Yt,qt,M,ie,Y,le,Se,he,rt;var 
l=(s&Il)!==0,o=t.length,a=e.items,f=Sn(e.effect.first),u,d=null,x,k=[],S=[],E,F,b,A;if(l)for(A=0;A0){var wt=(s&si)!==0&&o===0?r:null;if(l){for(A=0;A{var He,gr;if(x!==void 0)for(b of x)(gr=(He=b.nodes)==null?void 0:He.a)==null||gr.apply()})}function Xo(e,t,r,s,i,l,o,a){var f=(o&Ml)!==0?(o&Ll)===0?Nt(r,!1,!1):Nr(r):null,u=(o&Rl)!==0?Nr(i):null;return{v:f,i:u,e:ht(()=>(l(t,f??r,u??i,a),()=>{e.delete(s)}))}}function Dn(e,t,r){if(e.nodes)for(var s=e.nodes.start,i=e.nodes.end,l=t&&(t.f&jt)===0?t.nodes.start:r;s!==null;){var o=Bt(s);if(l.before(s),s===i)return;s=o}}function vr(e,t,r){t===null?e.effect.first=r:t.next=r,r===null?e.effect.last=t:r.prev=t}function al(e,t){Bi(()=>{var r=e.getRootNode(),s=r.host?r:r.head??r.ownerDocument.head;if(!s.querySelector("#"+t.hash)){const i=ys("style");i.id=t.hash,i.textContent=t.code,s.appendChild(i)}})}const fl=[...` +\r\f \v\uFEFF`];function Zo(e,t,r){var s=e==null?"":""+e;if(t&&(s=s?s+" "+t:t),r){for(var i of Object.keys(r))if(r[i])s=s?s+" "+i:i;else if(s.length)for(var l=i.length,o=0;(o=s.indexOf(i,o))>=0;){var a=o+l;(o===0||fl.includes(s[o-1]))&&(a===s.length||fl.includes(s[a]))?s=(o===0?"":s.substring(0,o))+s.substring(a+1):o=a}}return s===""?null:s}function ea(e,t){return e==null?null:String(e)}function ge(e,t,r,s,i,l){var o=e.__className;if(X||o!==r||o===void 0){var a=Zo(r,s,l);(!X||a!==e.getAttribute("class"))&&(a==null?e.removeAttribute("class"):t?e.className=a:e.setAttribute("class",a)),e.__className=r}else if(l&&i!==l)for(var f in l){var u=!!l[f];(i==null||u!==!!i[f])&&e.classList.toggle(f,u)}return l}function qr(e,t,r,s){var i=e.__style;if(X||i!==t){var l=ea(t);(!X||l!==e.getAttribute("style"))&&(l==null?e.removeAttribute("style"):e.style.cssText=l),e.__style=t}return s}const ta=Symbol("is custom element"),ra=Symbol("is html"),na=Vl?"link":"LINK";function Or(e,t,r,s){var 
i=sa(e);X&&(i[t]=e.getAttribute(t),t==="src"||t==="srcset"||t==="href"&&e.nodeName===na)||i[t]!==(i[t]=r)&&(t==="loading"&&(e[Ql]=r),r==null?e.removeAttribute(t):typeof r!="string"&&ia(e).includes(t)?e[t]=r:e.setAttribute(t,r))}function sa(e){return e.__attributes??(e.__attributes={[ta]:e.nodeName.includes("-"),[ra]:e.namespaceURI===ai})}var ul=new Map;function ia(e){var t=e.getAttribute("is")||e.nodeName,r=ul.get(t);if(r)return r;ul.set(t,r=[]);for(var s,i=e,l=Element.prototype;l!==i;){s=ui(i);for(var o in s)s[o].set&&r.push(o);i=fs(i)}return r}function cl(e,t){return e===t||(e==null?void 0:e[$r])===t}function vl(e={},t,r,s){return Bi(()=>{var i,l;return En(()=>{i=l,l=[],v(()=>{e!==r(...l)&&(t(e,...l),i&&cl(r(...i),e)&&t(null,...i))})}),()=>{Xt(()=>{l&&cl(r(...l),e)&&t(null,...l)})}}),e}function la(e=!1){const t=pe,r=t.l.u;if(!r)return;let s=()=>h(t.s);if(e){let i=0,l={};const o=kn(()=>{let a=!1;const f=t.s;for(const u in f)f[u]!==l[u]&&(l[u]=f[u],a=!0);return a&&i++,i});s=()=>n(o)}r.b.length&&Do(()=>{dl(t,s),us(r.b)}),Gn(()=>{const i=v(()=>r.m.map(Wl));return()=>{for(const l of i)typeof l=="function"&&l()}}),r.a.length&&Gn(()=>{dl(t,s),us(r.a)})}function dl(e,t){if(e.l.s)for(const r of e.l.s)n(r);t()}let Kn=!1;function oa(e){var t=Kn;try{return Kn=!1,[e(),Kn]}finally{Kn=t}}function Os(e,t,r,s){var ee;var i=!fe||(r&Ol)!==0,l=(r&jl)!==0,o=(r&Fl)!==0,a=s,f=!0,u=()=>(f&&(f=!1,a=o?v(s):s),a),d;if(l){var x=$r in e||hi in e;d=((ee=br(e,t))==null?void 0:ee.set)??(x&&t in e?W=>e[t]=W:void 0)}var k,S=!1;l?[k,S]=oa(()=>e[t]):k=e[t],k===void 0&&s!==void 0&&(k=u(),d&&(i&&no(),d(k)));var E;if(i?E=()=>{var W=e[t];return W===void 0?u():(f=!0,W)}:E=()=>{var W=e[t];return W!==void 0&&(a=void 0),W===void 0?a:W},i&&(r&Ul)===0)return E;if(d){var F=e.$$legacy;return(function(W,ze){return arguments.length>0?((!i||!ze||F||S)&&d(ze?E():W),W):E()})}var b=!1,A=((r&ql)!==0?kn:Qe)(()=>(b=!1,E()));l&&n(A);var ve=V;return(function(W,ze){if(arguments.length>0){const 
Ae=ze?n(A):i&&l?Tr(W):W;return Q(A,Ae),b=!0,a!==void 0&&(a=Ae),W}return cr&&b||(ve.f&Ut)!==0?A.v:n(A)})}function aa(e){return new fa(e)}class fa{constructor(t){H(this,rr);H(this,bt);var l;var r=new Map,s=(o,a)=>{var f=Nt(a,!1,!1);return r.set(o,f),f};const i=new Proxy({...t.props||{},$$events:{}},{get(o,a){return n(r.get(a)??s(a,Reflect.get(o,a)))},has(o,a){return a===hi?!0:(n(r.get(a)??s(a,Reflect.get(o,a))),Reflect.has(o,a))},set(o,a,f){return Q(r.get(a)??s(a,f),f),Reflect.set(o,a,f)}});U(this,bt,(t.hydrate?Go:sl)(t.component,{target:t.target,anchor:t.anchor,props:i,context:t.context,intro:t.intro??!1,recover:t.recover,transformError:t.transformError})),(!((l=t==null?void 0:t.props)!=null&&l.$$host)||t.sync===!1)&&wn(),U(this,rr,i.$$events);for(const o of Object.keys(c(this,bt)))o==="$set"||o==="$destroy"||o==="$on"||Un(this,o,{get(){return c(this,bt)[o]},set(a){c(this,bt)[o]=a},enumerable:!0});c(this,bt).$set=o=>{Object.assign(i,o)},c(this,bt).$destroy=()=>{Wo(c(this,bt))}}$set(t){c(this,bt).$set(t)}$on(t,r){c(this,rr)[t]=c(this,rr)[t]||[];const s=(...i)=>r.call(this,...i);return c(this,rr)[t].push(s),()=>{c(this,rr)[t]=c(this,rr)[t].filter(i=>i!==s)}}$destroy(){c(this,bt).$destroy()}}rr=new WeakMap,bt=new WeakMap;let pl;typeof HTMLElement=="function"&&(pl=class extends HTMLElement{constructor(t,r,s){super();we(this,"$$ctor");we(this,"$$s");we(this,"$$c");we(this,"$$cn",!1);we(this,"$$d",{});we(this,"$$r",!1);we(this,"$$p_d",{});we(this,"$$l",{});we(this,"$$l_u",new Map);we(this,"$$me");we(this,"$$shadowRoot",null);this.$$ctor=t,this.$$s=r,s&&(this.$$shadowRoot=this.attachShadow(s))}addEventListener(t,r,s){if(this.$$l[t]=this.$$l[t]||[],this.$$l[t].push(r),this.$$c){const i=this.$$c.$on(t,r);this.$$l_u.set(r,i)}super.addEventListener(t,r,s)}removeEventListener(t,r,s){if(super.removeEventListener(t,r,s),this.$$c){const i=this.$$l_u.get(r);i&&(i(),this.$$l_u.delete(r))}}async connectedCallback(){if(this.$$cn=!0,!this.$$c){let t=function(i){return l=>{const 
o=ys("slot");i!=="default"&&(o.name=i),w(l,o)}};if(await Promise.resolve(),!this.$$cn||this.$$c)return;const r={},s=ua(this);for(const i of this.$$s)i in s&&(i==="default"&&!this.$$d.children?(this.$$d.children=t(i),r.default=!0):r[i]=t(i));for(const i of this.attributes){const l=this.$$g_p(i.name);l in this.$$d||(this.$$d[l]=Jn(l,i.value,this.$$p_d,"toProp"))}for(const i in this.$$p_d)!(i in this.$$d)&&this[i]!==void 0&&(this.$$d[i]=this[i],delete this[i]);this.$$c=aa({component:this.$$ctor,target:this.$$shadowRoot||this,props:{...this.$$d,$$slots:r,$$host:this}}),this.$$me=No(()=>{En(()=>{var i;this.$$r=!0;for(const l of On(this.$$c)){if(!((i=this.$$p_d[l])!=null&&i.reflect))continue;this.$$d[l]=this.$$c[l];const o=Jn(l,this.$$d[l],this.$$p_d,"toAttribute");o==null?this.removeAttribute(this.$$p_d[l].attribute||l):this.setAttribute(this.$$p_d[l].attribute||l,o)}this.$$r=!1})});for(const i in this.$$l)for(const l of this.$$l[i]){const o=this.$$c.$on(i,l);this.$$l_u.set(l,o)}this.$$l={}}}attributeChangedCallback(t,r,s){var i;this.$$r||(t=this.$$g_p(t),this.$$d[t]=Jn(t,s,this.$$p_d,"toProp"),(i=this.$$c)==null||i.$set({[t]:this.$$d[t]}))}disconnectedCallback(){this.$$cn=!1,Promise.resolve().then(()=>{!this.$$cn&&this.$$c&&(this.$$c.$destroy(),this.$$me(),this.$$c=void 0)})}$$g_p(t){return On(this.$$p_d).find(r=>this.$$p_d[r].attribute===t||!this.$$p_d[r].attribute&&r.toLowerCase()===t)||t}});function Jn(e,t,r,s){var l;const i=(l=r[e])==null?void 0:l.type;if(t=i==="Boolean"&&typeof t!="boolean"?t!=null:t,!s||!r[e])return t;if(s==="toAttribute")switch(i){case"Object":case"Array":return t==null?null:JSON.stringify(t);case"Boolean":return t?"":null;case"Number":return t??null;default:return t}else switch(i){case"Object":case"Array":return t&&JSON.parse(t);case"Boolean":return t;case"Number":return t!=null?+t:t;default:return t}}function ua(e){const t={};return e.childNodes.forEach(r=>{t[r.slot||"default"]=!0}),t}function hl(e,t,r,s,i,l){let o=class extends 
pl{constructor(){super(e,r,i),this.$$p_d=t}static get observedAttributes(){return On(t).map(a=>(t[a].attribute||a).toLowerCase())}};return On(t).forEach(a=>{Un(o.prototype,a,{get(){return this.$$c&&a in this.$$c?this.$$c[a]:this.$$d[a]},set(f){var x;f=Jn(a,f,t),this.$$d[a]=f;var u=this.$$c;if(u){var d=(x=br(u,a))==null?void 0:x.get;d?u[a]=f:u.$set({[a]:f})}}})}),s.forEach(a=>{Un(o.prototype,a,{get(){var f;return(f=this.$$c)==null?void 0:f[a]}})}),e.element=o,o}async function _l(e,t){const r=t?`/api/orgs/${e}/projects/${t}/timeline`:`/api/orgs/${e}/timeline`,s=await fetch(r,{credentials:"same-origin"});if(!s.ok)throw new Error(`Timeline fetch failed: ${s.status}`);return s.json()}function ca(e,t,r){const s=t?`/orgs/${e}/projects/${t}/events`:`/orgs/${e}/events`;let i=1e3,l=null,o=!1;function a(){if(!o){l=new EventSource(s),l.addEventListener("open",()=>{i=1e3});for(const f of["destination","release","artifact","pipeline"])l.addEventListener(f,u=>{try{const d=JSON.parse(u.data);r(f,d)}catch(d){console.warn(`[release-timeline] bad ${f} event:`,d)}});l.addEventListener("error",()=>{l.close(),o||(setTimeout(a,i),i=Math.min(i*2,3e4))})}}return a(),()=>{o=!0,l&&l.close()}}function gl(e){if(e<0&&(e=0),e<60)return`${e}s`;const t=Math.floor(e/60),r=e%60;return t<60?`${t}m ${r}s`:`${Math.floor(t/60)}h ${t%60}m`}function rn(e){if(!e)return"";const t=new Date(e),r=Date.now(),s=Math.floor((r-t.getTime())/1e3);return s<10?"just now":s<60?`${s}s ago`:s<3600?`${Math.floor(s/60)}m ago`:s<86400?`${Math.floor(s/3600)}h ago`:`${Math.floor(s/86400)}d ago`}const Us={prod:["#ec4899","#fce7f3"],production:["#ec4899","#fce7f3"],preprod:["#f97316","#ffedd5"],"pre-prod":["#f97316","#ffedd5"],staging:["#eab308","#fef9c3"],stage:["#eab308","#fef9c3"],dev:["#8b5cf6","#ede9fe"],development:["#8b5cf6","#ede9fe"],test:["#06b6d4","#cffafe"]},va=["#6b7280","#e5e7eb"];function da(e){const t=e.toLowerCase();if(Us[t])return Us[t];for(const[r,s]of Object.entries(Us))if(t.includes(r))return s;return 
va}function dr(e){const t=e.toLowerCase();return t.includes("prod")&&!t.includes("preprod")&&!t.includes("pre-prod")?{bg:"bg-pink-100 text-pink-800",dot:"bg-pink-500"}:t.includes("preprod")||t.includes("pre-prod")?{bg:"bg-orange-100 text-orange-800",dot:"bg-orange-500"}:t.includes("stag")?{bg:"bg-yellow-100 text-yellow-800",dot:"bg-yellow-500"}:t.includes("dev")?{bg:"bg-violet-100 text-violet-800",dot:"bg-violet-500"}:{bg:"bg-gray-100 text-gray-700",dot:"bg-gray-400"}}function ml(e){switch(e){case"SUCCEEDED":return"bg-green-500";case"RUNNING":return"bg-yellow-500";case"FAILED":return"bg-red-500";default:return null}}const js={SUCCEEDED:{label:"Deployed to",stageLabel:"Deployed to",color:"text-green-600",icon:"check-circle",iconColor:"text-green-500"},RUNNING:{label:"Deploying to",stageLabel:"Deploying to",color:"text-yellow-700",icon:"pulse",iconColor:"text-yellow-500"},ASSIGNED:{label:"Deploying to",stageLabel:"Deploying to",color:"text-yellow-700",icon:"pulse",iconColor:"text-yellow-500"},QUEUED:{label:"Queued for",stageLabel:"Queued for",color:"text-blue-600",icon:"clock",iconColor:"text-blue-400"},FAILED:{label:"Failed on",stageLabel:"Failed on",color:"text-red-600",icon:"x-circle",iconColor:"text-red-500"},TIMED_OUT:{label:"Timed out on",stageLabel:"Timed out on",color:"text-orange-600",icon:"clock",iconColor:"text-orange-500"},CANCELLED:{label:"Cancelled",stageLabel:"Cancelled",color:"text-gray-500",icon:"ban",iconColor:"text-gray-400"}};function nn(e){if(!e||e.length===0)return null;let t=!0,r=!1,s=!1,i=!1,l=!1,o=0;const a=e.length;for(const f of e)f.status==="SUCCEEDED"&&o++,f.status!=="SUCCEEDED"&&(t=!1),f.status==="FAILED"&&(r=!0),f.status==="RUNNING"&&(s=!0),f.status==="QUEUED"&&(l=!0),f.stage_type==="wait"&&f.status==="RUNNING"&&(i=!0);return t?{label:"Pipeline complete",color:"text-gray-600",icon:"check-circle",iconColor:"text-green-500",done:o,total:a}:r?{label:"Pipeline 
failed",color:"text-red-600",icon:"x-circle",iconColor:"text-red-500",done:o,total:a}:i?{label:"Waiting for time window",color:"text-yellow-700",icon:"clock",iconColor:"text-yellow-500",done:o,total:a}:s?{label:"Deploying to",color:"text-yellow-700",icon:"pulse",iconColor:"text-yellow-500",done:o,total:a}:l?{label:"Queued",color:"text-blue-600",icon:"clock",iconColor:"text-blue-400",done:o,total:a}:{label:"Pipeline pending",color:"text-gray-400",icon:"pending",iconColor:"text-gray-300",done:o,total:a}}function xl(e){switch(e){case"SUCCEEDED":return"Waited";case"RUNNING":return"Waiting";case"FAILED":return"Wait failed";case"CANCELLED":return"Wait cancelled";default:return"Wait"}}function bl(e){switch(e){case"SUCCEEDED":return"Deployed to";case"RUNNING":return"Deploying to";case"QUEUED":return"Queued for";case"FAILED":return"Failed on";case"TIMED_OUT":return"Timed out on";case"CANCELLED":return"Cancelled";default:return"Deploy to"}}var pa=N('

Loading releases...

'),ha=N('

'),_a=N('

No releases yet.

Create a release with forest release create

'),ga=N('
'),ma=N('
'),xa=N('
'),ba=N(" ",1),wa=N('
'),ka=N(' '),ya=N(' '),Ea=N(' '),$a=N(' '),Ca=N(' Deployed',1),Sa=N(' Queued',1),Da=Fe('',1),Na=N(''),Ta=Fe(''),Aa=Fe(''),Ma=Fe(''),Ra=Fe(''),Ia=N(" "),La=N(' ',1),qa=N(' Deployed',1),Oa=N(''),Ua=Fe(''),ja=Fe(''),Fa=N(" "),Ba=N(" ",1),Pa=N(' Pending',1),za=N('

'),Ha=N(' '),Ga=Fe(''),Wa=N(''),Ya=Fe(''),Qa=Fe(''),Va=Fe(''),Ka=N(" ",1),Ja=N(" "),Xa=N(' '),Za=N('
pipeline
'),ef=N('
'),tf=Fe(''),rf=N(''),nf=Fe(''),sf=Fe(''),lf=Fe(''),of=N('Deployed'),af=N('Deploying'),ff=N(' '),uf=N('Failed'),cf=N(''),vf=N('
'),df=N(''),pf=N(' '),hf=N(''),_f=N('
·
'),gf=N('
'),mf=N('
');const xf={hash:"svelte-4kxpm1",code:` + @keyframes svelte-4kxpm1-lane-pulse { + 0%, 100% { opacity: 0.6; } + 50% { opacity: 1; } + }.lane-pulse { + animation: svelte-4kxpm1-lane-pulse 2s ease-in-out infinite;}`};function bf(e,t){hs(t,!1),al(e,xf);const r=Nt(),s=Nt();let i=Os(t,"org",12,""),l=Os(t,"project",12,""),o=Nt([]),a=Nt([]),f=Nt(!0),u=Nt(null),d=Nt(null),x=Date.now(),k=null,S=Nt(null),E=Nt({});const F=20,b=4,A=12,ve=new Set(["QUEUED","RUNNING","ASSIGNED"]),ee=new Set(["SUCCEEDED"]);let W=null;function ze(){W||(W=setTimeout(()=>{W=null,Ie()},300))}async function Ae(){try{Q(u,null);const _=await _l(i(),l());Lt(_.timeline,_.lanes),Q(f,!1),he()}catch(_){Q(u,_.message),Q(f,!1)}}async function Ie(){try{const _=await _l(i(),l());Lt(_.timeline,_.lanes),he()}catch(_){console.warn("[release-timeline] refresh failed:",_)}}function Lt(_,m){const D=new Map;for(const z of n(o))z.kind==="release"&&z.release&&D.set(z.release.slug,z);const J=_.map(z=>{if(z.kind!=="release"||!z.release)return z;const te=D.get(z.release.slug);if(!te)return z;const L=te.release,R=z.release;return L.dest_envs===R.dest_envs&&L.has_pipeline===R.has_pipeline&&wt(L.pipeline_stages,R.pipeline_stages)&&Yt(L.destinations,R.destinations)?te:z});Q(o,J),Q(a,m)}function wt(_,m){if(_.length!==m.length)return!1;for(let D=0;D<_.length;D++)if(_[D].status!==m[D].status||_[D].started_at!==m[D].started_at||_[D].completed_at!==m[D].completed_at)return!1;return!0}function Yt(_,m){if(_.length!==m.length)return!1;for(let D=0;D<_.length;D++)if(_[D].status!==m[D].status||_[D].completed_at!==m[D].completed_at)return!1;return!0}function qt(_,m){_==="destination"&&m.action==="status_changed"?M(m):_==="release"?m.action==="created"?ze():(m.action==="status_changed"||m.action==="updated")&&ie(m):_==="artifact"&&(m.action==="created"||m.action==="updated")?ze():_==="pipeline"&&Y(m)}function M(_){var te,L,R;const m=(te=_.metadata)==null?void 0:te.status,D=((L=_.metadata)==null?void 
0:L.destination_name)||_.resource_id,J=(R=_.metadata)==null?void 0:R.environment;if(!m||!D)return;let z=!1;Q(o,n(o).map(se=>{if(se.kind!=="release"||!se.release)return se;const De=se.release;if(De.destinations.findIndex(ue=>ue.name===D)===-1)return se;z=!0;const Le=De.destinations.map(ue=>ue.name===D?{...ue,status:m,...["SUCCEEDED","FAILED","TIMED_OUT","CANCELLED"].includes(m)?{completed_at:new Date().toISOString()}:{}}:ue),p=Le.map(ue=>`${ue.environment}:${ue.status||"PENDING"}`).join(","),Be=J?De.pipeline_stages.map(ue=>ue.stage_type==="deploy"&&ue.environment===J?{...ue,status:m==="ASSIGNED"?"RUNNING":m}:ue):De.pipeline_stages;return{...se,release:{...De,destinations:Le,dest_envs:p,pipeline_stages:Be}}})),z&&he()}function ie(_){var J,z;const m=(J=_.metadata)==null?void 0:J.status,D=(z=_.metadata)==null?void 0:z.environment;m&&D?M(_):ze()}function Y(_){var te,L,R;const m=(te=_.metadata)==null?void 0:te.status,D=(L=_.metadata)==null?void 0:L.environment,J=(R=_.metadata)==null?void 0:R.stage_type;if(!m){(_.action==="created"||_.action==="updated")&&ze();return}let z=!1;Q(o,n(o).map(se=>{if(se.kind!=="release"||!se.release)return se;const De=se.release;let Ke=!1;const Le=De.pipeline_stages.map(p=>D&&p.stage_type==="deploy"&&p.environment===D?(Ke=!0,{...p,status:m,...p.started_at?{}:{started_at:new Date().toISOString()}}):J==="wait"&&p.stage_type==="wait"?(Ke=!0,{...p,status:m}):p);return Ke?(z=!0,{...se,release:{...De,pipeline_stages:Le}}):se})),z&&he()}function le(_){return _?_.split(",").map(m=>m.trim()).filter(Boolean).map(m=>{const D=m.indexOf(":");return D===-1?{env:m,status:"SUCCEEDED"}:{env:m.slice(0,D),status:m.slice(D+1)}}):[]}let Se=null;function he(){Se||(Se=requestAnimationFrame(()=>{Se=null,Oo().then(rt)}))}function rt(){if(!n(S))return;const _=n(S).getBoundingClientRect();if(_.height===0)return;const m=_.height,D=Array.from(n(S).querySelectorAll("[data-release]")),J={};for(const z of n(a)){const te=z.name;let L=null,R=null,se=-1,De=-1;for(let 
Ee=0;Eexr.env===te))continue;const mr=(Ee.querySelector("[data-avatar]")||Ee).getBoundingClientRect();nr.push(mr.top+mr.height/2-_.top)}J[te]={solidH:p,hasHatch:Be,hatchTop:ue,hatchH:yt,isForward:Ot,dots:nr,color:da(te)}}Q(E,J)}const He=new Map;function gr(_,m){const D=`${_}|${m}`;let J=He.get(D);if(J)return J;const z=``;return J=`url("data:image/svg+xml,${encodeURIComponent(z)}")`,He.set(D,J),J}ll(()=>{Ae(),k=setInterval(()=>{x=Date.now()},1e4)}),Qo(()=>{n(d)&&n(d)(),k&&clearInterval(k),W&&clearTimeout(W),Se&&cancelAnimationFrame(Se)});function dn(){he()}function Bs(_,m,D){if(!_)return"";const J=new Date(_).getTime();if(isNaN(J))return"";if(m&&D!=="RUNNING"&&D!=="QUEUED"){const z=new Date(m).getTime();if(!isNaN(z))return gl(Math.floor((z-J)/1e3))}return gl(Math.floor((x-J)/1e3))}function Ps(_){var m;return _.kind==="release"&&_.release?`r:${_.release.slug}`:_.kind==="hidden"?`h:${_.count}:${((m=(_.releases||[])[0])==null?void 0:m.slug)||""}`:`u:${Math.random()}`}function zs(_,m){if(!_)return!1;switch(_.label){case"Pipeline complete":return m==="SUCCEEDED";case"Pipeline failed":return m==="FAILED"||m==="RUNNING"||m==="ASSIGNED";case"Deploying to":return m==="RUNNING"||m==="ASSIGNED";case"Queued":return m==="QUEUED";case"Waiting for time window":return m==="RUNNING"||m==="ASSIGNED";default:return m!=="PENDING"&&m!=="SUCCEEDED"}}Cs(()=>(n(f),n(u),h(i()),n(d),h(l())),()=>{!n(f)&&!n(u)&&i()&&!n(d)&&Q(d,ca(i(),l(),qt))}),Cs(()=>n(a),()=>{Q(r,n(a).length)}),Cs(()=>n(r),()=>{Q(s,n(r)*(F+b)+8)}),Ao();var Pr={get org(){return i()},set org(_){i(_),wn()},get project(){return l()},set project(_){l(_),wn()}};la();var es=tn();Cn("resize",ws,dn);var Hs=pt(es);{var be=_=>{var m=pa();w(_,m)},ye=_=>{var m=ha(),D=$(m),J=$(D,!0);y(D);var z=T(D,2);y(m),P(()=>K(J,n(u))),Cn("click",z,Ae),w(_,m)},kt=_=>{var m=_a();w(_,m)},zr=_=>{var m=mf(),D=$(m);xt(D,5,()=>n(a),te=>te.name,(te,L)=>{const R=Qe(()=>(n(E),n(L),v(()=>n(E)[n(L).name]))),se=Qe(()=>{const[p,Be]=(h(n(R)),n(L),v(()=>{var 
ue;return((ue=n(R))==null?void 0:ue.color)||[n(L).color,"#e5e7eb"]}));return{barColor:p,lightColor:Be}});var De=wa();qr(De,"width: 20px; margin-right: 4px; position: relative;");var Ke=$(De);{var Le=p=>{var Be=ba(),ue=pt(Be);{var yt=$e=>{var Ne=ga();P(Ce=>qr(Ne,`position: absolute; left: 0; width: 100%; top: ${h(n(R)),v(()=>n(R).hatchTop)??""}px; height: ${h(n(R)),v(()=>n(R).hatchH+(n(R).solidH>0?F/2:0))??""}px; background-image: ${Ce??""}; background-size: 8px 8px; background-repeat: repeat; border-radius: 9999px; z-index: 0;`),[()=>(h(n(R)),h(n(se).barColor),h(n(se).lightColor),v(()=>n(R).isForward?gr(n(se).barColor,n(se).lightColor):gr("#f59e0b","#fef3c7")))]),w($e,Ne)};ne(ue,$e=>{h(n(R)),v(()=>n(R).hasHatch)&&$e(yt)})}var Ot=T(ue,2);{var nr=$e=>{var Ne=ma();P(()=>qr(Ne,`position: absolute; bottom: 0; left: 0; width: 100%; height: ${h(n(R)),v(()=>n(R).solidH+(n(R).hasHatch?F/2:0))??""}px; background: ${n(se).barColor??""}; border-radius: 9999px; z-index: 1;`)),w($e,Ne)};ne(Ot,$e=>{h(n(R)),v(()=>n(R).solidH>0)&&$e(nr)})}var Ee=T(Ot,2);xt(Ee,1,()=>(h(n(R)),v(()=>n(R).dots)),Vn,($e,Ne)=>{var Ce=xa();P(()=>qr(Ce,`position: absolute; left: 50%; transform: translateX(-50%); top: ${n(Ne)-A/2}px; width: 12px; height: 12px; border-radius: 50%; background: #fff; border: 2px solid ${n(se).barColor??""}; z-index: 2;`)),w($e,Ce)}),w(p,Be)};ne(Ke,p=>{n(R)&&p(Le)})}y(De),w(te,De)}),y(D);var J=T(D,2);xt(J,5,()=>n(o),te=>Ps(te),(te,L)=>{var R=tn(),se=pt(R);{var De=Le=>{const p=Qe(()=>(n(L),v(()=>n(L).release)));var Be=df(),ue=$(Be),yt=$(ue),Ot=T($(yt),2),nr=$(Ot,!0);y(Ot),y(yt);var Ee=T(yt,2),$e=$(Ee);{var Ne=I=>{var g=ka(),oe=T($(g));y(g),P(()=>K(oe,` ${h(n(p)),v(()=>n(p).branch)??""}`)),w(I,g)};ne($e,I=>{h(n(p)),v(()=>n(p).branch)&&I(Ne)})}var Ce=T($e,2);{var mr=I=>{var g=ya(),oe=$(g,!0);y(g),P(C=>K(oe,C),[()=>(h(n(p)),v(()=>n(p).commit_sha.slice(0,7)))]),w(I,g)};ne(Ce,I=>{h(n(p)),v(()=>n(p).commit_sha)&&I(mr)})}var xr=T(Ce,2),Mn=$(xr,!0);y(xr);var pn=T(xr,2);{var Gs=I=>{var 
g=Ea(),oe=T($(g),2),C=$(oe,!0);y(oe),y(g),P(()=>{Or(oe,"href",`/users/${h(n(p)),v(()=>n(p).source_user)??""}`),K(C,(h(n(p)),v(()=>n(p).source_user)))}),w(I,g)};ne(pn,I=>{h(n(p)),v(()=>n(p).source_user)&&I(Gs)})}var ts=T(pn,2);{var rs=I=>{var g=$a(),oe=$(g,!0);y(g),P(()=>{Or(g,"href",`/orgs/${i()??""}/projects/${h(n(p)),v(()=>n(p).project_name)??""}`),K(oe,(h(n(p)),v(()=>n(p).project_name)))}),w(I,g)};ne(ts,I=>{h(n(p)),h(l()),v(()=>n(p).project_name&&n(p).project_name!==l())&&I(rs)})}y(Ee),y(ue);var Rn=T(ue,2),hn=$(Rn),Ws=$(hn);{var Hr=I=>{const g=Qe(()=>(h(n(p)),v(()=>n(p).env_groups&&n(p).env_groups.length>0&&n(p).env_groups.every(_e=>_e.status==="SUCCEEDED"))));var oe=Da(),C=T(pt(oe));{var at=_e=>{var nt=Ca();ar(2),w(_e,nt)},ft=_e=>{var nt=Sa();ar(2),w(_e,nt)};ne(C,_e=>{n(g)?_e(at):_e(ft,-1)})}w(I,oe)},In=Dr(()=>(h(n(p)),h(nn),v(()=>n(p).has_pipeline&&!nn(n(p).pipeline_stages)))),Ys=I=>{const g=Qe(()=>(h(nn),h(n(p)),v(()=>nn(n(p).pipeline_stages))));var oe=La(),C=T(pt(oe),2);{var at=ae=>{var B=Na();w(ae,B)},ft=ae=>{var B=Ta();P(()=>ge(B,0,`w-4 h-4 ${h(n(g)),v(()=>n(g).iconColor)??""} shrink-0`,"svelte-4kxpm1")),w(ae,B)},_e=ae=>{var B=Aa();P(()=>ge(B,0,`w-4 h-4 ${h(n(g)),v(()=>n(g).iconColor)??""} shrink-0`,"svelte-4kxpm1")),w(ae,B)},nt=ae=>{var B=Ma();P(()=>ge(B,0,`w-4 h-4 ${h(n(g)),v(()=>n(g).iconColor)??""} shrink-0`,"svelte-4kxpm1")),w(ae,B)},Gr=ae=>{var B=Ra();w(ae,B)};ne(C,ae=>{h(n(g)),v(()=>n(g).icon==="pulse")?ae(at):(h(n(g)),v(()=>n(g).icon==="check-circle")?ae(ft,1):(h(n(g)),v(()=>n(g).icon==="x-circle")?ae(_e,2):(h(n(g)),v(()=>n(g).icon==="clock")?ae(nt,3):ae(Gr,-1))))})}var Qt=T(C,2),Et=$(Qt,!0);y(Qt);var $t=T(Qt,2);xt($t,1,()=>(h(n(p)),v(()=>n(p).pipeline_stages)),ae=>ae.id||ae.environment||ae.stage_type,(ae,B)=>{var Kt=tn(),re=pt(Kt);{var ce=Ge=>{const ut=Qe(()=>(h(dr),n(B),v(()=>dr(n(B).environment||"")))),sr=Qe(()=>(h(ml),n(B),h(n(ut)),v(()=>ml(n(B).status)||n(ut).dot)));var q=Ia(),O=$(q),Me=T(O);y(q),P(()=>{ge(q,1,`inline-flex items-center gap-1 
text-xs font-medium px-2 py-0.5 rounded-full ${h(n(ut)),v(()=>n(ut).bg)??""}`,"svelte-4kxpm1"),K(O,`${n(B),v(()=>n(B).environment)??""} `),ge(Me,1,`w-1.5 h-1.5 rounded-full ${n(sr)??""}`,"svelte-4kxpm1")}),w(Ge,q)},Ct=Dr(()=>(n(B),h(n(g)),v(()=>n(B).stage_type==="deploy"&&zs(n(g),n(B).status))));ne(re,Ge=>{n(Ct)&&Ge(ce)})}w(ae,Kt)});var Vt=T($t,2),Wr=$(Vt);y(Vt),P(()=>{ge(Qt,1,`${h(n(g)),v(()=>n(g).color)??""} text-sm`,"svelte-4kxpm1"),K(Et,(h(n(g)),v(()=>n(g).label))),K(Wr,`${h(n(g)),v(()=>n(g).done)??""}/${h(n(g)),v(()=>n(g).total)??""}`)}),w(I,oe)},Qs=Dr(()=>(h(n(p)),h(nn),v(()=>n(p).has_pipeline&&nn(n(p).pipeline_stages)))),Lf=I=>{const g=Qe(()=>(h(n(p)),v(()=>n(p).env_groups.every(_e=>_e.status==="SUCCEEDED"))));var oe=tn(),C=pt(oe);{var at=_e=>{var nt=qa();ar(2),w(_e,nt)},ft=_e=>{var nt=tn(),Gr=pt(nt);xt(Gr,1,()=>(h(n(p)),v(()=>n(p).env_groups)),Vn,(Qt,Et)=>{var $t=tn(),Vt=pt($t);{var Wr=ae=>{const B=Qe(()=>(h(js),n(Et),v(()=>js[n(Et).status]||js.SUCCEEDED)));var Kt=Ba(),re=pt(Kt);{var ce=O=>{var Me=Oa();w(O,Me)},Ct=O=>{var Me=Ua();P(()=>ge(Me,0,`w-4 h-4 ${h(n(B)),v(()=>n(B).iconColor)??""} shrink-0`,"svelte-4kxpm1")),w(O,Me)},Ge=O=>{var Me=ja();P(()=>ge(Me,0,`w-4 h-4 ${h(n(B)),v(()=>n(B).iconColor)??""} shrink-0`,"svelte-4kxpm1")),w(O,Me)};ne(re,O=>{h(n(B)),v(()=>n(B).icon==="pulse")?O(ce):(h(n(B)),v(()=>n(B).icon==="check-circle")?O(Ct,1):O(Ge,-1))})}var ut=T(re,2),sr=$(ut,!0);y(ut);var q=T(ut,2);xt(q,1,()=>(n(Et),v(()=>n(Et).envs)),O=>O,(O,Me)=>{const _n=Qe(()=>(h(dr),n(Me),v(()=>dr(n(Me)))));var ns=Fa(),Sl=$(ns),zf=T(Sl);y(ns),P(()=>{ge(ns,1,`inline-flex items-center gap-1 text-xs font-medium px-2 py-0.5 rounded-full ${h(n(_n)),v(()=>n(_n).bg)??""}`,"svelte-4kxpm1"),K(Sl,`${n(Me)??""} `),ge(zf,1,`w-1.5 h-1.5 rounded-full ${h(n(_n)),v(()=>n(_n).dot)??""}`,"svelte-4kxpm1")}),w(O,ns)}),P(()=>{ge(ut,1,`${h(n(B)),v(()=>n(B).color)??""} 
text-sm`,"svelte-4kxpm1"),K(sr,(h(n(B)),v(()=>n(B).label)))}),w(ae,Kt)};ne(Vt,ae=>{n(Et),v(()=>n(Et).status!=="SUCCEEDED")&&ae(Wr)})}w(Qt,$t)}),w(_e,nt)};ne(C,_e=>{n(g)?_e(at):_e(ft,-1)})}w(I,oe)},qf=I=>{var g=Pa();ar(2),w(I,g)};ne(Ws,I=>{n(In)?I(Hr):n(Qs)?I(Ys,1):(h(n(p)),v(()=>n(p).env_groups&&n(p).env_groups.length>0)?I(Lf,2):I(qf,-1))})}ar(2),y(hn);var Vs=T(hn,2),El=$(Vs);{var Of=I=>{var g=za(),oe=$(g,!0);y(g),P(()=>K(oe,(h(n(p)),v(()=>n(p).description)))),w(I,g)};ne(El,I=>{h(n(p)),v(()=>n(p).description)&&I(Of)})}var $l=T(El,2),Ks=$($l),Uf=$(Ks,!0);y(Ks);var jf=T(Ks,2);{var Ff=I=>{var g=Ha(),oe=$(g,!0);y(g),P(()=>K(oe,(h(n(p)),v(()=>n(p).version)))),w(I,g)};ne(jf,I=>{h(n(p)),v(()=>n(p).version)&&I(Ff)})}y($l),y(Vs);var Cl=T(Vs,2);{var Bf=I=>{var g=ef();xt(g,7,()=>(h(n(p)),v(()=>n(p).pipeline_stages)),(oe,C)=>oe.id||`${oe.stage_type}-${oe.environment}-${C}`,(oe,C,at)=>{var ft=Za(),_e=$(ft);{var nt=re=>{var ce=Ga();w(re,ce)},Gr=re=>{var ce=Wa();w(re,ce)},Qt=re=>{var ce=Ya();w(re,ce)},Et=re=>{var ce=Qa();w(re,ce)},$t=re=>{var ce=Va();w(re,ce)};ne(_e,re=>{n(C),v(()=>n(C).status==="SUCCEEDED")?re(nt):(n(C),v(()=>n(C).status==="RUNNING")?re(Gr,1):(n(C),v(()=>n(C).status==="QUEUED")?re(Qt,2):(n(C),v(()=>n(C).status==="FAILED")?re(Et,3):re($t,-1))))})}var Vt=T(_e,2);{var Wr=re=>{const ce=Qe(()=>(h(dr),n(C),v(()=>dr(n(C).environment||""))));var Ct=Ka(),Ge=pt(Ct),ut=$(Ge,!0);y(Ge);var sr=T(Ge,2),q=$(sr),O=T(q);y(sr),P(Me=>{ge(Ge,1,`text-sm ${n(C),v(()=>n(C).status==="SUCCEEDED"?"text-gray-700":n(C).status==="RUNNING"?"text-yellow-700":n(C).status==="FAILED"?"text-red-700":"text-gray-400")??""}`,"svelte-4kxpm1"),K(ut,Me),ge(sr,1,`inline-flex items-center gap-1 text-xs font-medium px-2 py-0.5 rounded-full ${h(n(ce)),v(()=>n(ce).bg)??""}`,"svelte-4kxpm1"),K(q,`${n(C),v(()=>n(C).environment)??""} `),ge(O,1,`w-1.5 h-1.5 rounded-full ${h(n(ce)),v(()=>n(ce).dot)??""}`,"svelte-4kxpm1")},[()=>(h(bl),n(C),v(()=>bl(n(C).status)))]),w(re,Ct)},ae=re=>{var 
ce=Ja(),Ct=$(ce);y(ce),P(Ge=>{ge(ce,1,`text-sm ${n(C),v(()=>n(C).status==="SUCCEEDED"?"text-gray-700":n(C).status==="RUNNING"?"text-yellow-700":"text-gray-400")??""}`,"svelte-4kxpm1"),K(Ct,`${Ge??""} ${n(C),v(()=>n(C).duration_seconds)??""}s`)},[()=>(h(xl),n(C),v(()=>xl(n(C).status)))]),w(re,ce)};ne(Vt,re=>{n(C),v(()=>n(C).stage_type==="deploy")?re(Wr):(n(C),v(()=>n(C).stage_type==="wait")&&re(ae,1))})}var B=T(Vt,2);{var Kt=re=>{var ce=Xa(),Ct=$(ce,!0);y(ce),P(Ge=>K(Ct,Ge),[()=>(n(C),v(()=>Bs(n(C).started_at,n(C).completed_at,n(C).status)))]),w(re,ce)};ne(B,re=>{n(C),v(()=>n(C).started_at&&(n(C).status==="RUNNING"||n(C).status==="QUEUED"||n(C).completed_at))&&re(Kt)})}ar(2),y(ft),P(()=>ge(ft,1,`px-4 py-2.5 flex items-center gap-3 text-sm ${h(n(at)),h(n(p)),v(()=>n(at)n(C).status==="PENDING"?"opacity-50":"")??""}`,"svelte-4kxpm1")),w(oe,ft)}),y(g),w(I,g)};ne(Cl,I=>{h(n(p)),v(()=>n(p).has_pipeline)&&I(Bf)})}var Pf=T(Cl,2);xt(Pf,3,()=>(h(n(p)),v(()=>n(p).destinations)),I=>I.name,(I,g,oe)=>{const C=Qe(()=>(h(dr),n(g),v(()=>dr(n(g).environment||""))));var at=vf(),ft=$(at);{var _e=q=>{var O=tf();w(q,O)},nt=q=>{var O=rf();w(q,O)},Gr=q=>{var O=nf();w(q,O)},Qt=q=>{var O=sf();w(q,O)},Et=q=>{var O=lf();w(q,O)};ne(ft,q=>{n(g),v(()=>n(g).status==="SUCCEEDED")?q(_e):(n(g),v(()=>n(g).status==="RUNNING"||n(g).status==="ASSIGNED")?q(nt,1):(n(g),v(()=>n(g).status==="QUEUED")?q(Gr,2):(n(g),v(()=>n(g).status==="FAILED")?q(Qt,3):q(Et,-1))))})}var $t=T(ft,2),Vt=$($t),Wr=T(Vt);y($t);var ae=T($t,2),B=$(ae,!0);y(ae);var Kt=T(ae,2);{var re=q=>{var O=of();w(q,O)},ce=q=>{var O=af();w(q,O)},Ct=q=>{var O=ff(),Me=$(O);y(O),P(()=>K(Me,`Queued${n(g),v(()=>n(g).queue_position?` #${n(g).queue_position}`:"")??""}`)),w(q,O)},Ge=q=>{var O=uf();w(q,O)};ne(Kt,q=>{n(g),v(()=>n(g).status==="SUCCEEDED")?q(re):(n(g),v(()=>n(g).status==="RUNNING")?q(ce,1):(n(g),v(()=>n(g).status==="QUEUED")?q(Ct,2):(n(g),v(()=>n(g).status==="FAILED")&&q(Ge,3))))})}var ut=T(Kt,2);{var sr=q=>{var 
O=cf(),Me=$(O,!0);y(O),P(_n=>K(Me,_n),[()=>(h(rn),n(g),v(()=>rn(n(g).completed_at)))]),w(q,O)};ne(ut,q=>{n(g),v(()=>n(g).completed_at)&&q(sr)})}y(at),P(()=>{ge(at,1,`px-4 py-2 flex items-center gap-3 text-sm ${h(n(oe)),h(n(p)),v(()=>n(oe)n(C).bg)??""}`,"svelte-4kxpm1"),K(Vt,`${n(g),v(()=>n(g).environment)??""} `),ge(Wr,1,`w-1.5 h-1.5 rounded-full ${h(n(C)),v(()=>n(C).dot)??""}`,"svelte-4kxpm1"),K(B,(n(g),v(()=>n(g).name)))}),w(I,at)}),y(Rn),y(Be),P(I=>{Or(Be,"data-envs",(h(n(p)),v(()=>n(p).dest_envs))),Or(Ot,"href",`/orgs/${i()??""}/projects/${h(n(p)),h(l()),v(()=>n(p).project_name||l())??""}/releases/${h(n(p)),v(()=>n(p).slug)??""}`),K(nr,(h(n(p)),v(()=>n(p).title))),K(Mn,I),K(Uf,(h(n(p)),v(()=>n(p).slug)))},[()=>(h(rn),h(n(p)),v(()=>rn(n(p).created_at)))]),Cn("toggle",Rn,he),w(Le,Be)},Ke=Le=>{var p=_f(),Be=$(p),ue=T($(Be)),yt=T(ue,3),Ot=$(yt);y(yt);var nr=T(yt,2),Ee=$(nr);y(nr),y(Be);var $e=T(Be,2);xt($e,5,()=>(n(L),v(()=>n(L).releases||[])),Ne=>Ne.slug,(Ne,Ce)=>{var mr=hf(),xr=$(mr),Mn=$(xr),pn=T($(Mn),2),Gs=$(pn,!0);y(pn),y(Mn);var ts=T(Mn,2),rs=$(ts);{var Rn=Hr=>{var In=pf(),Ys=$(In,!0);y(In),P(Qs=>K(Ys,Qs),[()=>(n(Ce),v(()=>n(Ce).commit_sha.slice(0,7)))]),w(Hr,In)};ne(rs,Hr=>{n(Ce),v(()=>n(Ce).commit_sha)&&Hr(Rn)})}var hn=T(rs,2),Ws=$(hn,!0);y(hn),y(ts),y(xr),y(mr),P(Hr=>{Or(pn,"href",`/orgs/${i()??""}/projects/${n(Ce),h(l()),v(()=>n(Ce).project_name||l())??""}/releases/${n(Ce),v(()=>n(Ce).slug)??""}`),K(Gs,(n(Ce),v(()=>n(Ce).title))),K(Ws,Hr)},[()=>(h(rn),n(Ce),v(()=>rn(n(Ce).created_at)))]),w(Ne,mr)}),y($e),y(p),P(()=>{K(ue,` ${n(L),v(()=>n(L).count)??""} hidden commit${n(L),v(()=>n(L).count!==1?"s":"")??""} `),K(Ot,`Show commit${n(L),v(()=>n(L).count!==1?"s":"")??""}`),K(Ee,`Hide commit${n(L),v(()=>n(L).count!==1?"s":"")??""}`)}),Cn("toggle",p,he),w(Le,p)};ne(se,Le=>{n(L),v(()=>n(L).kind==="release"&&n(L).release)?Le(De):(n(L),v(()=>n(L).kind==="hidden")&&Le(Ke,1))})}w(te,R)}),y(J),vl(J,te=>Q(S,te),()=>n(S));var 
z=T(J,2);xt(z,5,()=>n(a),te=>te.name,(te,L)=>{var R=gf();qr(R,"width: 20px; margin-right: 4px; display: flex; justify-content: center;");var se=$(R),De=$(se,!0);y(se),y(R),P(()=>{qr(se,`writing-mode: vertical-rl; transform: rotate(180deg); font-size: 10px; font-weight: 500; color: ${n(L),v(()=>n(L).color)??""}; white-space: nowrap;`),K(De,(n(L),v(()=>n(L).name)))}),w(te,R)}),y(z),y(m),P(()=>qr(m,`grid-template-columns: ${n(s)??""}px 1fr; grid-template-rows: 1fr auto;`)),w(_,m)};ne(Hs,_=>{n(f)?_(be):n(u)?_(ye,1):(n(o),v(()=>n(o).length===0)?_(kt,2):_(zr,-1))})}return w(e,es),_s(Pr)}customElements.define("release-timeline",hl(bf,{org:{},project:{}},[],[]));var wf=N(' Waiting for logs…',1),kf=N('
'),yf=N('
No logs recorded for this release.
'),Ef=N(''),$f=N(' Live'),Cf=Fe(''),Sf=Fe(''),Df=N(' '),Nf=N('
'),Tf=N(''),Af=N('
',1),Mf=N("
");const Rf={hash:"svelte-qvn6bd",code:`.logs-root.svelte-qvn6bd {position:relative;border:1px solid #e5e7eb;border-radius:0.5rem;overflow:hidden;font-family:ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;font-size:0.8125rem;line-height:1.625;background:#111827;color:#d1d5db;}.logs-empty.svelte-qvn6bd {padding:2rem;text-align:center;color:#6b7280;font-family:system-ui, -apple-system, sans-serif;font-size:0.875rem;display:flex;align-items:center;justify-content:center;gap:0.5rem;}.logs-header.svelte-qvn6bd {display:flex;align-items:center;background:#1f2937;border-bottom:1px solid #374151;}.logs-tabs.svelte-qvn6bd {display:flex;gap:0;overflow-x:auto;flex:1;min-width:0;}.logs-tab.svelte-qvn6bd {padding:0.5rem 1rem;font-size:0.75rem;font-family:system-ui, -apple-system, sans-serif;color:#9ca3af;background:transparent;border:none;border-bottom:2px solid transparent;cursor:pointer;white-space:nowrap;display:flex;align-items:center;gap:0.375rem;transition:color 0.15s, border-color 0.15s;}.logs-tab.svelte-qvn6bd:hover {color:#e5e7eb;}.logs-tab.active.svelte-qvn6bd {color:#f9fafb;border-bottom-color:#3b82f6;}.logs-count.svelte-qvn6bd {font-size:0.625rem;padding:0.0625rem 0.375rem;border-radius:9999px;background:#374151;color:#9ca3af;}.logs-controls.svelte-qvn6bd {display:flex;align-items:center;gap:0.25rem;padding:0 0.5rem;flex-shrink:0;}.logs-ctrl-btn.svelte-qvn6bd {display:flex;align-items:center;justify-content:center;width:1.75rem;height:1.75rem;border-radius:0.25rem;border:none;background:transparent;color:#6b7280;cursor:pointer;transition:color 0.15s, background 0.15s;}.logs-ctrl-btn.svelte-qvn6bd:hover {color:#d1d5db;background:#374151;}.logs-ctrl-btn.active.svelte-qvn6bd {color:#93c5fd;background:#1e3a5f;}.logs-live.svelte-qvn6bd {display:flex;align-items:center;gap:0.375rem;font-family:system-ui, -apple-system, 
sans-serif;font-size:0.6875rem;color:#34d399;text-transform:uppercase;letter-spacing:0.05em;padding-right:0.5rem;}.logs-dot.svelte-qvn6bd {width:0.5rem;height:0.5rem;border-radius:9999px;background:#34d399;display:inline-block; + animation: svelte-qvn6bd-pulse 2s ease-in-out infinite;} + + @keyframes svelte-qvn6bd-pulse { + 0%, + 100% { + opacity: 1; + } + 50% { + opacity: 0.4; + } + }.logs-output.svelte-qvn6bd {max-height:60vh;overflow-y:auto;padding:0.25rem 0;}.logs-root.expanded.svelte-qvn6bd .logs-output:where(.svelte-qvn6bd) {max-height:85vh;}.logs-output.svelte-qvn6bd::-webkit-scrollbar {width:0.5rem;}.logs-output.svelte-qvn6bd::-webkit-scrollbar-track {background:#1f2937;}.logs-output.svelte-qvn6bd::-webkit-scrollbar-thumb {background:#4b5563;border-radius:0.25rem;}.logs-line.svelte-qvn6bd {display:flex;padding:0 1rem 0 0;gap:0;min-height:1.5rem;}.logs-line.svelte-qvn6bd:hover {background:rgba(255, 255, 255, 0.04);}.logs-line.stderr.svelte-qvn6bd {color:#fca5a5;background:rgba(239, 68, 68, 0.06);}.logs-line.stderr.svelte-qvn6bd:hover {background:rgba(239, 68, 68, 0.1);}.logs-line.status-line.svelte-qvn6bd {color:#93c5fd;font-weight:600;padding-top:0.375rem;padding-bottom:0.375rem;border-top:1px solid #1e3a5f;margin-top:0.25rem;}.logs-ts.svelte-qvn6bd {color:#4b5563;white-space:nowrap;user-select:none;flex-shrink:0;width:3.5rem;text-align:right;padding-right:1rem;padding-left:0.75rem;border-right:1px solid #1f2937;margin-right:0.75rem;}.logs-text.svelte-qvn6bd {white-space:pre-wrap;word-break:break-all;flex:1;min-width:0;padding-left:1rem;}.logs-line.svelte-qvn6bd .logs-ts:where(.svelte-qvn6bd) + .logs-text:where(.svelte-qvn6bd) {padding-left:0;}.logs-scroll-btn.svelte-qvn6bd {position:absolute;bottom:0.75rem;left:50%;transform:translateX(-50%);padding:0.25rem 0.75rem;font-size:0.6875rem;font-family:system-ui, -apple-system, sans-serif;color:#d1d5db;background:#374151;border:1px solid #4b5563;border-radius:9999px;cursor:pointer;opacity:0.9;transition:opacity 
0.15s;}.logs-scroll-btn.svelte-qvn6bd:hover {opacity:1;background:#4b5563;}`};function If(e,t){hs(t,!0),al(e,Rf);let r=Os(t,"url",7,""),s=Pe(Tr({})),i=Pe(null),l=Pe(!1),o=Pe(!1),a=Pe(!0),f=Pe(!0),u=Pe(!1),d=Pe(null),x=Dr(()=>Object.keys(n(s)).sort()),k=Dr(()=>n(i)&&n(s)[n(i)]?n(s)[n(i)]:[]);function S(){if(!r())return;const M=new EventSource(r());return Q(l,!0),M.addEventListener("log",ie=>{try{const Y=JSON.parse(ie.data),le=Y.destination||"unknown";n(s)[le]||(n(s)[le]=[],n(i)||Q(i,le,!0)),n(s)[le]=[...n(s)[le],{line:Y.line,timestamp:Y.timestamp,channel:Y.channel||"stdout"}],n(a)&&requestAnimationFrame(()=>{n(d)&&(n(d).scrollTop=n(d).scrollHeight)})}catch(Y){console.warn("[release-logs] bad log event:",Y)}}),M.addEventListener("status",ie=>{try{const Y=JSON.parse(ie.data),le=Y.destination||"unknown";n(s)[le]||(n(s)[le]=[],n(i)||Q(i,le,!0)),n(s)[le]=[...n(s)[le],{line:`── ${Y.status} ──`,timestamp:"",channel:"status"}]}catch{}}),M.addEventListener("done",()=>{Q(o,!0)}),M.addEventListener("error",()=>{Q(l,!1),M.close()}),()=>{M.close(),Q(l,!1)}}Gn(()=>{if(r())return S()});function E(){if(!n(d))return;const M=n(d).scrollHeight-n(d).scrollTop-n(d).clientHeight<40;Q(a,M)}function F(){n(d)&&(n(d).scrollTop=n(d).scrollHeight,Q(a,!0))}function b(M){if(!M)return null;const ie=Number(M);if(Number.isFinite(ie)&&ie>1e12)return ie;const Y=new Date(M);return isNaN(Y.getTime())?null:Y.getTime()}function A(M,ie){const Y=b(M);if(Y===null||ie===null)return"";const le=Y-ie;if(le<0)return"0s";const Se=Math.floor(le/1e3);if(Se<60)return`${Se}s`;const he=Math.floor(Se/60),rt=Se%60;return`${he}m${String(rt).padStart(2,"0")}s`}let ve=Dr(()=>{const M={};for(const[ie,Y]of Object.entries(n(s)))for(const le of Y)if(le.timestamp){M[ie]=b(le.timestamp);break}return M}),ee=Dr(()=>n(i)?n(ve)[n(i)]??null:null);function W(M){const ie=b(M);if(ie===null)return"";const Y=new 
Date(ie),le=String(Y.getHours()).padStart(2,"0"),Se=String(Y.getMinutes()).padStart(2,"0"),he=String(Y.getSeconds()).padStart(2,"0"),rt=String(Y.getMilliseconds()).padStart(3,"0");return`${le}:${Se}:${he}.${rt}`}var ze={get url(){return r()},set url(M=""){r(M),wn()}},Ae=Mf();let Ie;var Lt=$(Ae);{var wt=M=>{var ie=kf(),Y=$(ie);{var le=he=>{var rt=wf();ar(),w(he,rt)},Se=he=>{var rt=Po("No logs available");w(he,rt)};ne(Y,he=>{n(l)?he(le):he(Se,-1)})}y(ie),w(M,ie)},Yt=M=>{var ie=yf();w(M,ie)},qt=M=>{var ie=Af(),Y=pt(ie),le=$(Y);xt(le,21,()=>n(x),Vn,(be,ye)=>{var kt=Ef();let zr;var _=$(kt),m=T(_),D=$(m,!0);y(m),y(kt),P(()=>{var J;zr=ge(kt,1,"logs-tab svelte-qvn6bd",null,zr,{active:n(i)===n(ye)}),K(_,`${n(ye)??""} `),K(D,((J=n(s)[n(ye)])==null?void 0:J.length)||0)}),Yn("click",kt,()=>Q(i,n(ye),!0)),w(be,kt)}),y(le);var Se=T(le,2),he=$(Se);{var rt=be=>{var ye=$f();w(be,ye)};ne(he,be=>{n(l)&&!n(o)&&be(rt)})}var He=T(he,2);let gr;var dn=T(He,2),Bs=$(dn);{var Ps=be=>{var ye=Cf();w(be,ye)},zs=be=>{var ye=Sf();w(be,ye)};ne(Bs,be=>{n(u)?be(Ps):be(zs,-1)})}y(dn),y(Se),y(Y);var Pr=T(Y,2);xt(Pr,21,()=>n(k),Vn,(be,ye)=>{var kt=Nf();let zr;var _=$(kt);{var m=z=>{var te=Df(),L=$(te,!0);y(te),P((R,se)=>{Or(te,"title",R),K(L,se)},[()=>W(n(ye).timestamp),()=>A(n(ye).timestamp,n(ee))]),w(z,te)};ne(_,z=>{n(f)&&z(m)})}var D=T(_,2),J=$(D,!0);y(D),y(kt),P(()=>{zr=ge(kt,1,"logs-line svelte-qvn6bd",null,zr,{stderr:n(ye).channel==="stderr","status-line":n(ye).channel==="status"}),K(J,n(ye).line)}),w(be,kt)}),y(Pr),vl(Pr,be=>Q(d,be),()=>n(d));var es=T(Pr,2);{var Hs=be=>{var ye=Tf();Yn("click",ye,F),w(be,ye)};ne(es,be=>{n(a)||be(Hs)})}P(()=>{gr=ge(He,1,"logs-ctrl-btn svelte-qvn6bd",null,gr,{active:n(f)}),Or(dn,"title",n(u)?"Collapse":"Expand")}),Yn("click",He,()=>Q(f,!n(f))),Yn("click",dn,()=>Q(u,!n(u))),Cn("scroll",Pr,E),w(M,ie)};ne(Lt,M=>{n(x).length===0&&!n(o)?M(wt):n(x).length===0&&n(o)?M(Yt,1):M(qt,-1)})}return y(Ae),P(()=>Ie=ge(Ae,1,"logs-root 
svelte-qvn6bd",null,Ie,{expanded:n(u)})),w(e,Ae),_s(ze)}jo(["click"]),customElements.define("release-logs",hl(If,{url:{}},[],[],{mode:"open"}))})(); diff --git a/static/js/details-persist.js b/static/js/details-persist.js new file mode 100644 index 0000000..c77b752 --- /dev/null +++ b/static/js/details-persist.js @@ -0,0 +1,38 @@ +/** + * Persists the open/closed state of
elements inside [data-release] + * cards across page reloads using sessionStorage. + * + * Key format: `details::` + */ +(function () { + const prefix = "details:" + location.pathname + ":"; + + // Restore open state on load + document.querySelectorAll("[data-release][data-release-slug]").forEach((card) => { + const slug = card.dataset.releaseSlug; + const details = card.querySelector("details"); + if (!details || !slug) return; + + if (sessionStorage.getItem(prefix + slug) === "1") { + details.open = true; + } + }); + + // Listen for toggle events (works for both open and close) + document.addEventListener("toggle", (e) => { + const details = e.target; + if (details.tagName !== "DETAILS") return; + + const card = details.closest("[data-release][data-release-slug]"); + if (!card) return; + + const slug = card.dataset.releaseSlug; + if (!slug) return; + + if (details.open) { + sessionStorage.setItem(prefix + slug, "1"); + } else { + sessionStorage.removeItem(prefix + slug); + } + }, true); +})(); diff --git a/static/js/live-events.js b/static/js/live-events.js new file mode 100644 index 0000000..cff2ef9 --- /dev/null +++ b/static/js/live-events.js @@ -0,0 +1,701 @@ +/** + * Live event updates via SSE. + * + * Connects to the project events endpoint and updates the deployment UI + * in real-time when destination statuses change. 
+ * + * Usage: + */ +(function () { + const script = document.currentScript; + const org = script?.dataset.org; + const project = script?.dataset.project; + if (!org || !project) return; + + const url = `/orgs/${org}/projects/${project}/events`; + let lastSequence = 0; + let retryDelay = 1000; + + function connect() { + const es = new EventSource(url); + + es.addEventListener("open", () => { + retryDelay = 1000; + }); + + // destination status_changed events update inline badges + es.addEventListener("destination", (e) => { + try { + const data = JSON.parse(e.data); + lastSequence = Math.max(lastSequence, data.sequence || 0); + handleDestinationEvent(data); + } catch (err) { + console.warn("[live-events] bad destination event:", err); + } + }); + + // release events + es.addEventListener("release", (e) => { + try { + const data = JSON.parse(e.data); + lastSequence = Math.max(lastSequence, data.sequence || 0); + if (data.action === "created") { + window.location.reload(); + } else if ( + data.action === "status_changed" || + data.action === "updated" + ) { + handleReleaseEvent(data); + } + } catch (err) { + console.warn("[live-events] bad release event:", err); + } + }); + + // artifact events -> reload to show new artifacts + es.addEventListener("artifact", (e) => { + try { + const data = JSON.parse(e.data); + if (data.action === "created" || data.action === "updated") { + window.location.reload(); + } + } catch (err) { + console.warn("[live-events] bad artifact event:", err); + } + }); + + // pipeline events (pipeline run progress) + es.addEventListener("pipeline", (e) => { + try { + const data = JSON.parse(e.data); + lastSequence = Math.max(lastSequence, data.sequence || 0); + handlePipelineEvent(data); + } catch (err) { + console.warn("[live-events] bad pipeline event:", err); + } + }); + + es.addEventListener("error", () => { + es.close(); + // Reconnect with exponential backoff + setTimeout(connect, retryDelay); + retryDelay = Math.min(retryDelay * 2, 30000); 
+ }); + } + + // ── Status update helpers ────────────────────────────────────────── + + const STATUS_CONFIG = { + SUCCEEDED: { + icon: "check-circle", + iconColor: "text-green-500", + label: "Deployed", + labelColor: "text-green-600", + summaryIcon: "check-circle", + summaryColor: "text-green-500", + summaryLabel: "Deployed to", + summaryLabelColor: "text-gray-600", + }, + RUNNING: { + icon: "pulse", + iconColor: "text-yellow-500", + label: "Deploying", + labelColor: "text-yellow-600", + summaryIcon: "pulse", + summaryColor: "text-yellow-500", + summaryLabel: "Deploying to", + summaryLabelColor: "text-yellow-700", + }, + ASSIGNED: { + icon: "pulse", + iconColor: "text-yellow-500", + label: "Assigned", + labelColor: "text-yellow-600", + summaryIcon: "pulse", + summaryColor: "text-yellow-500", + summaryLabel: "Deploying to", + summaryLabelColor: "text-yellow-700", + }, + QUEUED: { + icon: "clock", + iconColor: "text-blue-400", + label: "Queued", + labelColor: "text-blue-600", + summaryIcon: "clock", + summaryColor: "text-blue-400", + summaryLabel: "Queued for", + summaryLabelColor: "text-blue-600", + }, + FAILED: { + icon: "x-circle", + iconColor: "text-red-500", + label: "Failed", + labelColor: "text-red-600", + summaryIcon: "x-circle", + summaryColor: "text-red-500", + summaryLabel: "Failed on", + summaryLabelColor: "text-red-600", + }, + TIMED_OUT: { + icon: "clock", + iconColor: "text-orange-500", + label: "Timed out", + labelColor: "text-orange-600", + summaryIcon: "clock", + summaryColor: "text-orange-500", + summaryLabel: "Timed out on", + summaryLabelColor: "text-orange-600", + }, + CANCELLED: { + icon: "ban", + iconColor: "text-gray-400", + label: "Cancelled", + labelColor: "text-gray-500", + summaryIcon: "ban", + summaryColor: "text-gray-400", + summaryLabel: "Cancelled", + summaryLabelColor: "text-gray-500", + }, + }; + + function makeStatusIcon(type, colorClass) { + if (type === "pulse") { + const span = document.createElement("span"); + span.className = 
"w-4 h-4 shrink-0 flex items-center justify-center"; + span.innerHTML = + ''; + return span; + } + const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + svg.setAttribute("class", `w-4 h-4 ${colorClass} shrink-0`); + svg.setAttribute("fill", "none"); + svg.setAttribute("stroke", "currentColor"); + svg.setAttribute("viewBox", "0 0 24 24"); + const path = document.createElementNS( + "http://www.w3.org/2000/svg", + "path" + ); + path.setAttribute("stroke-linecap", "round"); + path.setAttribute("stroke-linejoin", "round"); + path.setAttribute("stroke-width", "2"); + const paths = { + "check-circle": "M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z", + "x-circle": + "M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z", + clock: "M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z", + ban: "M18.364 18.364A9 9 0 005.636 5.636m12.728 12.728A9 9 0 015.636 5.636m12.728 12.728L5.636 5.636", + }; + path.setAttribute("d", paths[type] || paths["check-circle"]); + svg.appendChild(path); + return svg; + } + + function handleDestinationEvent(data) { + if (data.action !== "status_changed") return; + const status = data.metadata?.status; + const destName = data.metadata?.destination_name || data.resource_id; + const env = data.metadata?.environment; + if (!status || !destName) return; + + const config = STATUS_CONFIG[status]; + if (!config) return; + + // Find all destination rows that match + document + .querySelectorAll("[data-release] details .px-4.py-2") + .forEach((row) => { + const nameSpan = row.querySelector(".text-gray-400.text-xs"); + if (!nameSpan || nameSpan.textContent.trim() !== destName) return; + + // Update the status icon (first child element) + const oldIcon = row.firstElementChild; + if (oldIcon) { + const newIcon = makeStatusIcon(config.icon, config.iconColor); + row.replaceChild(newIcon, oldIcon); + } + + // Update the status label text + const labels = row.querySelectorAll("span[class*='text-xs text-']"); + labels.forEach((label) 
=> { + const text = label.textContent.trim(); + if ( + [ + "Deployed", + "Deploying", + "Assigned", + "Queued", + "Failed", + "Timed out", + "Cancelled", + ].some((s) => text.startsWith(s)) + ) { + label.textContent = config.label; + // Reset classes + label.className = `text-xs ${config.labelColor}`; + } + }); + }); + + // Update pipeline stage rows that match this environment + if (env) { + updatePipelineStages(env, status, config); + } + + // Also update the summary line for the parent release card + updateReleaseSummary(data); + } + + function updatePipelineStages(env, status, config) { + document + .querySelectorAll( + `[data-pipeline-stage][data-stage-type="deploy"][data-stage-env="${env}"]` + ) + .forEach((row) => { + // Update data attributes + row.dataset.stageStatus = status; + + // Set started_at if transitioning to an active state and not already set + if ( + (status === "RUNNING" || status === "QUEUED") && + !row.dataset.startedAt + ) { + row.dataset.startedAt = new Date().toISOString(); + } + // Set completed_at when reaching a terminal state + if ( + ["SUCCEEDED", "FAILED", "TIMED_OUT", "CANCELLED"].includes(status) && + !row.dataset.completedAt + ) { + row.dataset.completedAt = new Date().toISOString(); + } + + // Ensure elapsed span exists for active stages + if ( + (status === "RUNNING" || status === "QUEUED") && + !row.querySelector("[data-elapsed]") + ) { + const pipelineLabel = row.querySelector("span.ml-auto"); + if (pipelineLabel) { + const el = document.createElement("span"); + el.className = "text-xs text-gray-400 tabular-nums"; + el.dataset.elapsed = ""; + pipelineLabel.before(el); + } + } + + // Toggle opacity for pending vs active + if (status === "PENDING") { + row.classList.add("opacity-50"); + } else { + row.classList.remove("opacity-50"); + } + + // Replace status icon (first child element) + const oldIcon = row.firstElementChild; + if (oldIcon) { + const newIcon = makeStatusIcon(config.icon, config.iconColor); + 
row.replaceChild(newIcon, oldIcon); + } + + // Update the status text span (e.g. "Deploying to" -> "Deployed to") + const textSpan = row.querySelector("span.text-sm"); + if (textSpan) { + const labels = { + SUCCEEDED: "Deployed to", + RUNNING: "Deploying to", + QUEUED: "Queued for", + FAILED: "Failed on", + TIMED_OUT: "Timed out on", + CANCELLED: "Cancelled", + }; + if (labels[status]) textSpan.textContent = labels[status]; + // Update text color + const colors = { + SUCCEEDED: "text-gray-700", + RUNNING: "text-yellow-700", + QUEUED: "text-blue-600", + FAILED: "text-red-700", + TIMED_OUT: "text-orange-600", + CANCELLED: "text-gray-500", + }; + textSpan.className = `text-sm ${colors[status] || "text-gray-600"}`; + } + + // Update the env badge dot color + const badge = row.querySelector( + "span.inline-flex span.rounded-full:last-child" + ); + if (badge) { + const dotColors = { + SUCCEEDED: "bg-green-500", + RUNNING: "bg-yellow-500", + FAILED: "bg-red-500", + }; + if (dotColors[status]) { + badge.className = `w-1.5 h-1.5 rounded-full ${dotColors[status]}`; + } + } + }); + } + + function updateReleaseSummary(_data) { + // Re-compute summaries by scanning pipeline stage rows or destination rows. 
+ document.querySelectorAll("[data-release]").forEach((card) => { + const summary = card.querySelector("details > summary"); + if (!summary) return; + + const pipelineStages = card.querySelectorAll("[data-pipeline-stage]"); + const hasPipeline = pipelineStages.length > 0; + + if (hasPipeline) { + updatePipelineSummary(summary, pipelineStages); + } else { + updateDestinationSummary(summary, card); + } + }); + } + + function updatePipelineSummary(summary, stages) { + let allDone = true; + let anyFailed = false; + let anyRunning = false; + let anyWaiting = false; + let done = 0; + const total = stages.length; + const envBadges = []; + + stages.forEach((row) => { + const status = row.dataset.stageStatus || "PENDING"; + const stageType = row.dataset.stageType; + const env = row.dataset.stageEnv; + + if (status === "SUCCEEDED") done++; + if (status !== "SUCCEEDED") allDone = false; + if (status === "FAILED") anyFailed = true; + if (status === "RUNNING") anyRunning = true; + if (stageType === "wait" && status === "RUNNING") anyWaiting = true; + + // Collect env badges for non-PENDING deploy stages + if (stageType === "deploy" && status !== "PENDING" && env) { + envBadges.push({ env, status }); + } + }); + + const chevron = summary.querySelector("svg:last-child"); + summary.innerHTML = ""; + + // Pipeline gear icon + const gear = document.createElementNS("http://www.w3.org/2000/svg", "svg"); + gear.setAttribute("class", "w-3.5 h-3.5 text-purple-400 shrink-0"); + gear.setAttribute("fill", "none"); + gear.setAttribute("stroke", "currentColor"); + gear.setAttribute("viewBox", "0 0 24 24"); + gear.innerHTML = + ''; + summary.appendChild(gear); + + // Status icon + label + let statusIcon, statusLabel, statusLabelColor; + if (allDone) { + statusIcon = makeStatusIcon("check-circle", "text-green-500"); + statusLabel = "Pipeline complete"; + statusLabelColor = "text-gray-600"; + } else if (anyFailed) { + statusIcon = makeStatusIcon("x-circle", "text-red-500"); + statusLabel = 
"Pipeline failed"; + statusLabelColor = "text-red-600"; + } else if (anyWaiting) { + statusIcon = makeStatusIcon("clock", "text-yellow-500"); + statusLabel = "Waiting for time window"; + statusLabelColor = "text-yellow-700"; + } else if (anyRunning) { + statusIcon = makeStatusIcon("pulse", "text-yellow-500"); + statusLabel = "Deploying to"; + statusLabelColor = "text-yellow-700"; + } else { + statusIcon = makeStatusIcon("clock", "text-gray-300"); + statusLabel = "Pipeline pending"; + statusLabelColor = "text-gray-400"; + } + + summary.appendChild(statusIcon); + const labelSpan = document.createElement("span"); + labelSpan.className = `${statusLabelColor} text-sm`; + labelSpan.textContent = statusLabel; + summary.appendChild(labelSpan); + + // Environment badges + for (const { env, status } of envBadges) { + summary.appendChild(makeEnvBadge(env, status)); + } + + // Progress counter + const progress = document.createElement("span"); + progress.className = "text-xs text-gray-400"; + progress.textContent = `${done}/${total}`; + summary.appendChild(progress); + + if (chevron) summary.appendChild(chevron); + } + + function updateDestinationSummary(summary, card) { + // Collect current statuses from destination rows + const rows = card.querySelectorAll("details .px-4.py-2"); + const envStatuses = new Map(); + rows.forEach((row) => { + const envBadge = row.querySelector("[class*='rounded-full']"); + const envName = + envBadge?.closest("span[class*='px-2']")?.textContent?.trim() || ""; + const labels = row.querySelectorAll("span[class*='text-xs text-']"); + let status = ""; + labels.forEach((l) => { + const t = l.textContent.trim(); + if (t === "Deployed") status = "SUCCEEDED"; + else if (t === "Deploying" || t === "Assigned") status = "RUNNING"; + else if (t.startsWith("Queued")) status = "QUEUED"; + else if (t === "Failed") status = "FAILED"; + else if (t === "Timed out") status = "TIMED_OUT"; + else if (t === "Cancelled") status = "CANCELLED"; + }); + if (envName && 
status) envStatuses.set(envName, status); + }); + + if (envStatuses.size === 0) return; + + const groups = new Map(); + for (const [env, st] of envStatuses) { + if (!groups.has(st)) groups.set(st, []); + groups.get(st).push(env); + } + + const chevron = summary.querySelector("svg:last-child"); + summary.innerHTML = ""; + + for (const [status, envs] of groups) { + const cfg = STATUS_CONFIG[status]; + if (!cfg) continue; + + summary.appendChild(makeStatusIcon(cfg.summaryIcon, cfg.summaryColor)); + + const label = document.createElement("span"); + label.className = `${cfg.summaryLabelColor} text-sm`; + label.textContent = cfg.summaryLabel; + summary.appendChild(label); + + for (const env of envs) { + summary.appendChild(makeEnvBadge(env, status)); + } + } + + if (chevron) summary.appendChild(chevron); + } + + function makeEnvBadge(env, status) { + const badge = document.createElement("span"); + let bgClass = "bg-gray-100 text-gray-700"; + let dotClass = "bg-gray-400"; + if (env.includes("prod") && !env.includes("preprod")) { + bgClass = "bg-pink-100 text-pink-800"; + dotClass = "bg-pink-500"; + } else if (env.includes("preprod") || env.includes("pre-prod")) { + bgClass = "bg-orange-100 text-orange-800"; + dotClass = "bg-orange-500"; + } else if (env.includes("stag")) { + bgClass = "bg-yellow-100 text-yellow-800"; + dotClass = "bg-yellow-500"; + } else if (env.includes("dev")) { + bgClass = "bg-violet-100 text-violet-800"; + dotClass = "bg-violet-500"; + } + // Override dot color based on stage status + const statusDots = { + SUCCEEDED: "bg-green-500", + RUNNING: "bg-yellow-500", + FAILED: "bg-red-500", + }; + if (statusDots[status]) dotClass = statusDots[status]; + + badge.className = `inline-flex items-center gap-1 text-xs font-medium px-2 py-0.5 rounded-full ${bgClass}`; + badge.innerHTML = `${env} `; + return badge; + } + + // ── Release event handler ───────────────────────────────────────── + + function handleReleaseEvent(data) { + // Release status_changed or 
updated: metadata may carry per-destination + // updates, or a high-level status change. Treat it as a destination update + // when we have environment + status metadata; otherwise reload for safety. + const status = data.metadata?.status; + const env = data.metadata?.environment; + + if (status && env) { + // We have enough info to do an inline update + const config = STATUS_CONFIG[status]; + if (config) { + updatePipelineStages(env, status, config); + updateReleaseSummary(data); + } + } else { + // Generic release change — reload to pick up new state + window.location.reload(); + } + } + + // ── Pipeline event handler ────────────────────────────────────────── + + function handlePipelineEvent(data) { + // Pipeline events carry stage-level status updates in metadata: + // stage_id, stage_type, environment, status, started_at, completed_at, error_message + const stageStatus = data.metadata?.status; + const stageEnv = data.metadata?.environment; + const stageType = data.metadata?.stage_type; + const stageId = data.metadata?.stage_id; + + if (!stageStatus) { + // Can't do inline update without status — reload + if (data.action === "created" || data.action === "updated") { + window.location.reload(); + } + return; + } + + const config = STATUS_CONFIG[stageStatus]; + + // Update pipeline stage rows by environment (deploy stages) + if (stageEnv && config) { + updatePipelineStages(stageEnv, stageStatus, config); + } + + // Also update by stage_id for wait stages or when env isn't enough + if (stageId) { + document + .querySelectorAll(`[data-pipeline-stage]`) + .forEach((row) => { + // Match on the stage id attribute if we had one, but we use + // stage_type + env. For wait stages, update all wait stages + // in the same card context. 
+ if (stageType === "wait" && row.dataset.stageType === "wait") { + row.dataset.stageStatus = stageStatus; + + if (stageStatus === "RUNNING") { + row.classList.remove("opacity-50"); + if (!row.dataset.startedAt) { + row.dataset.startedAt = + data.metadata?.started_at || new Date().toISOString(); + } + } else if (stageStatus === "SUCCEEDED") { + row.classList.remove("opacity-50"); + if (!row.dataset.completedAt) { + row.dataset.completedAt = + data.metadata?.completed_at || new Date().toISOString(); + } + } + + // Update icon + const iconCfg = STATUS_CONFIG[stageStatus]; + if (iconCfg) { + const oldIcon = row.firstElementChild; + if (oldIcon) { + const newIcon = makeStatusIcon( + iconCfg.icon, + iconCfg.iconColor + ); + row.replaceChild(newIcon, oldIcon); + } + } + + // Update text ("Waiting" -> "Waited") + const textSpan = row.querySelector("span.text-sm"); + if (textSpan) { + const dur = textSpan.textContent.match(/\d+s/)?.[0] || ""; + if (stageStatus === "SUCCEEDED") { + textSpan.textContent = `Waited ${dur}`; + textSpan.className = "text-sm text-gray-700"; + } else if (stageStatus === "RUNNING") { + textSpan.textContent = `Waiting ${dur}`; + textSpan.className = "text-sm text-yellow-700"; + } else if (stageStatus === "FAILED") { + textSpan.textContent = `Wait failed ${dur}`; + textSpan.className = "text-sm text-red-700"; + } else if (stageStatus === "CANCELLED") { + textSpan.textContent = `Wait cancelled ${dur}`; + textSpan.className = "text-sm text-gray-500"; + } + } + + // Remove wait_until span on completion + if (["SUCCEEDED", "FAILED", "CANCELLED"].includes(stageStatus)) { + const waitUntil = row.querySelector("[data-wait-until]"); + if (waitUntil) waitUntil.remove(); + } + + // Ensure elapsed span exists + if ( + (stageStatus === "RUNNING" || stageStatus === "QUEUED") && + !row.querySelector("[data-elapsed]") + ) { + const pipelineLabel = row.querySelector("span.ml-auto"); + if (pipelineLabel) { + const el = document.createElement("span"); + el.className = 
"text-xs text-gray-400 tabular-nums"; + el.dataset.elapsed = ""; + pipelineLabel.before(el); + } + } + } + }); + } + + // Re-compute summary for affected cards + updateReleaseSummary(data); + } + + // ── Elapsed time tickers ────────────────────────────────────────── + + function formatElapsed(seconds) { + if (seconds < 0) seconds = 0; + if (seconds < 60) return `${seconds}s`; + const m = Math.floor(seconds / 60); + const s = seconds % 60; + if (m < 60) return `${m}m ${s}s`; + const h = Math.floor(m / 60); + return `${h}h ${m % 60}m`; + } + + function updateElapsedTimers() { + document.querySelectorAll("[data-pipeline-stage]").forEach((row) => { + const elapsed = row.querySelector("[data-elapsed]"); + if (!elapsed) return; + + const startedAt = row.dataset.startedAt; + if (!startedAt) return; + + const start = new Date(startedAt).getTime(); + if (isNaN(start)) return; + + const completedAt = row.dataset.completedAt; + const status = row.dataset.stageStatus; + + if (completedAt && status !== "RUNNING" && status !== "QUEUED") { + // Completed stage — show fixed duration + const end = new Date(completedAt).getTime(); + if (!isNaN(end)) { + elapsed.textContent = formatElapsed(Math.floor((end - start) / 1000)); + } + } else { + // Active stage — live counter + const now = Date.now(); + elapsed.textContent = formatElapsed(Math.floor((now - start) / 1000)); + } + }); + } + + // Run immediately, then tick every second + updateElapsedTimers(); + setInterval(updateElapsedTimers, 1000); + + // Connect on page load + connect(); +})(); diff --git a/static/js/pipeline-builder.js b/static/js/pipeline-builder.js new file mode 100644 index 0000000..8505427 --- /dev/null +++ b/static/js/pipeline-builder.js @@ -0,0 +1,629 @@ +/** + * web component + * + * Visual DAG builder for release pipeline stages. + * Syncs to a hidden textarea (data-target) as JSON. 
+ * + * Stage format (matches Rust serde of PipelineStage): + * { "id": "stage-name", "depends_on": ["other"], "config": {"Deploy": {"environment": "prod"}} } + * + * Usage: + * + * + */ + +class PipelineBuilder extends HTMLElement { + connectedCallback() { + this.stages = []; + this._targetId = this.dataset.target; + this._readonly = this.dataset.readonly === "true"; + this._mode = "builder"; // "builder" | "json" + + // Load initial value from target textarea + const target = this._target(); + if (target && target.value.trim()) { + try { + const parsed = JSON.parse(target.value.trim()); + this.stages = this._parseStages(parsed); + } catch (e) { + this._rawJson = target.value.trim(); + } + } + + this._render(); + } + + _target() { + return this._targetId ? document.getElementById(this._targetId) : null; + } + + // Extract the stage type string from a config object + _stageType(config) { + if (!config) return "deploy"; + if (config.Deploy !== undefined) return "deploy"; + if (config.Wait !== undefined) return "wait"; + return "deploy"; + } + + // Extract display info from config + _configLabel(config) { + if (!config) return ""; + if (config.Deploy) return config.Deploy.environment || ""; + if (config.Wait) return config.Wait.duration_seconds ? `${config.Wait.duration_seconds}s` : ""; + return ""; + } + + _normalizeStage(s) { + // Handle the new typed format: {id, depends_on, config: {Deploy: {environment}}} + if (s.id !== undefined) { + return { + id: s.id || "", + depends_on: Array.isArray(s.depends_on) ? s.depends_on : [], + config: s.config || { Deploy: { environment: "" } }, + }; + } + // Legacy format: {name, type, depends_on} + const type = s.type || "deploy"; + const config = type === "wait" + ? { Wait: { duration_seconds: s.duration_seconds || 0 } } + : { Deploy: { environment: s.environment || "" } }; + return { + id: s.name || "", + depends_on: Array.isArray(s.depends_on) ? 
s.depends_on : [], + config, + }; + } + + _parseStages(parsed) { + if (Array.isArray(parsed)) { + return parsed.map((s) => this._normalizeStage(s)); + } + if (parsed.stages && Array.isArray(parsed.stages)) { + return parsed.stages.map((s) => this._normalizeStage(s)); + } + // Map format: { "id": { depends_on, config } } + if (typeof parsed === "object" && parsed !== null) { + return Object.entries(parsed).map(([id, val]) => + this._normalizeStage({ id, ...val }) + ); + } + return []; + } + + _sync() { + const target = this._target(); + if (!target) return; + if (this.stages.length === 0) { + target.value = ""; + return; + } + // Filter out stages with no id + const valid = this.stages.filter((s) => s.id.trim()); + target.value = JSON.stringify(valid, null, 2); + } + + _validate() { + const ids = this.stages.map((s) => s.id).filter(Boolean); + const idSet = new Set(ids); + const errors = []; + + if (ids.length !== idSet.size) { + errors.push("Duplicate stage IDs detected"); + } + + for (const s of this.stages) { + for (const dep of s.depends_on) { + if (!idSet.has(dep)) { + errors.push(`"${s.id}" depends on unknown stage "${dep}"`); + } + } + } + + // Cycle detection (Kahn's algorithm) + const inDegree = {}; + const adj = {}; + for (const s of this.stages) { + if (!s.id) continue; + inDegree[s.id] = 0; + adj[s.id] = []; + } + for (const s of this.stages) { + if (!s.id) continue; + for (const dep of s.depends_on) { + if (adj[dep]) { + adj[dep].push(s.id); + inDegree[s.id]++; + } + } + } + const queue = Object.keys(inDegree).filter((k) => inDegree[k] === 0); + let visited = 0; + while (queue.length > 0) { + const node = queue.shift(); + visited++; + for (const next of adj[node] || []) { + inDegree[next]--; + if (inDegree[next] === 0) queue.push(next); + } + } + if (visited < Object.keys(inDegree).length) { + errors.push("Cycle detected in stage dependencies"); + } + + for (let i = 0; i < this.stages.length; i++) { + if (!this.stages[i].id.trim()) { + 
errors.push(`Stage ${i + 1} has no ID`); + } + } + + return errors; + } + + _computeLevels() { + const byId = {}; + for (const s of this.stages) { + if (s.id) byId[s.id] = s; + } + const levels = {}; + const visited = new Set(); + + const getLevel = (id) => { + if (levels[id] !== undefined) return levels[id]; + if (visited.has(id)) return 0; + visited.add(id); + const s = byId[id]; + if (!s || s.depends_on.length === 0) { + levels[id] = 0; + return 0; + } + let maxDep = 0; + for (const dep of s.depends_on) { + if (byId[dep]) { + maxDep = Math.max(maxDep, getLevel(dep) + 1); + } + } + levels[id] = maxDep; + return maxDep; + }; + + for (const s of this.stages) { + if (s.id) getLevel(s.id); + } + return levels; + } + + _render() { + const errors = this._validate(); + if (!this._readonly) this._sync(); + + this.innerHTML = ""; + this.className = "block"; + + // Readonly mode: just show the DAG + if (this._readonly) { + if (this.stages.length > 0) { + const canvas = el("div", "dag-canvas overflow-x-auto"); + this._renderDag(canvas); + this.append(canvas); + } else { + this.append(el("p", "text-xs text-gray-400 italic", "No stages defined")); + } + return; + } + + // Mode toggle + const toolbar = el("div", "flex items-center gap-2 mb-3"); + const builderBtn = el( + "button", + `text-xs px-2.5 py-1 rounded border ${this._mode === "builder" ? "bg-gray-900 text-white border-gray-900" : "border-gray-300 text-gray-600 hover:bg-gray-50"}`, + "Builder" + ); + builderBtn.type = "button"; + builderBtn.onclick = () => { + if (this._mode === "json") { + const ta = this.querySelector(".json-editor"); + if (ta) { + try { + const parsed = JSON.parse(ta.value); + this.stages = this._parseStages(parsed); + this._rawJson = null; + } catch (e) { + this._rawJson = ta.value; + } + } + this._mode = "builder"; + this._render(); + } + }; + const jsonBtn = el( + "button", + `text-xs px-2.5 py-1 rounded border ${this._mode === "json" ? 
"bg-gray-900 text-white border-gray-900" : "border-gray-300 text-gray-600 hover:bg-gray-50"}`, + "JSON" + ); + jsonBtn.type = "button"; + jsonBtn.onclick = () => { + this._mode = "json"; + this._render(); + }; + toolbar.append(builderBtn, jsonBtn); + + if (this._mode === "builder" && this.stages.length > 0) { + const stageCount = el("span", "text-xs text-gray-400 ml-auto", `${this.stages.length} stage${this.stages.length !== 1 ? "s" : ""}`); + toolbar.append(stageCount); + } + + this.append(toolbar); + + if (this._mode === "json") { + this._renderJsonMode(); + } else { + this._renderBuilderMode(errors); + } + } + + _renderJsonMode() { + const target = this._target(); + const currentJson = this._rawJson || (target ? target.value : "") || "[]"; + + const ta = el("textarea", "json-editor w-full border border-gray-300 rounded-md px-3 py-2 text-sm font-mono focus:outline-none focus:ring-2 focus:ring-gray-900 resize-y"); + ta.rows = 12; + ta.value = currentJson; + ta.spellcheck = false; + ta.oninput = () => { + const t = this._target(); + if (t) t.value = ta.value; + this._updateJsonErrors(ta.value); + }; + + const errBox = el("div", "json-errors mt-2"); + this.append(ta, errBox); + this._updateJsonErrors(currentJson); + } + + _updateJsonErrors(value) { + const errBox = this.querySelector(".json-errors"); + if (!errBox) return; + errBox.innerHTML = ""; + if (!value.trim()) return; + try { + const parsed = JSON.parse(value); + const stages = Array.isArray(parsed) ? 
parsed : (parsed.stages || []); + const ids = stages.map((s) => s.id || s.name).filter(Boolean); + if (new Set(ids).size !== ids.length) { + errBox.append(el("p", "text-xs text-amber-600", "Warning: duplicate stage IDs")); + } + } catch (e) { + errBox.append(el("p", "text-xs text-red-600", "Invalid JSON: " + e.message)); + } + } + + _renderBuilderMode(errors) { + if (this.stages.length > 0) { + const dagBox = el("div", "mb-4 border border-gray-200 rounded-lg overflow-hidden"); + const canvas = el("div", "dag-canvas p-4 bg-gray-50 overflow-x-auto"); + canvas.style.minHeight = "80px"; + this._renderDag(canvas); + dagBox.append(canvas); + this.append(dagBox); + } + + const list = el("div", "space-y-2 mb-3"); + for (let i = 0; i < this.stages.length; i++) { + list.append(this._renderStageCard(i)); + } + this.append(list); + + if (errors.length > 0) { + const errBox = el("div", "mb-3 p-3 bg-red-50 border border-red-200 rounded-md"); + for (const err of errors) { + errBox.append(el("p", "text-xs text-red-700", err)); + } + this.append(errBox); + } + + const addBtn = el("button", "text-sm px-3 py-1.5 rounded border border-dashed border-gray-300 text-gray-500 hover:border-gray-400 hover:text-gray-700 w-full", "+ Add stage"); + addBtn.type = "button"; + addBtn.onmousedown = (e) => e.preventDefault(); + addBtn.onclick = () => { + clearTimeout(this._blurTimer); + this.stages.push({ id: "", depends_on: [], config: { Deploy: { environment: "" } } }); + this._render(); + requestAnimationFrame(() => { + const inputs = this.querySelectorAll('input[data-field="id"]'); + if (inputs.length) inputs[inputs.length - 1].focus(); + }); + }; + this.append(addBtn); + } + + _renderStageCard(index) { + const stage = this.stages[index]; + const type = this._stageType(stage.config); + const otherIds = this.stages + .map((s, i) => (i !== index && s.id.trim() ? 
s.id.trim() : null)) + .filter(Boolean); + + const card = el("div", "border border-gray-200 rounded-md bg-white"); + + // Header row + const header = el("div", "flex items-center gap-2 px-3 py-2"); + const badge = el("span", "text-xs font-mono text-gray-400 w-5 shrink-0", `${index + 1}`); + + // ID input + const idInput = el("input", "flex-1 border border-gray-200 rounded px-2 py-1 text-sm focus:outline-none focus:ring-1 focus:ring-gray-400 min-w-0"); + idInput.type = "text"; + idInput.value = stage.id; + idInput.placeholder = "stage id"; + idInput.dataset.field = "id"; + idInput.oninput = () => { + this.stages[index].id = idInput.value.trim().toLowerCase().replace(/[^a-z0-9_-]/g, "-"); + idInput.value = this.stages[index].id; + this._sync(); + this._renderDagIfPresent(); + }; + idInput.onblur = () => { + this._blurTimer = setTimeout(() => this._render(), 150); + }; + + // Type select (deploy / wait) + const typeSelect = el("select", "border border-gray-200 rounded px-2 py-1 text-xs bg-white shrink-0"); + for (const t of ["deploy", "wait"]) { + const opt = document.createElement("option"); + opt.value = t; + opt.textContent = t; + opt.selected = type === t; + typeSelect.append(opt); + } + typeSelect.onmousedown = (e) => e.stopPropagation(); + typeSelect.onchange = () => { + clearTimeout(this._blurTimer); + if (typeSelect.value === "wait") { + this.stages[index].config = { Wait: { duration_seconds: 0 } }; + } else { + this.stages[index].config = { Deploy: { environment: "" } }; + } + this._render(); + }; + + // Remove button + const removeBtn = el("button", "text-gray-400 hover:text-red-500 shrink-0 p-1"); + removeBtn.type = "button"; + removeBtn.innerHTML = ``; + removeBtn.title = "Remove stage"; + removeBtn.onmousedown = (e) => e.preventDefault(); + removeBtn.onclick = () => { + clearTimeout(this._blurTimer); + const removedId = this.stages[index].id; + this.stages.splice(index, 1); + for (const s of this.stages) { + s.depends_on = s.depends_on.filter((d) => d !== 
removedId); + } + this._render(); + }; + + header.append(badge, idInput, typeSelect, removeBtn); + card.append(header); + + // Config row (type-specific fields) + const configRow = el("div", "px-3 pb-2 flex items-center gap-2 flex-wrap"); + if (type === "deploy") { + const envLabel = el("span", "text-xs text-gray-500 shrink-0", "env:"); + const envInput = el("input", "border border-gray-200 rounded px-2 py-1 text-xs w-32 focus:outline-none focus:ring-1 focus:ring-gray-400"); + envInput.type = "text"; + envInput.value = (stage.config.Deploy && stage.config.Deploy.environment) || ""; + envInput.placeholder = "environment"; + envInput.onmousedown = (e) => e.stopPropagation(); + envInput.oninput = () => { + if (!this.stages[index].config.Deploy) this.stages[index].config = { Deploy: { environment: "" } }; + this.stages[index].config.Deploy.environment = envInput.value.trim(); + this._sync(); + }; + envInput.onblur = () => { + this._blurTimer = setTimeout(() => this._render(), 150); + }; + configRow.append(envLabel, envInput); + } else if (type === "wait") { + const durLabel = el("span", "text-xs text-gray-500 shrink-0", "wait:"); + const durInput = el("input", "border border-gray-200 rounded px-2 py-1 text-xs w-20 focus:outline-none focus:ring-1 focus:ring-gray-400"); + durInput.type = "number"; + durInput.min = "0"; + durInput.value = (stage.config.Wait && stage.config.Wait.duration_seconds) || 0; + durInput.placeholder = "seconds"; + durInput.onmousedown = (e) => e.stopPropagation(); + durInput.oninput = () => { + if (!this.stages[index].config.Wait) this.stages[index].config = { Wait: { duration_seconds: 0 } }; + this.stages[index].config.Wait.duration_seconds = parseInt(durInput.value) || 0; + this._sync(); + }; + durInput.onblur = () => { + this._blurTimer = setTimeout(() => this._render(), 150); + }; + const secLabel = el("span", "text-xs text-gray-400", "seconds"); + configRow.append(durLabel, durInput, secLabel); + } + card.append(configRow); + + // 
Dependencies row + if (otherIds.length > 0) { + const depsRow = el("div", "px-3 pb-2 flex items-center gap-2 flex-wrap"); + const label = el("span", "text-xs text-gray-500 shrink-0", "after:"); + depsRow.append(label); + + for (const dep of otherIds) { + const isSelected = stage.depends_on.includes(dep); + const chip = el( + "button", + `text-xs px-2 py-0.5 rounded-full border transition-colors ${isSelected ? "bg-gray-900 text-white border-gray-900" : "border-gray-300 text-gray-500 hover:border-gray-400"}`, + dep + ); + chip.type = "button"; + chip.onmousedown = (e) => e.preventDefault(); + chip.onclick = () => { + clearTimeout(this._blurTimer); + if (isSelected) { + this.stages[index].depends_on = this.stages[index].depends_on.filter((d) => d !== dep); + } else { + this.stages[index].depends_on.push(dep); + } + this._render(); + }; + depsRow.append(chip); + } + card.append(depsRow); + } + + return card; + } + + _renderDagIfPresent() { + const canvas = this.querySelector(".dag-canvas"); + if (canvas) this._renderDag(canvas); + } + + _renderDag(canvas) { + canvas.innerHTML = ""; + const named = this.stages.filter((s) => s.id.trim()); + if (named.length === 0) { + canvas.append(el("p", "text-xs text-gray-400 italic", "Add stages to see the pipeline graph")); + return; + } + + const levels = this._computeLevels(); + const maxLevel = Math.max(0, ...Object.values(levels)); + + const columns = []; + for (let l = 0; l <= maxLevel; l++) columns.push([]); + for (const s of named) { + const lvl = levels[s.id] || 0; + columns[lvl].push(s); + } + + const svgNS = "http://www.w3.org/2000/svg"; + const NODE_W = 120; + const NODE_H = 40; + const COL_GAP = 60; + const ROW_GAP = 12; + + const positions = {}; + let totalW = 0; + let totalH = 0; + + for (let col = 0; col <= maxLevel; col++) { + const stages = columns[col]; + for (let row = 0; row < stages.length; row++) { + const x = col * (NODE_W + COL_GAP); + const y = row * (NODE_H + ROW_GAP); + positions[stages[row].id] = { x, y 
}; + totalW = Math.max(totalW, x + NODE_W); + totalH = Math.max(totalH, y + NODE_H); + } + } + + const PAD = 8; + const svgW = totalW + PAD * 2; + const svgH = totalH + PAD * 2; + + const svg = document.createElementNS(svgNS, "svg"); + svg.setAttribute("width", svgW); + svg.setAttribute("height", svgH); + svg.style.display = "block"; + + // Arrowhead marker + const defs = document.createElementNS(svgNS, "defs"); + const marker = document.createElementNS(svgNS, "marker"); + marker.setAttribute("id", "pb-arrow"); + marker.setAttribute("viewBox", "0 0 10 10"); + marker.setAttribute("refX", "10"); + marker.setAttribute("refY", "5"); + marker.setAttribute("markerWidth", "6"); + marker.setAttribute("markerHeight", "6"); + marker.setAttribute("orient", "auto-start-reverse"); + const arrowPath = document.createElementNS(svgNS, "path"); + arrowPath.setAttribute("d", "M 0 0 L 10 5 L 0 10 z"); + arrowPath.setAttribute("fill", "#9ca3af"); + marker.append(arrowPath); + defs.append(marker); + svg.append(defs); + + // Draw edges + for (const s of named) { + const to = positions[s.id]; + if (!to) continue; + for (const dep of s.depends_on) { + const from = positions[dep]; + if (!from) continue; + const line = document.createElementNS(svgNS, "line"); + line.setAttribute("x1", from.x + NODE_W + PAD); + line.setAttribute("y1", from.y + NODE_H / 2 + PAD); + line.setAttribute("x2", to.x + PAD); + line.setAttribute("y2", to.y + NODE_H / 2 + PAD); + line.setAttribute("stroke", "#d1d5db"); + line.setAttribute("stroke-width", "2"); + line.setAttribute("marker-end", "url(#pb-arrow)"); + svg.append(line); + } + } + + // Draw nodes + const TYPE_COLORS = { + deploy: { bg: "#dbeafe", border: "#93c5fd", text: "#1e40af" }, + wait: { bg: "#fef3c7", border: "#fcd34d", text: "#92400e" }, + }; + + for (const s of named) { + const pos = positions[s.id]; + if (!pos) continue; + const type = this._stageType(s.config); + const colors = TYPE_COLORS[type] || TYPE_COLORS.deploy; + const label = 
this._configLabel(s.config); + + const rect = document.createElementNS(svgNS, "rect"); + rect.setAttribute("x", pos.x + PAD); + rect.setAttribute("y", pos.y + PAD); + rect.setAttribute("width", NODE_W); + rect.setAttribute("height", NODE_H); + rect.setAttribute("rx", "6"); + rect.setAttribute("fill", colors.bg); + rect.setAttribute("stroke", colors.border); + rect.setAttribute("stroke-width", "1.5"); + svg.append(rect); + + // Stage ID text + const text = document.createElementNS(svgNS, "text"); + text.setAttribute("x", pos.x + NODE_W / 2 + PAD); + text.setAttribute("y", pos.y + NODE_H / 2 + PAD + (label ? -4 : 0)); + text.setAttribute("text-anchor", "middle"); + text.setAttribute("dominant-baseline", "middle"); + text.setAttribute("fill", colors.text); + text.setAttribute("font-size", "12"); + text.setAttribute("font-weight", "600"); + text.textContent = s.id.length > 14 ? s.id.slice(0, 13) + "…" : s.id; + svg.append(text); + + // Config label (environment or duration) + if (label) { + const sub = document.createElementNS(svgNS, "text"); + sub.setAttribute("x", pos.x + NODE_W / 2 + PAD); + sub.setAttribute("y", pos.y + NODE_H / 2 + 10 + PAD); + sub.setAttribute("text-anchor", "middle"); + sub.setAttribute("dominant-baseline", "middle"); + sub.setAttribute("fill", colors.text); + sub.setAttribute("font-size", "9"); + sub.setAttribute("opacity", "0.7"); + sub.textContent = label; + svg.append(sub); + } + } + + canvas.append(svg); + } +} + +function el(tag, className, text) { + const e = document.createElement(tag); + if (className) e.className = className; + if (text) e.textContent = text; + return e; +} + +customElements.define("pipeline-builder", PipelineBuilder); diff --git a/static/js/swim-lanes.js b/static/js/swim-lanes.js index 91d33ab..2025fb4 100644 --- a/static/js/swim-lanes.js +++ b/static/js/swim-lanes.js @@ -2,34 +2,30 @@ * web component * * Renders colored vertical bars alongside a release timeline. 
- * Each bar grows from the BOTTOM of the timeline upward to the top edge - * of the last release card deployed to that environment. - * Labels are rendered at the bottom of each bar, rotated vertically. + * Bars grow from the BOTTOM of the timeline upward to the dot position + * (avatar center) of the relevant release card. * - * Usage: - * - *
- *
- *
- *
...
- *
...
- *
- *
+ * In-flight deployments (QUEUED/RUNNING/ASSIGNED) show a hatched segment + * with direction arrows: ▲ for forward deploy, ▼ for rollback. + * + * data-envs format: "env:STATUS,env:STATUS" e.g. "staging:SUCCEEDED,prod:QUEUED" */ const ENV_COLORS = { - prod: ["#f472b6", "#ec4899"], - production: ["#f472b6", "#ec4899"], - preprod: ["#fdba74", "#f97316"], - "pre-prod": ["#fdba74", "#f97316"], - staging: ["#fbbf24", "#ca8a04"], - stage: ["#fbbf24", "#ca8a04"], - dev: ["#a78bfa", "#7c3aed"], - development: ["#a78bfa", "#7c3aed"], - test: ["#67e8f9", "#0891b2"], + prod: ["#ec4899", "#fce7f3"], + production: ["#ec4899", "#fce7f3"], + preprod: ["#f97316", "#ffedd5"], + "pre-prod": ["#f97316", "#ffedd5"], + staging: ["#eab308", "#fef9c3"], + stage: ["#eab308", "#fef9c3"], + dev: ["#8b5cf6", "#ede9fe"], + development: ["#8b5cf6", "#ede9fe"], + test: ["#06b6d4", "#cffafe"], }; -const DEFAULT_COLORS = ["#d1d5db", "#9ca3af"]; +const DEFAULT_COLORS = ["#6b7280", "#e5e7eb"]; +const IN_FLIGHT = new Set(["QUEUED", "RUNNING", "ASSIGNED"]); +const DEPLOYED = new Set(["SUCCEEDED"]); function envColors(name) { const lower = name.toLowerCase(); @@ -40,17 +36,80 @@ function envColors(name) { return DEFAULT_COLORS; } +function parseEnvs(raw) { + if (!raw) return []; + return raw + .split(",") + .map((s) => s.trim()) + .filter(Boolean) + .map((entry) => { + const colon = entry.indexOf(":"); + if (colon === -1) return { env: entry, status: "SUCCEEDED" }; + return { env: entry.slice(0, colon), status: entry.slice(colon + 1) }; + }); +} + +function dotY(card, timelineTop) { + const avatar = card.querySelector("[data-avatar]"); + const anchor = avatar || card; + const r = anchor.getBoundingClientRect(); + return r.top + r.height / 2 - timelineTop; +} + +/** Create an inline SVG data URL for a diagonal hatch pattern */ +function hatchPattern(color, bgColor) { + const svg = ` + + + `; + return `url("data:image/svg+xml,${encodeURIComponent(svg)}")`; +} + +// Inject CSS once +if 
(!document.getElementById("swim-lane-styles")) { + const style = document.createElement("style"); + style.id = "swim-lane-styles"; + style.textContent = ` + @keyframes lane-pulse { + 0%, 100% { opacity: 0.6; } + 50% { opacity: 1; } + } + .lane-pulse { + animation: lane-pulse 2s ease-in-out infinite; + } + .lane-arrow { + font-size: 9px; + line-height: 1; + font-weight: 700; + text-align: center; + width: 100%; + position: absolute; + left: 0; + z-index: 3; + pointer-events: none; + } + `; + document.head.appendChild(style); +} + +const BAR_WIDTH = 20; +const BAR_GAP = 4; +const DOT_SIZE = 12; + class SwimLanes extends HTMLElement { connectedCallback() { - this.style.display = "flex"; - this._render(); - this._ro = new ResizeObserver(() => this._render()); - const timeline = this.querySelector("[data-swimlane-timeline]"); - if (timeline) { - this._ro.observe(timeline); - // Re-render when details elements are toggled (show/hide commits) - timeline.addEventListener("toggle", () => this._render(), true); - } + // Lanes live in [data-swimlane-gutter], a CSS grid column to the + // left of the timeline. The grid column width is pre-set in the + // template (lane_count * 18 + 8 px) so there is no layout shift. + requestAnimationFrame(() => { + this._render(); + this._ro = new ResizeObserver(() => this._render()); + const timeline = this.querySelector("[data-swimlane-timeline]"); + if (timeline) { + this._ro.observe(timeline); + timeline.addEventListener("toggle", () => this._render(), true); + } + }); } disconnectedCallback() { @@ -65,37 +124,70 @@ class SwimLanes extends HTMLElement { if (cards.length === 0) return; const timelineRect = timeline.getBoundingClientRect(); - const lanes = Array.from(this.querySelectorAll("[data-lane]")); + if (timelineRect.height === 0) return; + const gutter = this.querySelector("[data-swimlane-gutter]"); + const lanes = gutter + ? 
Array.from(gutter.querySelectorAll("[data-lane]")) + : Array.from(this.querySelectorAll("[data-lane]")); for (const lane of lanes) { const env = lane.dataset.lane; - const [barColor, labelColor] = envColors(env); + const [barColor, lightColor] = envColors(env); - // Find the LAST (bottommost) card deployed to this env - let lastCard = null; - for (const card of cards) { - const envs = (card.dataset.envs || "") - .split(",") - .map((s) => s.trim()) - .filter(Boolean); - if (envs.includes(env)) lastCard = card; + let deployedCard = null; + let deployedIdx = -1; + let flightCard = null; + let flightIdx = -1; + + for (let i = 0; i < cards.length; i++) { + const entries = parseEnvs(cards[i].dataset.envs); + for (const entry of entries) { + if (entry.env !== env) continue; + if (DEPLOYED.has(entry.status) && !deployedCard) { + deployedCard = cards[i]; + deployedIdx = i; + } + if (IN_FLIGHT.has(entry.status) && !flightCard) { + flightCard = cards[i]; + flightIdx = i; + } + } } - // Bar height: from bottom of timeline up to top of the last deployed card - let barHeight = 0; - if (lastCard) { - const cardRect = lastCard.getBoundingClientRect(); - barHeight = timelineRect.bottom - cardRect.top; + const timelineH = timelineRect.height; + + // Card top edge (Y relative to timeline) — bars extend to the card top + const deployedTop = deployedCard + ? deployedCard.getBoundingClientRect().top - timelineRect.top + : null; + const flightTop = flightCard + ? flightCard.getBoundingClientRect().top - timelineRect.top + : null; + // Dot center Y — used for arrow placement + const flightDot = flightCard + ? dotY(flightCard, timelineRect.top) + : null; + + // Solid bar: from bottom up to the card top of the LOWER card. + // If both exist, only go to whichever is lower (further down) to avoid overlap. 
+ let solidBarFromBottom = 0; + if (deployedTop !== null && flightTop !== null) { + const lowerTop = Math.max(deployedTop, flightTop); + solidBarFromBottom = timelineH - lowerTop; + } else if (deployedTop !== null) { + solidBarFromBottom = timelineH - deployedTop; } - // Style the lane container - lane.style.width = "14px"; - lane.style.marginRight = "4px"; + // Style lane container — width/gap only; height comes from the grid row + lane.style.width = BAR_WIDTH + "px"; + lane.style.marginRight = BAR_GAP + "px"; lane.style.position = "relative"; - lane.style.minHeight = timelineRect.height + "px"; - lane.style.flexShrink = "0"; - // Create or update bar (anchored to bottom) + const hasHatch = !!flightCard; + const hasSolid = solidBarFromBottom > 0; + const R = "9999px"; + + // ── Solid bar ── let bar = lane.querySelector(".lane-bar"); if (!bar) { bar = document.createElement("div"); @@ -104,26 +196,85 @@ class SwimLanes extends HTMLElement { bar.style.bottom = "0"; bar.style.left = "0"; bar.style.width = "100%"; - bar.style.borderRadius = "9999px"; lane.appendChild(bar); } - bar.style.height = barHeight + "px"; + bar.style.height = Math.max(solidBarFromBottom, 0) + "px"; bar.style.backgroundColor = barColor; + // Round bottom always; round top only if no hatch connects above + bar.style.borderRadius = hasHatch + ? `0 0 ${R} ${R}` + : R; + + // ── Hatched segment for in-flight ── + let hatch = lane.querySelector(".lane-hatch"); + let arrow = lane.querySelector(".lane-arrow"); + if (flightCard) { + const isForward = deployedIdx === -1 || flightIdx < deployedIdx; + + // Hatched segment spans between the two card tops (or bottom of timeline) + const anchorY = deployedTop !== null ? 
deployedTop : timelineH; + const topY = Math.min(anchorY, flightTop); + const bottomY = Math.max(anchorY, flightTop); + const segHeight = bottomY - topY; + + if (!hatch) { + hatch = document.createElement("div"); + hatch.className = "lane-hatch lane-pulse"; + hatch.style.position = "absolute"; + hatch.style.left = "0"; + hatch.style.width = "100%"; + hatch.style.backgroundSize = "8px 8px"; + hatch.style.backgroundRepeat = "repeat"; + lane.appendChild(hatch); + } + hatch.style.backgroundImage = isForward + ? hatchPattern(barColor, lightColor) + : hatchPattern("#f59e0b", "#fef3c7"); + hatch.style.top = topY + "px"; + hatch.style.height = Math.max(segHeight, 4) + "px"; + hatch.style.display = ""; + // Round top always; round bottom only if no solid bar connects below + hatch.style.borderRadius = hasSolid + ? `${R} ${R} 0 0` + : R; + + // Direction arrow: + // Forward (▲): shown at the in-flight card (destination) + // Rollback (▼): shown at the deployed card (source we're rolling back from) + const arrowDotY = isForward + ? flightDot + : dotY(deployedCard, timelineRect.top); + if (!arrow) { + arrow = document.createElement("div"); + arrow.className = "lane-arrow"; + lane.appendChild(arrow); + } + arrow.textContent = isForward ? "\u25B2" : "\u25BC"; + arrow.style.color = isForward ? barColor : "#f59e0b"; + arrow.style.top = arrowDotY - 5 + "px"; + arrow.style.display = ""; + } else { + if (hatch) hatch.style.display = "none"; + if (arrow) arrow.style.display = "none"; + } + + // ── Dots ── + // The arrow replaces the dot on one card: + // Forward: arrow on in-flight card (destination) + // Rollback: arrow on deployed card (source) + const arrowCard = flightCard + ? (deployedIdx === -1 || flightIdx < deployedIdx ? 
flightCard : deployedCard) + : null; - // Place dots on the lane for each card deployed to this env const existingDots = lane.querySelectorAll(".lane-dot"); let dotIndex = 0; for (const card of cards) { - const envs = (card.dataset.envs || "") - .split(",") - .map((s) => s.trim()) - .filter(Boolean); - if (!envs.includes(env)) continue; + const entries = parseEnvs(card.dataset.envs); + const match = entries.find((e) => e.env === env); + if (!match) continue; + if (card === arrowCard) continue; // arrow shown instead of dot - const avatar = card.querySelector("[data-avatar]"); - const anchor = avatar || card; - const anchorRect = anchor.getBoundingClientRect(); - const centerY = anchorRect.top + anchorRect.height / 2 - timelineRect.top; + const cy = dotY(card, timelineRect.top); let dot = existingDots[dotIndex]; if (!dot) { @@ -132,41 +283,23 @@ class SwimLanes extends HTMLElement { dot.style.position = "absolute"; dot.style.left = "50%"; dot.style.transform = "translateX(-50%)"; - dot.style.width = "8px"; - dot.style.height = "8px"; + dot.style.width = DOT_SIZE + "px"; + dot.style.height = DOT_SIZE + "px"; dot.style.borderRadius = "50%"; - dot.style.backgroundColor = "#fff"; - dot.style.border = "2px solid " + barColor; - dot.style.zIndex = "1"; + dot.style.zIndex = "2"; lane.appendChild(dot); } - dot.style.top = centerY - 4 + "px"; - dot.style.borderColor = barColor; + dot.style.top = cy - DOT_SIZE / 2 + "px"; + dot.style.backgroundColor = "#fff"; + dot.style.border = "2px solid " + barColor; + dot.classList.remove("lane-pulse"); dotIndex++; } - // Remove extra dots from previous renders for (let i = dotIndex; i < existingDots.length; i++) { existingDots[i].remove(); } - // Create or update label (at the very bottom, below bars) - let label = lane.querySelector(".lane-label"); - if (!label) { - label = document.createElement("span"); - label.className = "lane-label"; - label.style.position = "absolute"; - label.style.bottom = "-4px"; - label.style.left = "50%"; - 
label.style.writingMode = "vertical-lr"; - label.style.transform = "translateX(-50%) translateY(100%) rotate(180deg)"; - label.style.fontSize = "10px"; - label.style.fontWeight = "500"; - label.style.whiteSpace = "nowrap"; - label.style.paddingTop = "6px"; - lane.appendChild(label); - } - label.textContent = env; - label.style.color = labelColor; + // Labels are rendered server-side above the gutter (no JS needed). } } } diff --git a/templates/base.html.jinja b/templates/base.html.jinja index e658dc8..42faa26 100644 --- a/templates/base.html.jinja +++ b/templates/base.html.jinja @@ -7,7 +7,7 @@ - + {% if user is defined and user %} {# ── Authenticated nav ─────────────────────────────────────── #} {% endif %} -
+
{% block content %}{% endblock %}
-