feat: add destination, environment, trigger, policy, and release-pipeline support to the gRPC client

Signed-off-by: kjuulh <contact@kjuulh.io>
This commit is contained in:
2026-03-08 23:00:03 +01:00
parent 45353089c2
commit 5a5f9a3003
104 changed files with 23417 additions and 2027 deletions

View File

@@ -1,10 +1,19 @@
use forage_core::auth::{
AuthError, AuthTokens, CreatedToken, ForestAuth, PersonalAccessToken, User, UserEmail,
UserProfile,
};
use forage_core::platform::{
Artifact, ArtifactContext, ArtifactDestination, ArtifactSource, Destination, ForestPlatform,
Organisation, OrgMember, PlatformError,
Artifact, ArtifactContext, ArtifactDestination, ArtifactRef, ArtifactSource, CreatePolicyInput,
CreateReleasePipelineInput, CreateTriggerInput, Destination, DestinationType, Environment,
ForestPlatform, Organisation, OrgMember, PipelineStage, PipelineStageConfig, PlatformError,
Policy, PolicyConfig, ReleasePipeline, Trigger, UpdatePolicyInput,
UpdateReleasePipelineInput, UpdateTriggerInput,
};
use forage_grpc::policy_service_client::PolicyServiceClient;
use forage_grpc::release_pipeline_service_client::ReleasePipelineServiceClient;
use forage_grpc::trigger_service_client::TriggerServiceClient;
use forage_grpc::destination_service_client::DestinationServiceClient;
use forage_grpc::environment_service_client::EnvironmentServiceClient;
use forage_grpc::organisation_service_client::OrganisationServiceClient;
use forage_grpc::release_service_client::ReleaseServiceClient;
use forage_grpc::users_service_client::UsersServiceClient;
@@ -42,10 +51,42 @@ impl GrpcForestClient {
OrganisationServiceClient::new(self.channel.clone())
}
fn release_client(&self) -> ReleaseServiceClient<Channel> {
pub(crate) fn artifact_client(
&self,
) -> forage_grpc::artifact_service_client::ArtifactServiceClient<Channel> {
forage_grpc::artifact_service_client::ArtifactServiceClient::new(self.channel.clone())
}
pub(crate) fn release_client(&self) -> ReleaseServiceClient<Channel> {
ReleaseServiceClient::new(self.channel.clone())
}
/// Client for the environment service.
fn env_client(&self) -> EnvironmentServiceClient<Channel> {
    EnvironmentServiceClient::new(self.channel.clone())
}
/// Client for the destination service.
fn dest_client(&self) -> DestinationServiceClient<Channel> {
    DestinationServiceClient::new(self.channel.clone())
}
/// Client for the trigger service.
fn trigger_client(&self) -> TriggerServiceClient<Channel> {
    TriggerServiceClient::new(self.channel.clone())
}
/// Client for the policy service.
fn policy_client(&self) -> PolicyServiceClient<Channel> {
    PolicyServiceClient::new(self.channel.clone())
}
/// Client for the release-pipeline service.
fn pipeline_client(&self) -> ReleasePipelineServiceClient<Channel> {
    ReleasePipelineServiceClient::new(self.channel.clone())
}
/// Client for the event service. Public so callers outside this module can
/// open event streams over the same channel.
pub fn event_client(
    &self,
) -> forage_grpc::event_service_client::EventServiceClient<Channel> {
    forage_grpc::event_service_client::EventServiceClient::new(self.channel.clone())
}
/// Build a bearer-authenticated request, mapping header-construction
/// failures into `AuthError::Other`.
fn authed_request<T>(access_token: &str, msg: T) -> Result<Request<T>, AuthError> {
    bearer_request(access_token, msg).map_err(AuthError::Other)
}
@@ -202,6 +243,41 @@ impl ForestAuth for GrpcForestClient {
Ok(convert_user(user))
}
/// Look up a user's public profile by username.
///
/// Sends a `GetUserRequest` with the `Username` identifier variant and maps
/// the proto user onto `UserProfile`. A present `created_at` timestamp is
/// rendered as an RFC 3339 string.
///
/// # Errors
/// Returns `AuthError` when the request cannot be authorised, the RPC fails,
/// or the response carries no user.
async fn get_user_by_username(
    &self,
    access_token: &str,
    username: &str,
) -> Result<UserProfile, AuthError> {
    let req = Self::authed_request(
        access_token,
        forage_grpc::GetUserRequest {
            identifier: Some(forage_grpc::get_user_request::Identifier::Username(
                username.into(),
            )),
        },
    )?;
    let resp = self
        .client()
        .get_user(req)
        .await
        .map_err(map_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let user = resp
        .user
        .ok_or_else(|| AuthError::Other("no user in response".into()))?;
    Ok(UserProfile {
        user_id: user.user_id,
        username: user.username,
        // Out-of-range timestamps fall back to an empty string rather than
        // failing the whole call.
        created_at: user.created_at.map(|ts| {
            chrono::DateTime::from_timestamp(ts.seconds, ts.nanos as u32)
                .map(|dt| dt.to_rfc3339())
                .unwrap_or_default()
        }),
    })
}
async fn list_tokens(
&self,
access_token: &str,
@@ -396,8 +472,13 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact {
source_type: s.source_type.filter(|v| !v.is_empty()),
run_url: s.run_url.filter(|v| !v.is_empty()),
});
// Artifact proto does not carry git ref directly; git info comes from AnnotateRelease.
// We leave git_ref as None for now.
let git_ref = a.r#ref.map(|r| ArtifactRef {
commit_sha: r.commit_sha,
branch: r.branch.filter(|v| !v.is_empty()),
commit_message: r.commit_message.filter(|v| !v.is_empty()),
version: r.version.filter(|v| !v.is_empty()),
repo_url: r.repo_url.filter(|v| !v.is_empty()),
});
let destinations = a
.destinations
.into_iter()
@@ -419,6 +500,11 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact {
} else {
Some(d.type_version)
},
status: if d.status.is_empty() {
None
} else {
Some(d.status)
},
})
.collect();
Artifact {
@@ -435,12 +521,202 @@ fn convert_artifact(a: forage_grpc::Artifact) -> Artifact {
pr: ctx.pr.filter(|v| !v.is_empty()),
},
source,
git_ref: None,
git_ref,
destinations,
created_at: a.created_at,
}
}
/// Map a `PipelineStage` proto into the domain `PipelineStage`.
///
/// A missing `config` oneof degrades to a `Deploy` stage with an empty
/// environment rather than failing.
fn convert_pipeline_stage(stage: forage_grpc::PipelineStage) -> PipelineStage {
    use forage_grpc::pipeline_stage::Config;

    let config = if let Some(cfg) = stage.config {
        match cfg {
            Config::Deploy(d) => PipelineStageConfig::Deploy {
                environment: d.environment,
            },
            Config::Wait(w) => PipelineStageConfig::Wait {
                duration_seconds: w.duration_seconds,
            },
        }
    } else {
        PipelineStageConfig::Deploy {
            environment: String::new(),
        }
    };

    PipelineStage {
        id: stage.id,
        depends_on: stage.depends_on,
        config,
    }
}
/// Convert a `PipelineStageState` proto message (from GetReleaseIntentStates)
/// to the domain type. Same enum mapping as `convert_pipeline_run_stage`.
fn convert_pipeline_stage_state(
s: forage_grpc::PipelineStageState,
) -> forage_core::platform::PipelineRunStageState {
let stage_type = match forage_grpc::PipelineRunStageType::try_from(s.stage_type) {
Ok(forage_grpc::PipelineRunStageType::Deploy) => "deploy",
Ok(forage_grpc::PipelineRunStageType::Wait) => "wait",
_ => "unknown",
};
let status = match forage_grpc::PipelineRunStageStatus::try_from(s.status) {
Ok(forage_grpc::PipelineRunStageStatus::Pending) => "PENDING",
Ok(forage_grpc::PipelineRunStageStatus::Active) => "RUNNING",
Ok(forage_grpc::PipelineRunStageStatus::Succeeded) => "SUCCEEDED",
Ok(forage_grpc::PipelineRunStageStatus::Failed) => "FAILED",
Ok(forage_grpc::PipelineRunStageStatus::Cancelled) => "CANCELLED",
_ => "PENDING",
};
forage_core::platform::PipelineRunStageState {
stage_id: s.stage_id,
depends_on: s.depends_on,
stage_type: stage_type.into(),
status: status.into(),
environment: s.environment,
duration_seconds: s.duration_seconds,
queued_at: s.queued_at,
started_at: s.started_at,
completed_at: s.completed_at,
error_message: s.error_message,
wait_until: s.wait_until,
release_ids: s.release_ids,
}
}
/// Convert a `ReleaseStepState` proto message into the domain type.
/// Pure 1:1 field mapping; no normalisation is applied.
fn convert_release_step_state(
    s: forage_grpc::ReleaseStepState,
) -> forage_core::platform::ReleaseStepState {
    forage_core::platform::ReleaseStepState {
        release_id: s.release_id,
        stage_id: s.stage_id,
        destination_name: s.destination_name,
        environment: s.environment,
        status: s.status,
        queued_at: s.queued_at,
        assigned_at: s.assigned_at,
        started_at: s.started_at,
        completed_at: s.completed_at,
        error_message: s.error_message,
    }
}
/// Serialise domain pipeline stages into their proto representation for
/// create/update release-pipeline requests.
fn convert_stages_to_grpc(stages: &[PipelineStage]) -> Vec<forage_grpc::PipelineStage> {
    let mut out = Vec::with_capacity(stages.len());
    for stage in stages {
        let config = match &stage.config {
            PipelineStageConfig::Deploy { environment } => {
                forage_grpc::pipeline_stage::Config::Deploy(forage_grpc::DeployStageConfig {
                    environment: environment.clone(),
                })
            }
            PipelineStageConfig::Wait { duration_seconds } => {
                forage_grpc::pipeline_stage::Config::Wait(forage_grpc::WaitStageConfig {
                    duration_seconds: *duration_seconds,
                })
            }
        };
        out.push(forage_grpc::PipelineStage {
            id: stage.id.clone(),
            depends_on: stage.depends_on.clone(),
            config: Some(config),
        });
    }
    out
}
/// Convert a `ReleasePipeline` proto into the domain `ReleasePipeline`,
/// converting each stage via `convert_pipeline_stage`.
fn convert_release_pipeline(p: forage_grpc::ReleasePipeline) -> ReleasePipeline {
    ReleasePipeline {
        id: p.id,
        name: p.name,
        enabled: p.enabled,
        stages: p.stages.into_iter().map(convert_pipeline_stage).collect(),
        created_at: p.created_at,
        updated_at: p.updated_at,
    }
}
/// Convert a `Trigger` proto into the domain `Trigger`.
/// Pure 1:1 field mapping; no normalisation is applied.
fn convert_trigger(t: forage_grpc::Trigger) -> Trigger {
    Trigger {
        id: t.id,
        name: t.name,
        enabled: t.enabled,
        branch_pattern: t.branch_pattern,
        title_pattern: t.title_pattern,
        author_pattern: t.author_pattern,
        commit_message_pattern: t.commit_message_pattern,
        source_type_pattern: t.source_type_pattern,
        target_environments: t.target_environments,
        target_destinations: t.target_destinations,
        force_release: t.force_release,
        use_pipeline: t.use_pipeline,
        created_at: t.created_at,
        updated_at: t.updated_at,
    }
}
/// Convert a `Policy` proto into the domain `Policy`.
///
/// Unknown policy-type enum values map to `"unknown"`; a missing config
/// oneof falls back to an all-empty soak-time config.
fn convert_policy(p: forage_grpc::Policy) -> Policy {
    use forage_grpc::policy::Config as GrpcConfig;

    let policy_type_str = match forage_grpc::PolicyType::try_from(p.policy_type) {
        Ok(forage_grpc::PolicyType::SoakTime) => "soak_time",
        Ok(forage_grpc::PolicyType::BranchRestriction) => "branch_restriction",
        _ => "unknown",
    };

    let config = match p.config {
        Some(GrpcConfig::SoakTime(c)) => PolicyConfig::SoakTime {
            source_environment: c.source_environment,
            target_environment: c.target_environment,
            duration_seconds: c.duration_seconds,
        },
        Some(GrpcConfig::BranchRestriction(c)) => PolicyConfig::BranchRestriction {
            target_environment: c.target_environment,
            branch_pattern: c.branch_pattern,
        },
        None => PolicyConfig::SoakTime {
            source_environment: String::new(),
            target_environment: String::new(),
            duration_seconds: 0,
        },
    };

    Policy {
        id: p.id,
        name: p.name,
        enabled: p.enabled,
        policy_type: policy_type_str.into(),
        config,
        created_at: p.created_at,
        updated_at: p.updated_at,
    }
}
/// Translate a domain `PolicyConfig` into the pair of
/// (proto policy-type discriminant, create-request config oneof)
/// expected by `CreatePolicyRequest`.
fn policy_config_to_grpc(
    config: &PolicyConfig,
) -> (i32, Option<forage_grpc::create_policy_request::Config>) {
    use forage_grpc::create_policy_request::Config as GrpcConfig;

    match config {
        PolicyConfig::SoakTime {
            source_environment,
            target_environment,
            duration_seconds,
        } => {
            let cfg = forage_grpc::SoakTimeConfig {
                source_environment: source_environment.clone(),
                target_environment: target_environment.clone(),
                duration_seconds: *duration_seconds,
            };
            (
                forage_grpc::PolicyType::SoakTime as i32,
                Some(GrpcConfig::SoakTime(cfg)),
            )
        }
        PolicyConfig::BranchRestriction {
            target_environment,
            branch_pattern,
        } => {
            let cfg = forage_grpc::BranchRestrictionConfig {
                target_environment: target_environment.clone(),
                branch_pattern: branch_pattern.clone(),
            };
            (
                forage_grpc::PolicyType::BranchRestriction as i32,
                Some(GrpcConfig::BranchRestriction(cfg)),
            )
        }
    }
}
fn convert_member(m: forage_grpc::OrganisationMember) -> OrgMember {
OrgMember {
user_id: m.user_id,
@@ -688,13 +964,661 @@ impl ForestPlatform for GrpcForestClient {
Ok(convert_artifact(artifact))
}
/// List all environments configured for an organisation.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn list_environments(
    &self,
    access_token: &str,
    organisation: &str,
) -> Result<Vec<Environment>, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::ListEnvironmentsRequest {
            organisation: organisation.into(),
        },
    )?;
    let resp = self
        .env_client()
        .list_environments(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    Ok(resp
        .environments
        .into_iter()
        .map(|e| Environment {
            id: e.id,
            organisation: e.organisation,
            name: e.name,
            // Proto3 strings cannot distinguish unset from empty; treat "" as absent.
            description: e.description.filter(|v| !v.is_empty()),
            sort_order: e.sort_order,
            created_at: e.created_at,
        })
        .collect())
}
/// List all destinations configured for an organisation.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn list_destinations(
    &self,
    access_token: &str,
    organisation: &str,
) -> Result<Vec<Destination>, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::GetDestinationsRequest {
            organisation: organisation.into(),
        },
    )?;
    let resp = self
        .dest_client()
        .get_destinations(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    Ok(resp
        .destinations
        .into_iter()
        .map(|d| Destination {
            name: d.name,
            environment: d.environment,
            organisation: d.organisation,
            metadata: d.metadata,
            // `type` is a Rust keyword, so the proto field surfaces as `r#type`.
            dest_type: d.r#type.map(|t| DestinationType {
                organisation: t.organisation,
                name: t.name,
                version: t.version,
            }),
        })
        .collect())
}
/// Create a new environment in an organisation and return the created record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no environment.
async fn create_environment(
    &self,
    access_token: &str,
    organisation: &str,
    name: &str,
    description: Option<&str>,
    sort_order: i32,
) -> Result<Environment, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::CreateEnvironmentRequest {
            organisation: organisation.into(),
            name: name.into(),
            description: description.map(|s| s.to_string()),
            sort_order,
        },
    )?;
    let resp = self
        .env_client()
        .create_environment(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let e = resp
        .environment
        .ok_or_else(|| PlatformError::Other("no environment in response".into()))?;
    Ok(Environment {
        id: e.id,
        organisation: e.organisation,
        name: e.name,
        // Proto3 strings cannot distinguish unset from empty; treat "" as absent.
        description: e.description.filter(|v| !v.is_empty()),
        sort_order: e.sort_order,
        created_at: e.created_at,
    })
}
/// Register a destination (optionally typed) in an environment.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn create_destination(
    &self,
    access_token: &str,
    organisation: &str,
    name: &str,
    environment: &str,
    metadata: &std::collections::HashMap<String, String>,
    dest_type: Option<&forage_core::platform::DestinationType>,
) -> Result<(), PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::CreateDestinationRequest {
            organisation: organisation.into(),
            name: name.into(),
            environment: environment.into(),
            metadata: metadata.clone(),
            // `type` is a Rust keyword, so the proto field surfaces as `r#type`.
            r#type: dest_type.map(|t| forage_grpc::DestinationType {
                organisation: t.organisation.clone(),
                name: t.name.clone(),
                version: t.version,
            }),
        },
    )?;
    self.dest_client()
        .create_destination(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// Replace a destination's metadata map by name.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn update_destination(
    &self,
    access_token: &str,
    name: &str,
    metadata: &std::collections::HashMap<String, String>,
) -> Result<(), PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::UpdateDestinationRequest {
            name: name.into(),
            metadata: metadata.clone(),
        },
    )?;
    self.dest_client()
        .update_destination(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// Fetch the current deployment state of every destination in an
/// organisation, optionally scoped to one project.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be built or the RPC fails.
// NOTE(review): this method builds its request with `bearer_request` +
// manual `map_err`, unlike sibling methods that use
// `platform_authed_request` — consider unifying if the helpers are equivalent.
async fn get_destination_states(
    &self,
    access_token: &str,
    organisation: &str,
    project: Option<&str>,
) -> Result<forage_core::platform::DeploymentStates, PlatformError> {
    let req = bearer_request(
        access_token,
        forage_grpc::GetDestinationStatesRequest {
            organisation: organisation.into(),
            project: project.map(|p| p.into()),
        },
    )
    .map_err(|e| PlatformError::Other(e.to_string()))?;
    let resp = self
        .release_client()
        .get_destination_states(req)
        .await
        .map_err(map_platform_status)?;
    let inner = resp.into_inner();
    // 1:1 field mapping from proto to domain per destination.
    let destinations = inner
        .destinations
        .into_iter()
        .map(|d| forage_core::platform::DestinationState {
            destination_id: d.destination_id,
            destination_name: d.destination_name,
            environment: d.environment,
            release_id: d.release_id,
            artifact_id: d.artifact_id,
            status: d.status,
            error_message: d.error_message,
            queued_at: d.queued_at,
            completed_at: d.completed_at,
            queue_position: d.queue_position,
            started_at: d.started_at,
        })
        .collect();
    Ok(forage_core::platform::DeploymentStates {
        destinations,
    })
}
/// Fetch release-intent states (pipeline stages plus per-destination steps)
/// for an organisation, optionally scoped to one project and optionally
/// including completed intents.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be built or the RPC fails.
// NOTE(review): uses `bearer_request` + manual `map_err` instead of
// `platform_authed_request` like the sibling methods — consider unifying.
async fn get_release_intent_states(
    &self,
    access_token: &str,
    organisation: &str,
    project: Option<&str>,
    include_completed: bool,
) -> Result<Vec<forage_core::platform::ReleaseIntentState>, PlatformError> {
    let req = bearer_request(
        access_token,
        forage_grpc::GetReleaseIntentStatesRequest {
            organisation: organisation.into(),
            project: project.map(|p| p.into()),
            include_completed,
        },
    )
    .map_err(|e| PlatformError::Other(e.to_string()))?;
    let resp = self
        .release_client()
        .get_release_intent_states(req)
        .await
        .map_err(map_platform_status)?;
    Ok(resp
        .into_inner()
        .release_intents
        .into_iter()
        .map(|ri| forage_core::platform::ReleaseIntentState {
            release_intent_id: ri.release_intent_id,
            artifact_id: ri.artifact_id,
            project: ri.project,
            created_at: ri.created_at,
            stages: ri
                .stages
                .into_iter()
                .map(convert_pipeline_stage_state)
                .collect(),
            steps: ri
                .steps
                .into_iter()
                .map(convert_release_step_state)
                .collect(),
        })
        .collect())
}
/// Request a release of an artifact to the given destinations/environments,
/// optionally via the project's release pipeline.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be built or the RPC fails.
// NOTE(review): uses `bearer_request` + manual `map_err` instead of
// `platform_authed_request` like the sibling methods — consider unifying.
async fn release_artifact(
    &self,
    access_token: &str,
    artifact_id: &str,
    destinations: &[String],
    environments: &[String],
    use_pipeline: bool,
) -> Result<(), PlatformError> {
    let req = bearer_request(
        access_token,
        forage_grpc::ReleaseRequest {
            artifact_id: artifact_id.into(),
            destinations: destinations.to_vec(),
            environments: environments.to_vec(),
            // `force` is not exposed by this trait method; always false here.
            force: false,
            use_pipeline,
        },
    )
    .map_err(|e| PlatformError::Other(e.to_string()))?;
    self.release_client()
        .release(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// List all triggers defined for a project.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn list_triggers(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
) -> Result<Vec<Trigger>, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::ListTriggersRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
        },
    )?;
    let resp = self
        .trigger_client()
        .list_triggers(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    Ok(resp.triggers.into_iter().map(convert_trigger).collect())
}
/// Create a release trigger for a project and return the created record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no trigger.
async fn create_trigger(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    input: &CreateTriggerInput,
) -> Result<Trigger, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::CreateTriggerRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: input.name.clone(),
            branch_pattern: input.branch_pattern.clone(),
            title_pattern: input.title_pattern.clone(),
            author_pattern: input.author_pattern.clone(),
            commit_message_pattern: input.commit_message_pattern.clone(),
            source_type_pattern: input.source_type_pattern.clone(),
            target_environments: input.target_environments.clone(),
            target_destinations: input.target_destinations.clone(),
            force_release: input.force_release,
            use_pipeline: input.use_pipeline,
        },
    )?;
    let resp = self
        .trigger_client()
        .create_trigger(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let trigger = resp
        .trigger
        .ok_or_else(|| PlatformError::Other("no trigger in response".into()))?;
    Ok(convert_trigger(trigger))
}
/// Update an existing trigger by name and return the updated record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no trigger.
async fn update_trigger(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
    input: &UpdateTriggerInput,
) -> Result<Trigger, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::UpdateTriggerRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
            enabled: input.enabled,
            branch_pattern: input.branch_pattern.clone(),
            title_pattern: input.title_pattern.clone(),
            author_pattern: input.author_pattern.clone(),
            commit_message_pattern: input.commit_message_pattern.clone(),
            source_type_pattern: input.source_type_pattern.clone(),
            target_environments: input.target_environments.clone(),
            target_destinations: input.target_destinations.clone(),
            force_release: input.force_release,
            use_pipeline: input.use_pipeline,
        },
    )?;
    let resp = self
        .trigger_client()
        .update_trigger(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let trigger = resp
        .trigger
        .ok_or_else(|| PlatformError::Other("no trigger in response".into()))?;
    Ok(convert_trigger(trigger))
}
/// Delete a trigger by name.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn delete_trigger(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
) -> Result<(), PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::DeleteTriggerRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
        },
    )?;
    self.trigger_client()
        .delete_trigger(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// List all policies defined for a project.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn list_policies(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
) -> Result<Vec<Policy>, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::ListPoliciesRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
        },
    )?;
    let resp = self
        .policy_client()
        .list_policies(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    Ok(resp.policies.into_iter().map(convert_policy).collect())
}
/// Create a policy for a project and return the created record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no policy.
async fn create_policy(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    input: &CreatePolicyInput,
) -> Result<Policy, PlatformError> {
    // Both the discriminant and the config oneof are derived from the
    // domain config in one place.
    let (policy_type, config) = policy_config_to_grpc(&input.config);
    let req = platform_authed_request(
        access_token,
        forage_grpc::CreatePolicyRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: input.name.clone(),
            policy_type,
            config,
        },
    )?;
    let resp = self
        .policy_client()
        .create_policy(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let policy = resp
        .policy
        .ok_or_else(|| PlatformError::Other("no policy in response".into()))?;
    Ok(convert_policy(policy))
}
/// Update an existing policy by name and return the updated record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no policy.
async fn update_policy(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
    input: &UpdatePolicyInput,
) -> Result<Policy, PlatformError> {
    // Reuse the create-request conversion, then translate the oneof into
    // the update-request variant. The `None` arm is defensive: the
    // conversion always returns `Some` for both config shapes.
    let config = input.config.as_ref().map(|c| {
        let (_, grpc_config) = policy_config_to_grpc(c);
        match grpc_config {
            Some(forage_grpc::create_policy_request::Config::SoakTime(s)) => {
                forage_grpc::update_policy_request::Config::SoakTime(s)
            }
            Some(forage_grpc::create_policy_request::Config::BranchRestriction(b)) => {
                forage_grpc::update_policy_request::Config::BranchRestriction(b)
            }
            None => forage_grpc::update_policy_request::Config::SoakTime(
                forage_grpc::SoakTimeConfig::default(),
            ),
        }
    });
    let req = platform_authed_request(
        access_token,
        forage_grpc::UpdatePolicyRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
            enabled: input.enabled,
            config,
        },
    )?;
    let resp = self
        .policy_client()
        .update_policy(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let policy = resp
        .policy
        .ok_or_else(|| PlatformError::Other("no policy in response".into()))?;
    Ok(convert_policy(policy))
}
/// Delete a policy by name.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn delete_policy(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
) -> Result<(), PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::DeletePolicyRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
        },
    )?;
    self.policy_client()
        .delete_policy(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// List all release pipelines defined for a project.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn list_release_pipelines(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
) -> Result<Vec<ReleasePipeline>, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::ListReleasePipelinesRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
        },
    )?;
    let resp = self
        .pipeline_client()
        .list_release_pipelines(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    Ok(resp
        .pipelines
        .into_iter()
        .map(convert_release_pipeline)
        .collect())
}
/// Create a release pipeline for a project and return the created record.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no pipeline.
async fn create_release_pipeline(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    input: &CreateReleasePipelineInput,
) -> Result<ReleasePipeline, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::CreateReleasePipelineRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: input.name.clone(),
            stages: convert_stages_to_grpc(&input.stages),
        },
    )?;
    let resp = self
        .pipeline_client()
        .create_release_pipeline(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let pipeline = resp
        .pipeline
        .ok_or_else(|| PlatformError::Other("no pipeline in response".into()))?;
    Ok(convert_release_pipeline(pipeline))
}
/// Update an existing release pipeline by name and return the updated record.
///
/// Stages are only replaced when `input.stages` is `Some`; the paired
/// `update_stages` flag tells the server whether the (possibly empty)
/// `stages` field should be applied.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised, the RPC
/// fails, or the response carries no pipeline.
async fn update_release_pipeline(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
    input: &UpdateReleasePipelineInput,
) -> Result<ReleasePipeline, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::UpdateReleasePipelineRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
            enabled: input.enabled,
            // Closure (not a bare fn ref) so &Vec<_> deref-coerces to &[_].
            stages: input
                .stages
                .as_ref()
                .map(|s| convert_stages_to_grpc(s))
                .unwrap_or_default(),
            update_stages: input.stages.is_some(),
        },
    )?;
    let resp = self
        .pipeline_client()
        .update_release_pipeline(req)
        .await
        .map_err(map_platform_status)?
        .into_inner();
    // `ok_or_else` defers the error-string allocation to the failure path
    // (clippy: or_fun_call).
    let pipeline = resp
        .pipeline
        .ok_or_else(|| PlatformError::Other("no pipeline in response".into()))?;
    Ok(convert_release_pipeline(pipeline))
}
/// Delete a release pipeline by name.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn delete_release_pipeline(
    &self,
    access_token: &str,
    organisation: &str,
    project: &str,
    name: &str,
) -> Result<(), PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::DeleteReleasePipelineRequest {
            project: Some(forage_grpc::Project {
                organisation: organisation.into(),
                project: project.into(),
            }),
            name: name.into(),
        },
    )?;
    self.pipeline_client()
        .delete_release_pipeline(req)
        .await
        .map_err(map_platform_status)?;
    Ok(())
}
/// Fetch the raw spec content for an artifact by id.
///
/// # Errors
/// Returns `PlatformError` when the request cannot be authorised or the RPC fails.
async fn get_artifact_spec(
    &self,
    access_token: &str,
    artifact_id: &str,
) -> Result<String, PlatformError> {
    let req = platform_authed_request(
        access_token,
        forage_grpc::GetArtifactSpecRequest {
            artifact_id: artifact_id.into(),
        },
    )?;
    let resp = self
        .artifact_client()
        .get_artifact_spec(req)
        .await
        .map_err(map_platform_status)?;
    Ok(resp.into_inner().content)
}
}