feat: add check for codeowners

Signed-off-by: kjuulh <contact@kjuulh.io>
2026-02-05 10:55:14 +01:00
parent e394510993
commit 1481293b7c
13 changed files with 377 additions and 55 deletions

Cargo.lock (generated)

@@ -563,6 +563,7 @@ dependencies = [
"axum",
"clap",
"dotenv",
"http",
"nodrift",
"notmad",
"octocrab",


@@ -23,3 +23,4 @@ nodrift = "0.3.5"
octocrab = "0.49.5"
schemars = "1.2.1"
serde_json = "1.0.149"
http = "1"


@@ -1,3 +1,5 @@
use std::collections::BTreeMap;
use regex::Regex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -15,7 +17,19 @@ pub struct ForgeConfig {
pub schedule: ForgeSchedule,
#[serde(default)]
pub policies: Policies,
pub policies: BTreeMap<String, PolicyOption>,
#[serde(default)]
pub mode: Mode,
}
#[derive(Clone, Debug, Deserialize, JsonSchema, Default)]
pub enum Mode {
#[serde(rename = "warn")]
Warn,
#[serde(rename = "update")]
#[default]
Update,
}
/// # Filter
@@ -54,13 +68,6 @@ fn deny_default() -> Vec<ForgeRegex> {
vec![]
}
#[derive(Clone, Debug, Default, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct Policies {
#[serde(default)]
pub squash_merge_only: PolicyOption,
}
#[derive(Clone, Debug, Default, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct PolicyOption {


@@ -1,4 +1,4 @@
use std::{path::Path, time::Duration};
use std::{collections::BTreeMap, path::Path, time::Duration};
use anyhow::Context;
use async_trait::async_trait;
@@ -8,7 +8,7 @@ use tokio_util::sync::CancellationToken;
use crate::{
State,
forge_config::{ForgeConfig, ForgeConfigType, ForgeSchedule, GitHubCredentials},
forge_config::{ForgeConfig, ForgeConfigType, ForgeSchedule, GitHubCredentials, Mode},
forges,
};
@@ -18,6 +18,105 @@ pub struct ForgeService {
config: ForgeConfig,
}
impl ForgeService {
async fn sync(&self) -> anyhow::Result<()> {
match &self.config.forge_type {
ForgeConfigType::GitHub {
credentials,
organisation,
} => {
let creds = match credentials.clone() {
GitHubCredentials::Token(token) => {
forges::github::GitHubCredentials::Token(token)
}
GitHubCredentials::TokenEnv(key) => {
let token =
std::env::var(key).context("failed to lookup github token env")?;
forges::github::GitHubCredentials::Token(token)
}
};
let client = forges::github::GitHub::new(creds);
let policies = forges::github::policies::Policies::new();
// 1. get repositories matching allow / deny list
let repos = client
.get_repositories(
organisation,
self.config.filter.allow.iter().map(|a| a.into()).collect(),
self.config.filter.deny.iter().map(|a| a.into()).collect(),
)
.await?;
for repo in repos {
let repo = client.get_repository(&repo).await?;
let repo_display = serde_json::to_string(&repo)?;
tracing::trace!(repo = repo_display, "checking for updates");
tracing::debug!(repo = ?repo.full_name, "checking policies");
let mut all_updates = BTreeMap::new();
let mut all_warnings = Vec::new();
// 2. Evaluate each enabled policy
for policy_name in self
.config
.policies
.iter()
.filter(|(_, option)| option.enabled)
.map(|(p, _)| p)
{
let policy = policies
.get_policy(policy_name)
.context(anyhow::anyhow!("failed to find policy: {policy_name}"))?;
let result = policy.evaluate(&client, &repo).await?;
all_updates.extend(result.updates);
all_warnings.extend(result.warnings);
}
// Always log warnings regardless of mode
for warning in &all_warnings {
tracing::warn!(repo = ?repo.full_name, "{warning}");
}
if all_updates.is_empty() {
if all_warnings.is_empty() {
tracing::debug!(repo = ?repo.full_name, "no policy required updating, skipping");
}
continue;
}
tracing::info!(repo = ?repo.full_name, "updating repository");
let updates_display = serde_json::to_string(&all_updates)?;
match self.config.mode {
Mode::Warn => {
tracing::error!(update = updates_display, "repository requires update");
}
Mode::Update => {
tracing::info!(update = updates_display, "repository updating");
// 3. Update repositories
client.update_repository(&repo, all_updates).await.context(
anyhow::anyhow!(
"update repository: {}",
repo.full_name.unwrap_or_default()
),
)?;
}
}
}
}
}
Ok(())
}
}
#[async_trait]
impl Component for ForgeService {
fn name(&self) -> Option<String> {
@@ -57,39 +156,9 @@ impl nodrift::Drifter for ForgeService {
async fn execute(&self, _token: CancellationToken) -> anyhow::Result<()> {
tracing::info!("running schedule");
match &self.config.forge_type {
ForgeConfigType::GitHub {
credentials,
organisation,
} => {
let creds = match credentials.clone() {
GitHubCredentials::Token(token) => {
forges::github::GitHubCredentials::Token(token)
}
GitHubCredentials::TokenEnv(key) => {
let token =
std::env::var(key).context("failed to lookup github token env")?;
forges::github::GitHubCredentials::Token(token)
}
};
let client = forges::github::GitHub::new(creds);
// 1. get repositories matching allow / deny list
client
.get_repositories(
organisation,
self.config.filter.allow.iter().map(|a| a.into()).collect(),
self.config.filter.deny.iter().map(|a| a.into()).collect(),
)
.await?;
// 2. Get updates for each
// 3. Update repositories
}
}
self.sync()
.await
.inspect_err(|e| tracing::warn!("failed to update repository: {e:#}"))?;
Ok(())
}


@@ -1,7 +1,11 @@
use std::collections::BTreeMap;
use anyhow::Context;
use octocrab::{Octocrab, models::Repository};
use regex::Regex;
pub mod policies;
pub struct GitHub {
credentials: GitHubCredentials,
client: Octocrab,
@@ -29,6 +33,14 @@ impl GitHub {
}
}
pub async fn get_repository(&self, repo: &Repository) -> anyhow::Result<Repository> {
self.client
.repos_by_id(repo.id)
.get()
.await
.context("repo by id")
}
pub async fn get_repositories(
&self,
organisation: &str,
@@ -97,4 +109,47 @@ impl GitHub {
Ok(allowed_repos)
}
pub async fn file_exists(&self, repo: &Repository, path: &str) -> anyhow::Result<bool> {
let full_name = repo.full_name.as_ref().context("full name is required")?;
let parts: Vec<&str> = full_name.splitn(2, '/').collect();
anyhow::ensure!(parts.len() == 2, "invalid full_name format: {full_name}");
let (owner, name) = (parts[0], parts[1]);
match self
.client
.repos(owner, name)
.get_content()
.path(path)
.send()
.await
{
Ok(_) => Ok(true),
Err(octocrab::Error::GitHub { source, .. })
if source.status_code == http::StatusCode::NOT_FOUND =>
{
Ok(false)
}
Err(e) => Err(e.into()),
}
}
pub async fn update_repository(
&self,
repo: &Repository,
options: BTreeMap<String, serde_json::Value>,
) -> anyhow::Result<()> {
let _res: serde_json::Value = self
.client
.patch(
format!(
"/repos/{}",
repo.full_name.as_ref().context("full name is required")?
),
Some(&options),
)
.await?;
Ok(())
}
}


@@ -0,0 +1,69 @@
use std::{collections::BTreeMap, sync::Arc};
use async_trait::async_trait;
use octocrab::models::Repository;
use crate::forges::github::GitHub;
pub mod auto_merge;
pub mod has_codeowners;
pub mod squash_merge;
pub struct PolicyResult {
pub updates: BTreeMap<String, serde_json::Value>,
pub warnings: Vec<String>,
}
impl PolicyResult {
pub fn ok() -> Self {
Self {
updates: BTreeMap::new(),
warnings: Vec::new(),
}
}
pub fn warn(message: impl Into<String>) -> Self {
Self {
updates: BTreeMap::new(),
warnings: vec![message.into()],
}
}
pub fn is_empty(&self) -> bool {
self.updates.is_empty() && self.warnings.is_empty()
}
}
#[async_trait]
pub trait Policy: Send + Sync {
async fn evaluate(&self, client: &GitHub, repo: &Repository) -> anyhow::Result<PolicyResult>;
}
#[derive(Clone)]
pub struct Policies {
policies: Arc<BTreeMap<String, Arc<dyn Policy + 'static>>>,
}
impl Policies {
pub fn new() -> Self {
let mut policies: BTreeMap<String, Arc<dyn Policy + 'static>> = BTreeMap::new();
policies.insert(
"squash_merge_only".into(),
Arc::new(squash_merge::SquashMergeOnly),
);
policies.insert("allow_auto_merge".into(), Arc::new(auto_merge::AutoMerge));
policies.insert(
"has_codeowners".into(),
Arc::new(has_codeowners::HasCodeowners),
);
Self {
policies: Arc::new(policies),
}
}
pub fn get_policy(&self, policy_name: &str) -> Option<Arc<dyn Policy + 'static>> {
self.policies.get(policy_name).cloned()
}
}
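
As a sketch of how this registry is meant to grow (the HasDescription policy and its "has_description" key below are hypothetical, not part of this commit), a new check only needs to implement the Policy trait and be inserted into the map:

use async_trait::async_trait;
use octocrab::models::Repository;
use crate::forges::github::GitHub;
use crate::forges::github::policies::{Policy, PolicyResult};
// Hypothetical policy: warn when a repository has no description set.
pub struct HasDescription;
#[async_trait]
impl Policy for HasDescription {
    async fn evaluate(&self, _client: &GitHub, repo: &Repository) -> anyhow::Result<PolicyResult> {
        if repo.description.as_deref().unwrap_or_default().is_empty() {
            let repo_name = repo.full_name.as_deref().unwrap_or(&repo.name);
            return Ok(PolicyResult::warn(format!("{repo_name}: repository has no description")));
        }
        Ok(PolicyResult::ok())
    }
}
// It would then be registered in Policies::new() next to the built-in policies:
//     policies.insert("has_description".into(), Arc::new(HasDescription));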


@@ -0,0 +1,27 @@
use std::collections::BTreeMap;
use async_trait::async_trait;
use octocrab::models::Repository;
use crate::forges::github::GitHub;
use crate::forges::github::policies::{Policy, PolicyResult};
pub struct AutoMerge;
#[async_trait]
impl Policy for AutoMerge {
async fn evaluate(&self, _client: &GitHub, repo: &Repository) -> anyhow::Result<PolicyResult> {
if repo.allow_auto_merge.unwrap_or_default() {
tracing::trace!("already has allow auto merge: {:?}", repo.allow_auto_merge);
return Ok(PolicyResult::ok());
}
Ok(PolicyResult {
updates: BTreeMap::from([(
"allow_auto_merge".to_string(),
serde_json::Value::Bool(true),
)]),
warnings: Vec::new(),
})
}
}


@@ -0,0 +1,31 @@
use async_trait::async_trait;
use octocrab::models::Repository;
use crate::forges::github::GitHub;
use crate::forges::github::policies::{Policy, PolicyResult};
const CODEOWNERS_PATHS: &[&str] = &["CODEOWNERS", ".github/CODEOWNERS", "docs/CODEOWNERS"];
pub struct HasCodeowners;
#[async_trait]
impl Policy for HasCodeowners {
async fn evaluate(&self, client: &GitHub, repo: &Repository) -> anyhow::Result<PolicyResult> {
for path in CODEOWNERS_PATHS {
if client.file_exists(repo, path).await? {
tracing::trace!(
repo = ?repo.full_name,
path,
"CODEOWNERS file found"
);
return Ok(PolicyResult::ok());
}
}
let repo_name = repo.full_name.as_deref().unwrap_or(&repo.name);
Ok(PolicyResult::warn(format!(
"{repo_name}: no CODEOWNERS file found (checked: {})",
CODEOWNERS_PATHS.join(", ")
)))
}
}
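
For reference, a minimal CODEOWNERS file satisfying this check could live at any of the scanned paths (CODEOWNERS, .github/CODEOWNERS, or docs/CODEOWNERS); the team handle below is an illustrative placeholder, not taken from this repository:

# Hypothetical default owners for everything in the repository.
* @example-org/platform-team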


@@ -0,0 +1,48 @@
use std::collections::BTreeMap;
use async_trait::async_trait;
use octocrab::models::Repository;
use serde_json::Value;
use crate::forges::github::GitHub;
use crate::forges::github::policies::{Policy, PolicyResult};
pub struct SquashMergeOnly;
#[async_trait]
impl Policy for SquashMergeOnly {
async fn evaluate(&self, _client: &GitHub, repo: &Repository) -> anyhow::Result<PolicyResult> {
let mut updates = BTreeMap::new();
if !repo.allow_squash_merge.unwrap_or_default() {
tracing::trace!(
"repo requires squash merge update: {:?}",
repo.allow_squash_merge
);
updates.insert("allow_squash_merge".to_string(), Value::Bool(true));
updates.insert(
"squash_merge_commit_title".into(),
Value::String("PR_TITLE".into()),
);
updates.insert(
"squash_merge_commit_message".into(),
Value::String("PR_BODY".into()),
);
}
if !repo.allow_merge_commit.unwrap_or(true) {
tracing::trace!(
"repo requires disabling merge commit: {:?}",
repo.allow_merge_commit
);
updates.insert("allow_merge_commit".into(), Value::Bool(false));
}
Ok(PolicyResult {
updates,
warnings: Vec::new(),
})
}
}


@@ -0,0 +1 @@


@@ -0,0 +1,16 @@
#:schema ../schema.json
mode = "warn"
[github]
organisation = "understory-io"
credentials.token_env = "GITHUB_ACCESS_TOKEN"
[filter]
allow = ["^canopy-.*$"]
[schedule]
once = true
[policies]
has_codeowners.enabled = true


@@ -1,15 +1,18 @@
#:schema ../schema.json
mode = "update"
[github]
organisation = "understory-io"
credentials.token_env = "GITHUB_ACCESS_TOKEN"
[filter]
allow = ["^canopy-.*$"]
deny = ["^infrastructure-.*$", "^canopy-data-gateway$"]
allow = ["^canopy-data-gateway$"]
# deny = ["^infrastructure-.*$", "^canopy-data-gateway$"]
[schedule]
once = true
[policies]
squash_merge_only.enabled = true
allow_auto_merge.enabled = true


@@ -7,7 +7,10 @@
"$ref": "#/$defs/Filter"
},
"policies": {
"$ref": "#/$defs/Policies"
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/PolicyOption"
}
},
"schedule": {
"$ref": "#/$defs/ForgeSchedule"
@@ -137,15 +140,6 @@
}
]
},
"Policies": {
"type": "object",
"properties": {
"squash_merge_only": {
"$ref": "#/$defs/PolicyOption"
}
},
"additionalProperties": false
},
"PolicyOption": {
"type": "object",
"properties": {