diff --git a/config.toml b/config.toml
index 6feaef585..f0dfa862c 100644
--- a/config.toml
+++ b/config.toml
@@ -80,3 +80,7 @@ members-without-zulip-id = [
 "therealprof",
 "zeenix"
 ]
+
+enable-rulesets-repos = [
+ "rust-lang/bors"
+]
diff --git a/src/data.rs b/src/data.rs
index b523dfc40..c04f8dd21 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -235,6 +235,7 @@ impl Data {
 Ok(sync_team::Config {
 special_org_members,
 independent_github_orgs: self.config.independent_github_orgs().clone(),
+ enable_rulesets_repos: self.config.enable_rulesets_repos().clone(),
 })
 }
 }
diff --git a/src/schema.rs b/src/schema.rs
index 3fbe8bdfe..dfd48af02 100644
--- a/src/schema.rs
+++ b/src/schema.rs
@@ -16,6 +16,8 @@ pub(crate) struct Config {
 // Use a BTreeSet for consistent ordering in tests
 special_org_members: BTreeSet<String>,
 members_without_zulip_id: BTreeSet<String>,
+ #[serde(default)]
+ enable_rulesets_repos: BTreeSet<String>,
 }

 impl Config {
@@ -46,6 +48,10 @@ impl Config {
 pub(crate) fn members_without_zulip_id(&self) -> &BTreeSet<String> {
 &self.members_without_zulip_id
 }
+
+ pub(crate) fn enable_rulesets_repos(&self) -> &BTreeSet<String> {
+ &self.enable_rulesets_repos
+ }
 }

 // This is an enum to allow two kinds of values for the email field:
diff --git a/sync-team/Cargo.toml b/sync-team/Cargo.toml
index b17343ac2..21e7d4a15 100644
--- a/sync-team/Cargo.toml
+++ b/sync-team/Cargo.toml
@@ -14,6 +14,7 @@ base64.workspace = true
 hyper-old-types.workspace = true
 serde_json.workspace = true
 secrecy.workspace = true
+indexmap.workspace = true

 [dev-dependencies]
 indexmap.workspace = true
diff --git a/sync-team/src/github/api/mod.rs b/sync-team/src/github/api/mod.rs
index d877d4a7b..debac26ca 100644
--- a/sync-team/src/github/api/mod.rs
+++ b/sync-team/src/github/api/mod.rs
@@ -479,3 +479,160 @@ pub(crate) struct RepoSettings {
 pub archived: bool,
 pub auto_merge_enabled: bool,
 }
+
+/// GitHub Repository Ruleset
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct Ruleset {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) id: Option<i64>,
+ pub(crate) name: String,
+ pub(crate) target: RulesetTarget,
+ pub(crate) source_type: RulesetSourceType,
+ pub(crate) enforcement: RulesetEnforcement,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) bypass_actors: Option<Vec<RulesetBypassActor>>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub(crate) conditions: Option<RulesetConditions>,
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
+ pub(crate) rules: Vec<RulesetRule>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum RulesetTarget {
+ Branch,
+ Tag,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) enum RulesetSourceType {
+ Repository,
+ Organization,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum RulesetEnforcement {
+ Active,
+ Disabled,
+ Evaluate,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RulesetBypassActor {
+ pub(crate) actor_id: i64,
+ pub(crate) actor_type: RulesetActorType,
+ pub(crate) bypass_mode: RulesetBypassMode,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub(crate) enum RulesetActorType {
+ Integration,
+ OrganizationAdmin,
+ RepositoryRole,
+ Team,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub(crate) enum RulesetBypassMode {
+ Always,
+ PullRequest,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RulesetConditions {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) ref_name: Option<RulesetRefNameCondition>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RulesetRefNameCondition {
+ pub(crate) include: Vec<String>,
+ pub(crate) exclude: Vec<String>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(tag = "type", rename_all = "snake_case")]
+pub(crate) enum RulesetRule {
+ Creation,
+ Update,
+ Deletion,
+ RequiredLinearHistory,
+ MergeQueue {
+ parameters: MergeQueueParameters,
+ },
+ RequiredDeployments {
+ parameters: RequiredDeploymentsParameters,
+ },
+ RequiredSignatures,
+ PullRequest {
+ parameters: PullRequestParameters,
+ },
+ RequiredStatusChecks {
+ parameters: RequiredStatusChecksParameters,
+ },
+ NonFastForward,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct MergeQueueParameters {
+ pub(crate) check_response_timeout_minutes: i32,
+ pub(crate) grouping_strategy: MergeQueueGroupingStrategy,
+ pub(crate) max_entries_to_build: i32,
+ pub(crate) max_entries_to_merge: i32,
+ pub(crate) merge_method: MergeQueueMergeMethod,
+ pub(crate) min_entries_to_merge: i32,
+ pub(crate) min_entries_to_merge_wait_minutes: i32,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub(crate) enum MergeQueueGroupingStrategy {
+ Allgreen,
+ Headgreen,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub(crate) enum MergeQueueMergeMethod {
+ Merge,
+ Squash,
+ Rebase,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RequiredDeploymentsParameters {
+ pub(crate) required_deployment_environments: Vec<String>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct PullRequestParameters {
+ pub(crate) dismiss_stale_reviews_on_push: bool,
+ pub(crate) require_code_owner_review: bool,
+ pub(crate) require_last_push_approval: bool,
+ pub(crate) required_approving_review_count: i32,
+ pub(crate) required_review_thread_resolution: bool,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RequiredStatusChecksParameters {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) do_not_enforce_on_create: Option<bool>,
+ pub(crate) required_status_checks: Vec<RequiredStatusCheck>,
+ pub(crate) strict_required_status_checks_policy: bool,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub(crate) struct RequiredStatusCheck {
+ pub(crate) context: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) integration_id: Option<i64>,
+}
+
+pub(crate) enum RulesetOp {
+ CreateForRepo,
+ UpdateRuleset(i64),
+}
diff --git a/sync-team/src/github/api/read.rs b/sync-team/src/github/api/read.rs
index 7deb6f37f..bef860a53 100644
--- a/sync-team/src/github/api/read.rs
+++ b/sync-team/src/github/api/read.rs
@@ -1,3 +1,4 @@
+use crate::github::api::Ruleset;
 use crate::github::api::{
 BranchProtection, GraphNode, GraphNodes, GraphPageInfo, HttpClient, Login, Repo, RepoTeam,
 RepoUser, Team, TeamMember, TeamRole, team_node_id, url::GitHubUrl, user_node_id,
@@ -59,6 +60,14 @@
 org: &str,
 repo: &str,
 ) -> anyhow::Result>;
+
+ /// Get rulesets for a repository
+ /// Returns a vector of rulesets
+ fn repo_rulesets(
+ &self,
+ org: &str,
+ repo: &str,
+ ) -> anyhow::Result<Vec<Ruleset>>;
 }

 pub(crate) struct GitHubApiRead {
@@ -536,4 +545,26 @@ impl GithubRead for GitHubApiRead {
 })
 .collect()
 }
+
+ fn repo_rulesets(
+ &self,
+ org: &str,
+ repo: &str,
+ ) -> anyhow::Result<Vec<Ruleset>> {
+ let mut rulesets: Vec<Ruleset> = Vec::new();
+
+ // REST API endpoint for rulesets
+ // https://docs.github.com/en/rest/repos/rules#get-all-repository-rulesets
+ // The API returns an array of rulesets directly, not wrapped in an object
+ self.client.rest_paginated(
+ &Method::GET,
+ &GitHubUrl::repos(org, repo, "rulesets")?,
+ |resp: Vec<Ruleset>| {
+ rulesets.extend(resp);
+ Ok(())
+ },
+ )?;
+
+ Ok(rulesets)
+ }
 }
diff --git a/sync-team/src/github/api/write.rs b/sync-team/src/github/api/write.rs
index df1bd8649..f24f6d741 100644
--- a/sync-team/src/github/api/write.rs
+++ b/sync-team/src/github/api/write.rs
@@ -766,4 +766,53 @@ impl GitHubWrite {
 }
 Ok(())
 }
+
+ /// Create or update a ruleset for a repository
+ pub(crate) fn upsert_ruleset(
+ &self,
+ op: crate::github::api::RulesetOp,
+ org: &str,
+ repo: &str,
+ ruleset: &crate::github::api::Ruleset,
+ ) -> anyhow::Result<()> {
+ use crate::github::api::RulesetOp;
+
+ match op {
+ RulesetOp::CreateForRepo => {
+ debug!("Creating ruleset '{}' in '{}/{}'", ruleset.name, org, repo);
+ if !self.dry_run {
+ // REST API: POST /repos/{owner}/{repo}/rulesets
+ // https://docs.github.com/en/rest/repos/rules#create-a-repository-ruleset
+ let url = GitHubUrl::repos(org, repo, "rulesets")?;
+ self.client.send(Method::POST, &url, ruleset)?;
+ }
+ }
+ RulesetOp::UpdateRuleset(id) => {
+ debug!(
+ "Updating ruleset '{}' (id: {}) in '{}/{}'",
+ ruleset.name, id, org, repo
+ );
+ if !self.dry_run {
+ // REST API: PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}
+ // https://docs.github.com/en/rest/repos/rules#update-a-repository-ruleset
+ let url = GitHubUrl::repos(org, repo, &format!("rulesets/{}", id))?;
+ self.client.send(Method::PUT, &url, ruleset)?;
+ }
+ }
+ }
+ Ok(())
+ }
+
+ /// Delete a ruleset from a repository
+ pub(crate) fn delete_ruleset(&self, org: &str, repo: &str, id: i64) -> anyhow::Result<()> {
+ debug!("Deleting ruleset id {} from '{}/{}'", id, org, repo);
+ if !self.dry_run {
+ // REST API: DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}
+ // https://docs.github.com/en/rest/repos/rules#delete-a-repository-ruleset
+ let url = GitHubUrl::repos(org, repo, &format!("rulesets/{}", id))?;
+ self.client
+ .send(Method::DELETE, &url, &serde_json::json!({}))?;
+ }
+ Ok(())
+ }
 }
diff --git a/sync-team/src/github/mod.rs b/sync-team/src/github/mod.rs
index d41bf9afe..68ae7e287 100644
--- a/sync-team/src/github/mod.rs
+++ b/sync-team/src/github/mod.rs
@@ -328,6 +328,12 @@ impl SyncGitHub {
 Ok(diffs)
 }

+ /// Check if a repository should use rulesets instead of branch protections
+ fn should_use_rulesets(&self, repo: &rust_team_data::v1::Repo) -> bool {
+ let repo_full_name = format!("{}/{}", repo.org, repo.name);
+ self.config.enable_rulesets_repos.contains(&repo_full_name)
+ }
+
 fn diff_repo(&self, expected_repo: &rust_team_data::v1::Repo) -> anyhow::Result<RepoDiff> {
 debug!(
 "Diffing repo `{}/{}`",
@@ -342,6 +348,8 @@
 Default::default(),
 Default::default(),
 )?;
+
+ // Always create branch protections
 let mut branch_protections = Vec::new();
 for branch_protection in &expected_repo.branch_protections {
 branch_protections.push((
+
@@ -350,6 +358,16 @@
 ));
 }

+ // Additionally create rulesets if configured
+ let mut rulesets = Vec::new();
+ let use_rulesets = self.should_use_rulesets(expected_repo);
+ if use_rulesets {
+ for branch_protection in &expected_repo.branch_protections {
+ let ruleset = construct_ruleset(expected_repo, branch_protection);
+ rulesets.push(ruleset);
+ }
+ }
+
 return Ok(RepoDiff::Create(CreateRepoDiff {
 org: expected_repo.org.clone(),
 name: expected_repo.name.clone(),
@@ -360,7 +378,13 @@
 auto_merge_enabled: expected_repo.auto_merge_enabled,
 },
 permissions,
- branch_protections,
+ // Don't create branch protections if using rulesets
+ branch_protections: if use_rulesets {
+ vec![]
+ } else {
+ branch_protections
+ },
+ rulesets,
 environments: expected_repo
 .environments
 .iter()
@@ -371,7 +395,15 @@
 };

 let permission_diffs = self.diff_permissions(expected_repo)?;
+
 let branch_protection_diffs = self.diff_branch_protections(&actual_repo, expected_repo)?;
+
+ let ruleset_diffs = if self.should_use_rulesets(expected_repo) {
+ self.diff_rulesets(expected_repo)?
+ } else {
+ Vec::new()
+ };
+
 let environment_diffs = self.diff_environments(expected_repo)?;
 let old_settings = RepoSettings {
 description: actual_repo.description.clone(),
@@ -393,6 +425,7 @@
 settings_diff: (old_settings, new_settings),
 permission_diffs,
 branch_protection_diffs,
+ ruleset_diffs,
 environment_diffs,
 }))
 }
@@ -433,6 +466,18 @@
 let mut actual_protections = self
 .github
 .branch_protections(&actual_repo.org, &actual_repo.name)?;
+
+ // If rulesets are enabled, delete all existing branch protections
+ // to avoid conflicts between branch protections and rulesets
+ if self.should_use_rulesets(expected_repo) {
+ return Ok(actual_protections
+ .into_iter()
+ .map(|(name, (id, _))| BranchProtectionDiff {
+ pattern: name,
+ operation: BranchProtectionDiffOperation::Delete(id),
+ })
+ .collect());
+ }
 for branch_protection in &expected_repo.branch_protections {
 let actual_branch_protection = actual_protections.remove(&branch_protection.pattern);
 let mut expected_branch_protection =
@@ -567,6 +612,68 @@
 Ok(environment_diffs)
 }

+ fn diff_rulesets(
+ &self,
+ expected_repo: &rust_team_data::v1::Repo,
+ ) -> anyhow::Result<Vec<RulesetDiff>> {
+ let mut ruleset_diffs = Vec::new();
+
+ // Fetch existing rulesets from GitHub
+ let actual_rulesets = self
+ .github
+ .repo_rulesets(&expected_repo.org, &expected_repo.name)?;
+
+ // Build a map of actual rulesets by name
+ let mut actual_rulesets_map: HashMap<String, api::Ruleset> = actual_rulesets
+ .into_iter()
+ .map(|r| (r.name.clone(), r))
+ .collect();
+
+ // Process each branch protection as a potential ruleset
+ for branch_protection in &expected_repo.branch_protections {
+ let expected_ruleset = construct_ruleset(expected_repo, branch_protection);
+ let ruleset_name = expected_ruleset.name.clone();
+
+ if let Some(actual_ruleset) = actual_rulesets_map.remove(&ruleset_name) {
+ // Ruleset exists, check if it needs updating
+ // For simplicity, we compare the entire ruleset
+ // In production, you might want more sophisticated comparison
+ if actual_ruleset.rules != expected_ruleset.rules
+ || actual_ruleset.conditions != expected_ruleset.conditions
+ || actual_ruleset.enforcement != expected_ruleset.enforcement
+ {
+ let id = actual_ruleset.id.unwrap_or(0);
+ ruleset_diffs.push(RulesetDiff {
+ name: ruleset_name,
+ operation: RulesetDiffOperation::Update(
+ id,
+ actual_ruleset,
+ expected_ruleset,
+ ),
+ });
+ }
+ } else {
+ // Ruleset doesn't exist, create it
+ ruleset_diffs.push(RulesetDiff {
+ name: ruleset_name,
+ operation: RulesetDiffOperation::Create(expected_ruleset),
+ });
+ }
+ }
+
+ // Any remaining rulesets in actual_rulesets_map should be deleted
+ for (name, ruleset) in actual_rulesets_map {
+ if let Some(id) = ruleset.id {
+ ruleset_diffs.push(RulesetDiff {
+ name,
+ operation: RulesetDiffOperation::Delete(id),
+ });
+ }
+ }
+
+ Ok(ruleset_diffs)
+ }
+
 fn expected_role(&self, org: &str, user: u64) -> TeamRole {
 if let Some(true) = self
 .org_owners
@@ -779,6 +886,103 @@ pub fn construct_branch_protection(
 }
 }

+/// Convert a branch pattern to a full ref pattern for use in rulesets.
+/// GitHub rulesets require full ref paths like "refs/heads/main" instead of just "main".
+pub(crate) fn convert_branch_pattern_to_ref_pattern(pattern: &str) -> String {
+ if pattern.starts_with("refs/") {
+ return pattern.to_string();
+ }
+ format!("refs/heads/{}", pattern)
+}
+
+/// Convert a branch protection to a ruleset with merge queue enabled
+pub fn construct_ruleset(
+ _expected_repo: &rust_team_data::v1::Repo,
+ branch_protection: &rust_team_data::v1::BranchProtection,
+) -> api::Ruleset {
+ use api::*;
+
+ let uses_merge_bot = !branch_protection.merge_bots.is_empty();
+ let branch_protection_mode = if uses_merge_bot {
+ BranchProtectionMode::PrNotRequired
+ } else {
+ branch_protection.mode.clone()
+ };
+
+ let mut rules: Vec<RulesetRule> = Vec::new();
+
+ // Add pull request rule if PRs are required
+ if let BranchProtectionMode::PrRequired {
+ required_approvals, ..
+ } = &branch_protection_mode
+ {
+ rules.push(RulesetRule::PullRequest {
+ parameters: PullRequestParameters {
+ dismiss_stale_reviews_on_push: branch_protection.dismiss_stale_review,
+ require_code_owner_review: false,
+ require_last_push_approval: false,
+ required_approving_review_count: *required_approvals as i32,
+ required_review_thread_resolution: false,
+ },
+ });
+ }
+
+ // Add required status checks if any
+ if let BranchProtectionMode::PrRequired { ci_checks, .. } = &branch_protection_mode
+ && !ci_checks.is_empty()
+ {
+ let mut checks = ci_checks.clone();
+ checks.sort();
+ rules.push(RulesetRule::RequiredStatusChecks {
+ parameters: RequiredStatusChecksParameters {
+ do_not_enforce_on_create: Some(false),
+ required_status_checks: checks
+ .iter()
+ .map(|context| RequiredStatusCheck {
+ context: context.clone(),
+ integration_id: None,
+ })
+ .collect(),
+ strict_required_status_checks_policy: false,
+ },
+ });
+ }
+
+ // Enable merge queue with default settings
+ rules.push(RulesetRule::MergeQueue {
+ parameters: MergeQueueParameters {
+ check_response_timeout_minutes: 360,
+ grouping_strategy: MergeQueueGroupingStrategy::Allgreen,
+ max_entries_to_build: 5,
+ max_entries_to_merge: 5,
+ merge_method: MergeQueueMergeMethod::Merge,
+ min_entries_to_merge: 0,
+ min_entries_to_merge_wait_minutes: 0,
+ },
+ });
+
+ // TODO: Add bypass actors based on allowed_merge_teams and merge_bots
+ // This would require fetching team IDs, which needs more integration
+
+ api::Ruleset {
+ id: None,
+ name: format!("Branch protection for {}", branch_protection.pattern),
+ target: RulesetTarget::Branch,
+ source_type: RulesetSourceType::Repository,
+ enforcement: RulesetEnforcement::Active,
+ bypass_actors: None,
+ conditions: Some(RulesetConditions {
+ ref_name: Some(RulesetRefNameCondition {
+ include: vec![convert_branch_pattern_to_ref_pattern(
+ &branch_protection.pattern,
+ )],
+ exclude: vec![],
+ }),
+ }),
+ rules,
+ }
+}
+
 /// The special bot teams
 const BOTS_TEAMS: &[&str] = &["bors", "highfive", "rfcbot", "bots"];
@@ -905,6 +1109,7 @@ struct CreateRepoDiff {
 settings: RepoSettings,
 permissions: Vec,
 branch_protections: Vec<(String, api::BranchProtection)>,
+ rulesets: Vec<api::Ruleset>,
 environments: Vec<(String, rust_team_data::v1::Environment)>,
 }
@@ -916,6 +1121,7 @@ impl CreateRepoDiff {
 permission.apply(sync, &self.org, &self.name)?;
 }

+ // Apply branch protections
 for (branch, protection) in &self.branch_protections {
 BranchProtectionDiff {
 pattern: branch.clone(),
@@ -924,6 +1130,15 @@
 .apply(sync, &self.org, &self.name, &repo.node_id)?;
 }

+ // Apply rulesets (in addition to branch protections if configured)
+ for ruleset in &self.rulesets {
+ RulesetDiff {
+ name: ruleset.name.clone(),
+ operation: RulesetDiffOperation::Create(ruleset.clone()),
+ }
+ .apply(sync, &self.org, &self.name)?;
+ }
+
 for (env_name, env) in &self.environments {
 sync.create_environment(&self.org, &self.name, env_name, &env.branches, &env.tags)?;
 }
@@ -940,6 +1155,7 @@ impl std::fmt::Display for CreateRepoDiff {
 settings,
 permissions,
 branch_protections,
+ rulesets,
 environments,
 } = self;
@@ -960,11 +1176,35 @@
 for diff in permissions {
 write!(f, "{diff}")?;
 }
- writeln!(f, " Branch Protections:")?;
- for (branch_name, branch_protection) in branch_protections {
- writeln!(&mut f, " {branch_name}")?;
- log_branch_protection(branch_protection, None, &mut f)?;
+
+ if !branch_protections.is_empty() {
+ writeln!(f, " Branch Protections:")?;
+ for (branch_name, branch_protection) in branch_protections {
+ writeln!(&mut f, " {branch_name}")?;
+ log_branch_protection(branch_protection, None, &mut f)?;
+ }
 }
+
+ if !rulesets.is_empty() {
+ writeln!(f, " Rulesets:")?;
+ for ruleset in rulesets {
+ let id_str = ruleset.id.map_or("new".to_string(), |id| id.to_string());
+ writeln!(f, " - {} (ID: {})", ruleset.name, id_str)?;
+ if let Some(conditions) = &ruleset.conditions
+ && let Some(ref_name) = &conditions.ref_name
+ {
+ writeln!(f, " Branches: {}", ref_name.include.join(", "))?;
+ }
+ if ruleset
+ .rules
+ .iter()
+ .any(|r| matches!(r, api::RulesetRule::MergeQueue { .. }))
+ {
+ writeln!(f, " Merge Queue: Enabled")?;
+ }
+ }
+ }
+
 if !environments.is_empty() {
 writeln!(f, " Environments:")?;
 for (env_name, env) in environments {
@@ -990,6 +1230,7 @@ struct UpdateRepoDiff {
 settings_diff: (RepoSettings, RepoSettings),
 permission_diffs: Vec,
 branch_protection_diffs: Vec<BranchProtectionDiff>,
+ ruleset_diffs: Vec<RulesetDiff>,
 environment_diffs: Vec<EnvironmentDiff>,
 }
@@ -1021,12 +1262,14 @@
 settings_diff,
 permission_diffs,
 branch_protection_diffs,
+ ruleset_diffs,
 environment_diffs,
 } = self;

 settings_diff.0 == settings_diff.1
 && permission_diffs.is_empty()
 && branch_protection_diffs.is_empty()
+ && ruleset_diffs.is_empty()
 && environment_diffs.is_empty()
 }
@@ -1063,6 +1306,10 @@
 branch_protection.apply(sync, &self.org, &self.name, &self.repo_node_id)?;
 }

+ for ruleset in &self.ruleset_diffs {
+ ruleset.apply(sync, &self.org, &self.name)?;
+ }
+
 for env_diff in &self.environment_diffs {
 match env_diff {
 EnvironmentDiff::Create(name, env) => {
@@ -1070,12 +1317,9 @@
 }
 EnvironmentDiff::Update {
 name,
- add_branches: _,
- remove_branches: _,
- add_tags: _,
- remove_tags: _,
 new_branches,
 new_tags,
+ ..
 } => {
 sync.update_environment(&self.org, &self.name, name, new_branches, new_tags)?;
 }
@@ -1106,6 +1350,7 @@
 settings_diff,
 permission_diffs,
 branch_protection_diffs,
+ ruleset_diffs,
 environment_diffs,
 } = self;
@@ -1154,6 +1399,12 @@
 write!(f, "{branch_protection_diff}")?;
 }
 }
+ if !ruleset_diffs.is_empty() {
+ writeln!(f, " Rulesets:")?;
+ for ruleset_diff in ruleset_diffs {
+ write!(f, "{ruleset_diff}")?;
+ }
+ }
 if !environment_diffs.is_empty() {
 writeln!(f, " Environments:")?;
 for env_diff in environment_diffs {
@@ -1383,6 +1634,184 @@ fn log_branch_protection(
 Ok(())
 }

+fn log_ruleset(
+ current: &api::Ruleset,
+ new: Option<&api::Ruleset>,
+ mut result: impl Write,
+) -> std::fmt::Result {
+ macro_rules! log {
+ ($str:literal, $field:expr, $new_field:expr) => {
+ let old = $field;
+ let new_val = $new_field;
+ if Some(old) != new_val {
+ if let Some(n) = new_val {
+ writeln!(result, " {}: {:?} => {:?}", $str, old, n)?;
+ } else {
+ writeln!(result, " {}: {:?}", $str, old)?;
+ }
+ }
+ };
+ }
+
+ // Log basic ruleset properties
+ log!("Target", &current.target, new.map(|n| &n.target));
+ log!(
+ "Source Type",
+ &current.source_type,
+ new.map(|n| &n.source_type)
+ );
+ log!(
+ "Enforcement",
+ &current.enforcement,
+ new.map(|n| &n.enforcement)
+ );
+
+ // Log branch conditions
+ if let Some(conditions) = &current.conditions
+ && let Some(ref_name) = &conditions.ref_name
+ {
+ let new_ref_name = new.and_then(|n| n.conditions.as_ref()?.ref_name.as_ref());
+ if !ref_name.include.is_empty() {
+ log!(
+ "Include Branches",
+ &ref_name.include,
+ new_ref_name.map(|r| &r.include)
+ );
+ }
+ if !ref_name.exclude.is_empty() {
+ log!(
+ "Exclude Branches",
+ &ref_name.exclude,
+ new_ref_name.map(|r| &r.exclude)
+ );
+ }
+ }
+
+ // Log bypass actors
+ if let Some(bypass_actors) = &current.bypass_actors
+ && !bypass_actors.is_empty()
+ {
+ let new_bypass = new.and_then(|n| n.bypass_actors.as_ref());
+ log!("Bypass Actors", bypass_actors, new_bypass);
+ }
+
+ // Log individual rules
+ for rule in &current.rules {
+ match rule {
+ api::RulesetRule::Creation => {
+ writeln!(result, " Rule: Creation")?;
+ }
+ api::RulesetRule::Update => {
+ writeln!(result, " Rule: Update")?;
+ }
+ api::RulesetRule::Deletion => {
+ writeln!(result, " Rule: Deletion")?;
+ }
+ api::RulesetRule::RequiredLinearHistory => {
+ writeln!(result, " Rule: Required Linear History")?;
+ }
+ api::RulesetRule::RequiredSignatures => {
+ writeln!(result, " Rule: Required Signatures")?;
+ }
+ api::RulesetRule::NonFastForward => {
+ writeln!(result, " Rule: Non-Fast-Forward")?;
+ }
+ api::RulesetRule::MergeQueue { parameters } => {
+ writeln!(result, " Rule: Merge Queue")?;
+ writeln!(
+ result,
+ " Timeout: {} minutes",
+ parameters.check_response_timeout_minutes
+ )?;
+ writeln!(
+ result,
+ " Grouping Strategy: {:?}",
+ parameters.grouping_strategy
+ )?;
+ writeln!(
+ result,
+ " Merge Method: {:?}",
+ parameters.merge_method
+ )?;
+ writeln!(
+ result,
+ " Max Entries to Build: {}",
+ parameters.max_entries_to_build
+ )?;
+ writeln!(
+ result,
+ " Max Entries to Merge: {}",
+ parameters.max_entries_to_merge
+ )?;
+ writeln!(
+ result,
+ " Min Entries to Merge: {}",
+ parameters.min_entries_to_merge
+ )?;
+ writeln!(
+ result,
+ " Min Wait Time: {} minutes",
+ parameters.min_entries_to_merge_wait_minutes
+ )?;
+ }
+ api::RulesetRule::PullRequest { parameters } => {
+ writeln!(result, " Rule: Pull Request")?;
+ writeln!(
+ result,
+ " Dismiss Stale Reviews: {}",
+ parameters.dismiss_stale_reviews_on_push
+ )?;
+ writeln!(
+ result,
+ " Require Code Owner Review: {}",
+ parameters.require_code_owner_review
+ )?;
+ writeln!(
+ result,
+ " Require Last Push Approval: {}",
+ parameters.require_last_push_approval
+ )?;
+ writeln!(
+ result,
+ " Required Approving Review Count: {}",
+ parameters.required_approving_review_count
+ )?;
+ writeln!(
+ result,
+ " Required Review Thread Resolution: {}",
+ parameters.required_review_thread_resolution
+ )?;
+ }
+ api::RulesetRule::RequiredStatusChecks { parameters } => {
+ writeln!(result, " Rule: Required Status Checks")?;
+ writeln!(
+ result,
+ " Strict Policy: {}",
+ parameters.strict_required_status_checks_policy
+ )?;
+ if !parameters.required_status_checks.is_empty() {
+ let checks: Vec<_> = parameters
+ .required_status_checks
+ .iter()
+ .map(|c| &c.context)
+ .collect();
+ writeln!(result, " Checks: {:?}", checks)?;
+ }
+ }
+ api::RulesetRule::RequiredDeployments { parameters } => {
+ writeln!(result, " Rule: Required Deployments")?;
+ writeln!(
+ result,
+ " Environments: {:?}",
+ parameters.required_deployment_environments
+ )?;
+ }
+ }
+ }
+
+ Ok(())
+}
+
 #[derive(Debug)]
 enum BranchProtectionDiffOperation {
 Create(api::BranchProtection),
@@ -1390,6 +1819,55 @@
 Update(String, api::BranchProtection),
 Delete(String),
 }

+#[derive(Debug)]
+struct RulesetDiff {
+ name: String,
+ operation: RulesetDiffOperation,
+}
+
+impl RulesetDiff {
+ fn apply(&self, sync: &GitHubWrite, org: &str, repo_name: &str) -> anyhow::Result<()> {
+ use api::RulesetOp;
+ match &self.operation {
+ RulesetDiffOperation::Create(ruleset) => {
+ sync.upsert_ruleset(RulesetOp::CreateForRepo, org, repo_name, ruleset)?;
+ }
+ RulesetDiffOperation::Update(id, _, new_ruleset) => {
+ sync.upsert_ruleset(RulesetOp::UpdateRuleset(*id), org, repo_name, new_ruleset)?;
+ }
+ RulesetDiffOperation::Delete(id) => {
+ debug!(
+ "Deleting ruleset '{}' (id: {}) on '{}/{}' as \
+ the ruleset is not in the team repo",
+ self.name, id, org, repo_name
+ );
+ sync.delete_ruleset(org, repo_name, *id)?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl std::fmt::Display for RulesetDiff {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ writeln!(f, " {}", self.name)?;
+ match &self.operation {
+ RulesetDiffOperation::Create(ruleset) => log_ruleset(ruleset, None, f),
+ RulesetDiffOperation::Update(_, old, new) => log_ruleset(old, Some(new), f),
+ RulesetDiffOperation::Delete(_) => {
+ writeln!(f, " Deleting ruleset")
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+enum RulesetDiffOperation {
+ Create(api::Ruleset),
+ Update(i64, api::Ruleset, api::Ruleset), // id, old, new
+ Delete(i64),
+}
+
 #[derive(Debug)]
 enum TeamDiff {
 Create(CreateTeamDiff),
diff --git a/sync-team/src/github/tests/mod.rs b/sync-team/src/github/tests/mod.rs
index 4795f86e0..429e1602d 100644
--- a/sync-team/src/github/tests/mod.rs
+++ b/sync-team/src/github/tests/mod.rs
@@ -232,6 +232,7 @@ fn repo_change_description() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -274,6 +275,7 @@ fn repo_change_homepage() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -342,6 +344,7 @@ fn repo_create() {
 },
 ),
 ],
+ rulesets: [],
 environments: [],
 },
 ),
@@ -396,6 +399,7 @@ fn repo_add_member() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -450,6 +454,7 @@ fn repo_change_member_permissions() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -498,6 +503,7 @@ fn repo_remove_member() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -548,6 +554,7 @@ fn repo_add_team() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -597,6 +604,7 @@ fn repo_change_team_permissions() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -645,6 +653,7 @@ fn repo_remove_team() {
 },
 ],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -684,6 +693,7 @@ fn repo_archive_repo() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -758,6 +768,7 @@ fn repo_add_branch_protection() {
 ),
 },
 ],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -848,6 +859,7 @@ fn repo_update_branch_protection() {
 ),
 },
 ],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -901,6 +913,7 @@ fn repo_remove_branch_protection() {
 ),
 },
 ],
+ ruleset_diffs: [],
 environment_diffs: [],
 },
 ),
@@ -989,6 +1002,7 @@ fn repo_environment_create() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [
 Create(
 "production",
@@ -1047,6 +1061,7 @@ fn repo_environment_delete() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [
 Delete(
 "production",
@@ -1112,6 +1127,7 @@ fn repo_environment_update() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [
 Create(
 "dev",
@@ -1171,6 +1187,7 @@ fn repo_environment_update_branches() {
 ),
 permission_diffs: [],
 branch_protection_diffs: [],
+ ruleset_diffs: [],
 environment_diffs: [
 Update {
 name: "production",
diff --git a/sync-team/src/github/tests/test_utils.rs b/sync-team/src/github/tests/test_utils.rs
index 6e59b77c0..36cdb83e7 100644
--- a/sync-team/src/github/tests/test_utils.rs
+++ b/sync-team/src/github/tests/test_utils.rs
@@ -1,5 +1,7 @@
 use std::collections::{BTreeSet, HashMap, HashSet};

+use indexmap::IndexMap;
+
 use derive_builder::Builder;
 use rust_team_data::v1::{
 self, Bot, BranchProtectionMode, Environment, GitHubTeam, MergeBot, Person, RepoPermission,
@@ -311,7 +313,7 @@ pub struct RepoData {
 #[builder(default)]
 pub branch_protections: Vec,
 #[builder(default)]
- pub environments: indexmap::IndexMap<String, v1::Environment>,
+ pub environments: IndexMap<String, v1::Environment>,
 }

 impl RepoData {
@@ -394,8 +396,8 @@ impl RepoDataBuilder {
 environments.insert(
 name.to_string(),
 v1::Environment {
- branches: vec![],
- tags: vec![],
+ branches: Vec::new(),
+ tags: Vec::new(),
 },
 );
 self.environments = Some(environments);
@@ -408,7 +410,7 @@
 name.to_string(),
 v1::Environment {
 branches: branches.iter().map(|s| s.to_string()).collect(),
- tags: vec![],
+ tags: Vec::new(),
 },
 );
 self.environments = Some(environments);
@@ -620,6 +622,19 @@ impl GithubRead for GithubMock {
 Ok(result)
 }

+ fn repo_rulesets(
+ &self,
+ org: &str,
+ repo: &str,
+ ) -> anyhow::Result<Vec<Ruleset>> {
+ Ok(self
+ .get_org(org)
+ .rulesets
+ .get(repo)
+ .cloned()
+ .unwrap_or_default())
+ }
+
 fn repo_environments(
 &self,
 org: &str,
@@ -649,7 +664,9 @@ struct GithubOrg {
 repo_members: HashMap,
 // Repo name -> Vec<(protection ID, branch protection)>
 branch_protections: HashMap<String, Vec<(String, BranchProtection)>>,
- // Repo name -> HashMap
+ // Repo name -> Vec<Ruleset>
+ rulesets: HashMap<String, Vec<Ruleset>>,
+ // Repo name -> HashMap
 repo_environments: HashMap>,
 }
diff --git a/sync-team/src/lib.rs b/sync-team/src/lib.rs
index e327f0686..e8c73a733 100644
--- a/sync-team/src/lib.rs
+++ b/sync-team/src/lib.rs
@@ -21,6 +21,7 @@ const USER_AGENT: &str = "rust-lang teams sync (https://github.com/rust-lang/syn
 pub struct Config {
 pub special_org_members: BTreeSet<String>,
 pub independent_github_orgs: BTreeSet<String>,
+ pub enable_rulesets_repos: BTreeSet<String>,
 }

 pub fn run_sync_team(