chore(ci): externalize workflow scripts and relocate main flow doc (#722)
* feat: Add GitHub Actions workflows for security audits, CodeQL analysis, contributor updates, performance benchmarks, integration tests, fuzz testing, and reusable Rust build jobs - Implemented `sec-audit.yml` for Rust package security audits using `rustsec/audit-check` and `cargo-deny-action`. - Created `sec-codeql.yml` for CodeQL analysis scheduled twice daily. - Added `sync-contributors.yml` to update the NOTICE file with new contributors automatically. - Introduced `test-benchmarks.yml` for performance benchmarks using Criterion. - Established `test-e2e.yml` for running integration and end-to-end tests. - Developed `test-fuzz.yml` for fuzz testing with configurable runtime. - Created `test-rust-build.yml` as a reusable job for executing Rust commands with customizable parameters. - Documented main branch delivery flows in `main-branch-flow.md` for clarity on CI/CD processes. * ci(workflows): update workflow scripts and rename for clarity; remove obsolete lint feedback script * chore(ci): externalize workflow scripts and relocate main flow doc
This commit is contained in:
parent
41da46e2b2
commit
69a3b54968
34 changed files with 2090 additions and 1777 deletions
1
.github/actionlint.yaml
vendored
1
.github/actionlint.yaml
vendored
|
|
@ -1,4 +1,3 @@
|
||||||
self-hosted-runner:
|
self-hosted-runner:
|
||||||
labels:
|
labels:
|
||||||
- lxc-ci
|
|
||||||
- blacksmith-2vcpu-ubuntu-2404
|
- blacksmith-2vcpu-ubuntu-2404
|
||||||
|
|
|
||||||
29
.github/workflows/README.md
vendored
Normal file
29
.github/workflows/README.md
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
# Workflow Directory Layout
|
||||||
|
|
||||||
|
GitHub Actions only loads workflow entry files from:
|
||||||
|
|
||||||
|
- `.github/workflows/*.yml`
|
||||||
|
- `.github/workflows/*.yaml`
|
||||||
|
|
||||||
|
Subdirectories are not valid locations for workflow entry files.
|
||||||
|
|
||||||
|
Repository convention:
|
||||||
|
|
||||||
|
1. Keep runnable workflow entry files at `.github/workflows/` root.
|
||||||
|
2. Keep workflow-only helper scripts under `.github/workflows/scripts/`.
|
||||||
|
3. Keep cross-tooling/local CI scripts under `scripts/ci/` when they are used outside Actions.
|
||||||
|
|
||||||
|
Workflow behavior documentation in this directory:
|
||||||
|
|
||||||
|
- `.github/workflows/main-branch-flow.md`
|
||||||
|
|
||||||
|
Current workflow helper scripts:
|
||||||
|
|
||||||
|
- `.github/workflows/scripts/ci_workflow_owner_approval.js`
|
||||||
|
- `.github/workflows/scripts/lint_feedback.js`
|
||||||
|
- `.github/workflows/scripts/pr_auto_response_contributor_tier.js`
|
||||||
|
- `.github/workflows/scripts/pr_auto_response_labeled_routes.js`
|
||||||
|
- `.github/workflows/scripts/pr_check_status_nudge.js`
|
||||||
|
- `.github/workflows/scripts/pr_intake_checks.js`
|
||||||
|
- `.github/workflows/scripts/pr_labeler.js`
|
||||||
|
- `.github/workflows/scripts/test_benchmarks_pr_comment.js`
|
||||||
285
.github/workflows/auto-response.yml
vendored
285
.github/workflows/auto-response.yml
vendored
|
|
@ -1,285 +0,0 @@
|
||||||
name: PR Auto Responder
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [opened, reopened, labeled, unlabeled]
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, labeled, unlabeled]
|
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
contributor-tier-issues:
|
|
||||||
if: >-
|
|
||||||
(github.event_name == 'issues' &&
|
|
||||||
(github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) ||
|
|
||||||
(github.event_name == 'pull_request_target' &&
|
|
||||||
(github.event.action == 'labeled' || github.event.action == 'unlabeled'))
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- name: Apply contributor tier label for issue author
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const owner = context.repo.owner;
|
|
||||||
const repo = context.repo.repo;
|
|
||||||
const issue = context.payload.issue;
|
|
||||||
const pullRequest = context.payload.pull_request;
|
|
||||||
const target = issue ?? pullRequest;
|
|
||||||
async function loadContributorTierPolicy() {
|
|
||||||
const fallback = {
|
|
||||||
contributorTierColor: "2ED9FF",
|
|
||||||
contributorTierRules: [
|
|
||||||
{ label: "distinguished contributor", minMergedPRs: 50 },
|
|
||||||
{ label: "principal contributor", minMergedPRs: 20 },
|
|
||||||
{ label: "experienced contributor", minMergedPRs: 10 },
|
|
||||||
{ label: "trusted contributor", minMergedPRs: 5 },
|
|
||||||
],
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const { data } = await github.rest.repos.getContent({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
path: ".github/label-policy.json",
|
|
||||||
ref: context.payload.repository?.default_branch || "main",
|
|
||||||
});
|
|
||||||
const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8"));
|
|
||||||
const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({
|
|
||||||
label: String(entry.label || "").trim(),
|
|
||||||
minMergedPRs: Number(entry.min_merged_prs || 0),
|
|
||||||
}));
|
|
||||||
const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase();
|
|
||||||
if (!contributorTierColor || contributorTierRules.length === 0) {
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
return { contributorTierColor, contributorTierRules };
|
|
||||||
} catch (error) {
|
|
||||||
core.warning(`failed to load .github/label-policy.json, using fallback policy: ${error.message}`);
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy();
|
|
||||||
const contributorTierLabels = contributorTierRules.map((rule) => rule.label);
|
|
||||||
const managedContributorLabels = new Set(contributorTierLabels);
|
|
||||||
const action = context.payload.action;
|
|
||||||
const changedLabel = context.payload.label?.name;
|
|
||||||
|
|
||||||
if (!target) return;
|
|
||||||
if ((action === "labeled" || action === "unlabeled") && !managedContributorLabels.has(changedLabel)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const author = target.user;
|
|
||||||
if (!author || author.type === "Bot") return;
|
|
||||||
|
|
||||||
function contributorTierDescription(rule) {
|
|
||||||
return `Contributor with ${rule.minMergedPRs}+ merged PRs.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function ensureContributorTierLabels() {
|
|
||||||
for (const rule of contributorTierRules) {
|
|
||||||
const label = rule.label;
|
|
||||||
const expectedDescription = contributorTierDescription(rule);
|
|
||||||
try {
|
|
||||||
const { data: existing } = await github.rest.issues.getLabel({ owner, repo, name: label });
|
|
||||||
const currentColor = (existing.color || "").toUpperCase();
|
|
||||||
const currentDescription = (existing.description || "").trim();
|
|
||||||
if (currentColor !== contributorTierColor || currentDescription !== expectedDescription) {
|
|
||||||
await github.rest.issues.updateLabel({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
name: label,
|
|
||||||
new_name: label,
|
|
||||||
color: contributorTierColor,
|
|
||||||
description: expectedDescription,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
if (error.status !== 404) throw error;
|
|
||||||
await github.rest.issues.createLabel({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
name: label,
|
|
||||||
color: contributorTierColor,
|
|
||||||
description: expectedDescription,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function selectContributorTier(mergedCount) {
|
|
||||||
const matchedTier = contributorTierRules.find((rule) => mergedCount >= rule.minMergedPRs);
|
|
||||||
return matchedTier ? matchedTier.label : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
let contributorTierLabel = null;
|
|
||||||
try {
|
|
||||||
const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({
|
|
||||||
q: `repo:${owner}/${repo} is:pr is:merged author:${author.login}`,
|
|
||||||
per_page: 1,
|
|
||||||
});
|
|
||||||
const mergedCount = mergedSearch.total_count || 0;
|
|
||||||
contributorTierLabel = selectContributorTier(mergedCount);
|
|
||||||
} catch (error) {
|
|
||||||
core.warning(`failed to evaluate contributor tier status: ${error.message}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await ensureContributorTierLabels();
|
|
||||||
|
|
||||||
const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: target.number,
|
|
||||||
});
|
|
||||||
const keepLabels = currentLabels
|
|
||||||
.map((label) => label.name)
|
|
||||||
.filter((label) => !contributorTierLabels.includes(label));
|
|
||||||
|
|
||||||
if (contributorTierLabel) {
|
|
||||||
keepLabels.push(contributorTierLabel);
|
|
||||||
}
|
|
||||||
|
|
||||||
await github.rest.issues.setLabels({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: target.number,
|
|
||||||
labels: [...new Set(keepLabels)],
|
|
||||||
});
|
|
||||||
|
|
||||||
first-interaction:
|
|
||||||
if: github.event.action == 'opened'
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- name: Greet first-time contributors
|
|
||||||
uses: actions/first-interaction@a1db7729b356323c7988c20ed6f0d33fe31297be # v1
|
|
||||||
with:
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-message: |
|
|
||||||
Thanks for opening this issue.
|
|
||||||
|
|
||||||
Before maintainers triage it, please confirm:
|
|
||||||
- Repro steps are complete and run on latest `main`
|
|
||||||
- Environment details are included (OS, Rust version, ZeroClaw version)
|
|
||||||
- Sensitive values are redacted
|
|
||||||
|
|
||||||
This helps us keep issue throughput high and response latency low.
|
|
||||||
pr-message: |
|
|
||||||
Thanks for contributing to ZeroClaw.
|
|
||||||
|
|
||||||
For faster review, please ensure:
|
|
||||||
- PR template sections are fully completed
|
|
||||||
- `cargo fmt --all -- --check`, `cargo clippy --all-targets -- -D warnings`, and `cargo test` are included
|
|
||||||
- If automation/agents were used heavily, add brief workflow notes
|
|
||||||
- Scope is focused (prefer one concern per PR)
|
|
||||||
|
|
||||||
See `CONTRIBUTING.md` and `docs/pr-workflow.md` for full collaboration rules.
|
|
||||||
|
|
||||||
labeled-routes:
|
|
||||||
if: github.event.action == 'labeled'
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- name: Handle label-driven responses
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const label = context.payload.label?.name;
|
|
||||||
if (!label) return;
|
|
||||||
|
|
||||||
const issue = context.payload.issue;
|
|
||||||
const pullRequest = context.payload.pull_request;
|
|
||||||
const target = issue ?? pullRequest;
|
|
||||||
if (!target) return;
|
|
||||||
|
|
||||||
const isIssue = Boolean(issue);
|
|
||||||
const issueNumber = target.number;
|
|
||||||
const owner = context.repo.owner;
|
|
||||||
const repo = context.repo.repo;
|
|
||||||
|
|
||||||
const rules = [
|
|
||||||
{
|
|
||||||
label: "r:support",
|
|
||||||
close: true,
|
|
||||||
closeIssuesOnly: true,
|
|
||||||
closeReason: "not_planned",
|
|
||||||
message:
|
|
||||||
"This looks like a usage/support request. Please use README + docs first, then open a focused bug with repro details if behavior is incorrect.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: "r:needs-repro",
|
|
||||||
close: false,
|
|
||||||
message:
|
|
||||||
"Thanks for the report. Please add deterministic repro steps, exact environment, and redacted logs so maintainers can triage quickly.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: "invalid",
|
|
||||||
close: true,
|
|
||||||
closeIssuesOnly: true,
|
|
||||||
closeReason: "not_planned",
|
|
||||||
message:
|
|
||||||
"Closing as invalid based on current information. If this is still relevant, open a new issue with updated evidence and reproducible steps.",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: "duplicate",
|
|
||||||
close: true,
|
|
||||||
closeIssuesOnly: true,
|
|
||||||
closeReason: "not_planned",
|
|
||||||
message:
|
|
||||||
"Closing as duplicate. Please continue discussion in the canonical linked issue/PR.",
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const rule = rules.find((entry) => entry.label === label);
|
|
||||||
if (!rule) return;
|
|
||||||
|
|
||||||
const marker = `<!-- auto-response:${rule.label} -->`;
|
|
||||||
const comments = await github.paginate(github.rest.issues.listComments, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: issueNumber,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const alreadyCommented = comments.some((comment) =>
|
|
||||||
(comment.body || "").includes(marker)
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!alreadyCommented) {
|
|
||||||
await github.rest.issues.createComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: issueNumber,
|
|
||||||
body: `${rule.message}\n\n${marker}`,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!rule.close) return;
|
|
||||||
if (rule.closeIssuesOnly && !isIssue) return;
|
|
||||||
if (target.state === "closed") return;
|
|
||||||
|
|
||||||
if (isIssue) {
|
|
||||||
await github.rest.issues.update({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: issueNumber,
|
|
||||||
state: "closed",
|
|
||||||
state_reason: rule.closeReason || "not_planned",
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await github.rest.issues.update({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: issueNumber,
|
|
||||||
state: "closed",
|
|
||||||
});
|
|
||||||
}
|
|
||||||
101
.github/workflows/benchmarks.yml
vendored
101
.github/workflows/benchmarks.yml
vendored
|
|
@ -1,101 +0,0 @@
|
||||||
name: Performance Benchmarks
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: bench-${{ github.event.pull_request.number || github.sha }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
benchmarks:
|
|
||||||
name: Criterion Benchmarks
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
timeout-minutes: 30
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
|
||||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
|
||||||
with:
|
|
||||||
toolchain: 1.92.0
|
|
||||||
- uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3
|
|
||||||
|
|
||||||
- name: Run benchmarks
|
|
||||||
run: cargo bench --locked 2>&1 | tee benchmark_output.txt
|
|
||||||
|
|
||||||
- name: Upload benchmark results
|
|
||||||
if: always()
|
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
|
||||||
with:
|
|
||||||
name: benchmark-results
|
|
||||||
path: |
|
|
||||||
target/criterion/
|
|
||||||
benchmark_output.txt
|
|
||||||
retention-days: 30
|
|
||||||
|
|
||||||
- name: Post benchmark summary on PR
|
|
||||||
if: github.event_name == 'pull_request'
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const fs = require('fs');
|
|
||||||
const output = fs.readFileSync('benchmark_output.txt', 'utf8');
|
|
||||||
|
|
||||||
// Extract Criterion result lines
|
|
||||||
const lines = output.split('\n').filter(l =>
|
|
||||||
l.includes('time:') || l.includes('change:') || l.includes('Performance')
|
|
||||||
);
|
|
||||||
|
|
||||||
if (lines.length === 0) {
|
|
||||||
core.info('No benchmark results to post.');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = [
|
|
||||||
'## 📊 Benchmark Results',
|
|
||||||
'',
|
|
||||||
'```',
|
|
||||||
lines.join('\n'),
|
|
||||||
'```',
|
|
||||||
'',
|
|
||||||
'<details><summary>Full output</summary>',
|
|
||||||
'',
|
|
||||||
'```',
|
|
||||||
output.substring(0, 60000),
|
|
||||||
'```',
|
|
||||||
'</details>',
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
// Find and update or create comment
|
|
||||||
const { data: comments } = await github.rest.issues.listComments({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
issue_number: context.payload.pull_request.number,
|
|
||||||
});
|
|
||||||
|
|
||||||
const marker = '## 📊 Benchmark Results';
|
|
||||||
const existing = comments.find(c => c.body && c.body.startsWith(marker));
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
await github.rest.issues.updateComment({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
comment_id: existing.id,
|
|
||||||
body,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await github.rest.issues.createComment({
|
|
||||||
owner: context.repo.owner,
|
|
||||||
repo: context.repo.repo,
|
|
||||||
issue_number: context.payload.pull_request.number,
|
|
||||||
body,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
@ -205,7 +205,7 @@ jobs:
|
||||||
DOCS_RESULT: ${{ needs.docs-quality.result }}
|
DOCS_RESULT: ${{ needs.docs-quality.result }}
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const script = require('./scripts/ci/lint_feedback.js');
|
const script = require('./.github/workflows/scripts/lint_feedback.js');
|
||||||
await script({github, context, core});
|
await script({github, context, core});
|
||||||
|
|
||||||
workflow-owner-approval:
|
workflow-owner-approval:
|
||||||
|
|
@ -217,91 +217,17 @@ jobs:
|
||||||
contents: read
|
contents: read
|
||||||
pull-requests: read
|
pull-requests: read
|
||||||
steps:
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
- name: Require owner approval for workflow file changes
|
- name: Require owner approval for workflow file changes
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
env:
|
env:
|
||||||
WORKFLOW_OWNER_LOGINS: ${{ vars.WORKFLOW_OWNER_LOGINS }}
|
WORKFLOW_OWNER_LOGINS: ${{ vars.WORKFLOW_OWNER_LOGINS }}
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const owner = context.repo.owner;
|
const script = require('./.github/workflows/scripts/ci_workflow_owner_approval.js');
|
||||||
const repo = context.repo.repo;
|
await script({ github, context, core });
|
||||||
const prNumber = context.payload.pull_request?.number;
|
|
||||||
const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || "";
|
|
||||||
if (!prNumber) {
|
|
||||||
core.setFailed("Missing pull_request context.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseOwners = ["theonlyhennygod", "willsarg"];
|
|
||||||
const configuredOwners = (process.env.WORKFLOW_OWNER_LOGINS || "")
|
|
||||||
.split(",")
|
|
||||||
.map((login) => login.trim().toLowerCase())
|
|
||||||
.filter(Boolean);
|
|
||||||
const ownerAllowlist = [...new Set([...baseOwners, ...configuredOwners])];
|
|
||||||
|
|
||||||
if (ownerAllowlist.length === 0) {
|
|
||||||
core.setFailed("Workflow owner allowlist is empty.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info(`Workflow owner allowlist: ${ownerAllowlist.join(", ")}`);
|
|
||||||
|
|
||||||
const files = await github.paginate(github.rest.pulls.listFiles, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
pull_number: prNumber,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const workflowFiles = files
|
|
||||||
.map((file) => file.filename)
|
|
||||||
.filter((name) => name.startsWith(".github/workflows/"));
|
|
||||||
|
|
||||||
if (workflowFiles.length === 0) {
|
|
||||||
core.info("No workflow files changed in this PR.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info(`Workflow files changed:\n- ${workflowFiles.join("\n- ")}`);
|
|
||||||
|
|
||||||
if (prAuthor && ownerAllowlist.includes(prAuthor)) {
|
|
||||||
core.info(`Workflow PR authored by allowlisted owner: @${prAuthor}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const reviews = await github.paginate(github.rest.pulls.listReviews, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
pull_number: prNumber,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const latestReviewByUser = new Map();
|
|
||||||
for (const review of reviews) {
|
|
||||||
const login = review.user?.login;
|
|
||||||
if (!login) continue;
|
|
||||||
latestReviewByUser.set(login.toLowerCase(), review.state);
|
|
||||||
}
|
|
||||||
|
|
||||||
const approvedUsers = [...latestReviewByUser.entries()]
|
|
||||||
.filter(([, state]) => state === "APPROVED")
|
|
||||||
.map(([login]) => login);
|
|
||||||
|
|
||||||
if (approvedUsers.length === 0) {
|
|
||||||
core.setFailed("Workflow files changed but no approving review is present.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const ownerApprover = approvedUsers.find((login) => ownerAllowlist.includes(login));
|
|
||||||
if (!ownerApprover) {
|
|
||||||
core.setFailed(
|
|
||||||
`Workflow files changed. Approvals found (${approvedUsers.join(", ")}), but none match workflow owner allowlist.`,
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info(`Workflow owner approval present: @${ownerApprover}`);
|
|
||||||
|
|
||||||
ci-required:
|
ci-required:
|
||||||
name: CI Required Gate
|
name: CI Required Gate
|
||||||
if: always()
|
if: always()
|
||||||
841
.github/workflows/labeler.yml
vendored
841
.github/workflows/labeler.yml
vendored
|
|
@ -1,841 +0,0 @@
|
||||||
name: PR Labeler
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, reopened, synchronize, edited, labeled, unlabeled]
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Run mode for managed-label governance"
|
|
||||||
required: true
|
|
||||||
default: "audit"
|
|
||||||
type: choice
|
|
||||||
options:
|
|
||||||
- audit
|
|
||||||
- repair
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: pr-labeler-${{ github.event.pull_request.number || github.run_id }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
label:
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
|
||||||
- name: Apply path labels
|
|
||||||
if: github.event_name == 'pull_request_target'
|
|
||||||
uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
sync-labels: true
|
|
||||||
|
|
||||||
- name: Apply size/risk/module labels
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const pr = context.payload.pull_request;
|
|
||||||
const owner = context.repo.owner;
|
|
||||||
const repo = context.repo.repo;
|
|
||||||
const action = context.payload.action;
|
|
||||||
const changedLabel = context.payload.label?.name;
|
|
||||||
|
|
||||||
const sizeLabels = ["size: XS", "size: S", "size: M", "size: L", "size: XL"];
|
|
||||||
const computedRiskLabels = ["risk: low", "risk: medium", "risk: high"];
|
|
||||||
const manualRiskOverrideLabel = "risk: manual";
|
|
||||||
const managedEnforcedLabels = new Set([
|
|
||||||
...sizeLabels,
|
|
||||||
manualRiskOverrideLabel,
|
|
||||||
...computedRiskLabels,
|
|
||||||
]);
|
|
||||||
if ((action === "labeled" || action === "unlabeled") && !managedEnforcedLabels.has(changedLabel)) {
|
|
||||||
core.info(`skip non-size/risk label event: ${changedLabel || "unknown"}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function loadContributorTierPolicy() {
|
|
||||||
const fallback = {
|
|
||||||
contributorTierColor: "2ED9FF",
|
|
||||||
contributorTierRules: [
|
|
||||||
{ label: "distinguished contributor", minMergedPRs: 50 },
|
|
||||||
{ label: "principal contributor", minMergedPRs: 20 },
|
|
||||||
{ label: "experienced contributor", minMergedPRs: 10 },
|
|
||||||
{ label: "trusted contributor", minMergedPRs: 5 },
|
|
||||||
],
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const { data } = await github.rest.repos.getContent({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
path: ".github/label-policy.json",
|
|
||||||
ref: context.payload.repository?.default_branch || "main",
|
|
||||||
});
|
|
||||||
const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8"));
|
|
||||||
const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({
|
|
||||||
label: String(entry.label || "").trim(),
|
|
||||||
minMergedPRs: Number(entry.min_merged_prs || 0),
|
|
||||||
}));
|
|
||||||
const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase();
|
|
||||||
if (!contributorTierColor || contributorTierRules.length === 0) {
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
return { contributorTierColor, contributorTierRules };
|
|
||||||
} catch (error) {
|
|
||||||
core.warning(`failed to load .github/label-policy.json, using fallback policy: ${error.message}`);
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy();
|
|
||||||
const contributorTierLabels = contributorTierRules.map((rule) => rule.label);
|
|
||||||
|
|
||||||
const managedPathLabels = [
|
|
||||||
"docs",
|
|
||||||
"dependencies",
|
|
||||||
"ci",
|
|
||||||
"core",
|
|
||||||
"agent",
|
|
||||||
"channel",
|
|
||||||
"config",
|
|
||||||
"cron",
|
|
||||||
"daemon",
|
|
||||||
"doctor",
|
|
||||||
"gateway",
|
|
||||||
"health",
|
|
||||||
"heartbeat",
|
|
||||||
"integration",
|
|
||||||
"memory",
|
|
||||||
"observability",
|
|
||||||
"onboard",
|
|
||||||
"provider",
|
|
||||||
"runtime",
|
|
||||||
"security",
|
|
||||||
"service",
|
|
||||||
"skillforge",
|
|
||||||
"skills",
|
|
||||||
"tool",
|
|
||||||
"tunnel",
|
|
||||||
"tests",
|
|
||||||
"scripts",
|
|
||||||
"dev",
|
|
||||||
];
|
|
||||||
const managedPathLabelSet = new Set(managedPathLabels);
|
|
||||||
|
|
||||||
const moduleNamespaceRules = [
|
|
||||||
{ root: "src/agent/", prefix: "agent", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/channels/", prefix: "channel", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/config/", prefix: "config", coreEntries: new Set(["mod.rs", "schema.rs"]) },
|
|
||||||
{ root: "src/cron/", prefix: "cron", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/daemon/", prefix: "daemon", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/doctor/", prefix: "doctor", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/gateway/", prefix: "gateway", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/health/", prefix: "health", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/heartbeat/", prefix: "heartbeat", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/integrations/", prefix: "integration", coreEntries: new Set(["mod.rs", "registry.rs"]) },
|
|
||||||
{ root: "src/memory/", prefix: "memory", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/observability/", prefix: "observability", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/onboard/", prefix: "onboard", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/providers/", prefix: "provider", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/runtime/", prefix: "runtime", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/security/", prefix: "security", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/service/", prefix: "service", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/skillforge/", prefix: "skillforge", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/skills/", prefix: "skills", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
{ root: "src/tools/", prefix: "tool", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
|
||||||
{ root: "src/tunnel/", prefix: "tunnel", coreEntries: new Set(["mod.rs"]) },
|
|
||||||
];
|
|
||||||
const managedModulePrefixes = [...new Set(moduleNamespaceRules.map((rule) => `${rule.prefix}:`))];
|
|
||||||
const orderedOtherLabelStyles = [
|
|
||||||
{ label: "health", color: "8EC9B8" },
|
|
||||||
{ label: "tool", color: "7FC4B6" },
|
|
||||||
{ label: "agent", color: "86C4A2" },
|
|
||||||
{ label: "memory", color: "8FCB99" },
|
|
||||||
{ label: "channel", color: "7EB6F2" },
|
|
||||||
{ label: "service", color: "95C7B6" },
|
|
||||||
{ label: "integration", color: "8DC9AE" },
|
|
||||||
{ label: "tunnel", color: "9FC8B3" },
|
|
||||||
{ label: "config", color: "AABCD0" },
|
|
||||||
{ label: "observability", color: "84C9D0" },
|
|
||||||
{ label: "docs", color: "8FBBE0" },
|
|
||||||
{ label: "dev", color: "B9C1CC" },
|
|
||||||
{ label: "tests", color: "9DC8C7" },
|
|
||||||
{ label: "skills", color: "BFC89B" },
|
|
||||||
{ label: "skillforge", color: "C9C39B" },
|
|
||||||
{ label: "provider", color: "958DF0" },
|
|
||||||
{ label: "runtime", color: "A3ADD8" },
|
|
||||||
{ label: "heartbeat", color: "C0C88D" },
|
|
||||||
{ label: "daemon", color: "C8C498" },
|
|
||||||
{ label: "doctor", color: "C1CF9D" },
|
|
||||||
{ label: "onboard", color: "D2BF86" },
|
|
||||||
{ label: "cron", color: "D2B490" },
|
|
||||||
{ label: "ci", color: "AEB4CE" },
|
|
||||||
{ label: "dependencies", color: "9FB1DE" },
|
|
||||||
{ label: "gateway", color: "B5A8E5" },
|
|
||||||
{ label: "security", color: "E58D85" },
|
|
||||||
{ label: "core", color: "C8A99B" },
|
|
||||||
{ label: "scripts", color: "C9B49F" },
|
|
||||||
];
|
|
||||||
// Display-priority list derived from the ordered style table above:
// labels earlier in this list sort first when rendered on a PR.
const otherLabelDisplayOrder = orderedOtherLabelStyles.map((entry) => entry.label);
// Set of module prefixes covered by the namespace rules table.
const modulePrefixSet = new Set(moduleNamespaceRules.map((rule) => rule.prefix));
// Module prefixes in display order (only prefixes that have a namespace rule).
const modulePrefixPriority = otherLabelDisplayOrder.filter((label) => modulePrefixSet.has(label));
// Path-scope labels reuse the same display order as the style table.
const pathLabelPriority = [...otherLabelDisplayOrder];
// Fixed display orderings for the computed risk/size labels and contributor tiers.
const riskDisplayOrder = ["risk: high", "risk: medium", "risk: low", "risk: manual"];
const sizeDisplayOrder = ["size: XS", "size: S", "size: M", "size: L", "size: XL"];
const contributorDisplayOrder = [
  "distinguished contributor",
  "principal contributor",
  "experienced contributor",
  "trusted contributor",
];

// Label -> priority-index lookup maps consumed by the sort helpers below;
// labels missing from a map sort after all ranked labels.
const modulePrefixPriorityIndex = new Map(
  modulePrefixPriority.map((prefix, index) => [prefix, index])
);
const pathLabelPriorityIndex = new Map(
  pathLabelPriority.map((label, index) => [label, index])
);
const riskPriorityIndex = new Map(
  riskDisplayOrder.map((label, index) => [label, index])
);
const sizePriorityIndex = new Map(
  sizeDisplayOrder.map((label, index) => [label, index])
);
const contributorPriorityIndex = new Map(
  contributorDisplayOrder.map((label, index) => [label, index])
);
|
|
||||||
|
|
||||||
// Label name -> hex color (no leading '#') for the scope labels, keyed
// from the ordered style table.
const otherLabelColors = Object.fromEntries(
  orderedOtherLabelStyles.map((entry) => [entry.label, entry.color])
);
// Full color table for statically known managed labels: fixed colors for
// size/risk labels, plus everything from otherLabelColors.
const staticLabelColors = {
  "size: XS": "E7CDD3",
  "size: S": "E1BEC7",
  "size: M": "DBB0BB",
  "size: L": "D4A2AF",
  "size: XL": "CE94A4",
  "risk: low": "97D3A6",
  "risk: medium": "E4C47B",
  "risk: high": "E98E88",
  "risk: manual": "B7A4E0",
  ...otherLabelColors,
};
// Human-readable descriptions for statically known managed labels; these
// strings are written to repository label metadata by ensureLabel.
const staticLabelDescriptions = {
  "size: XS": "Auto size: <=80 non-doc changed lines.",
  "size: S": "Auto size: 81-250 non-doc changed lines.",
  "size: M": "Auto size: 251-500 non-doc changed lines.",
  "size: L": "Auto size: 501-1000 non-doc changed lines.",
  "size: XL": "Auto size: >1000 non-doc changed lines.",
  "risk: low": "Auto risk: docs/chore-only paths.",
  "risk: medium": "Auto risk: src/** or dependency/config changes.",
  "risk: high": "Auto risk: security/runtime/gateway/tools/workflows.",
  "risk: manual": "Maintainer override: keep selected risk label.",
  docs: "Auto scope: docs/markdown/template files changed.",
  dependencies: "Auto scope: dependency manifest/lock/policy changed.",
  ci: "Auto scope: CI/workflow/hook files changed.",
  core: "Auto scope: root src/*.rs files changed.",
  agent: "Auto scope: src/agent/** changed.",
  channel: "Auto scope: src/channels/** changed.",
  config: "Auto scope: src/config/** changed.",
  cron: "Auto scope: src/cron/** changed.",
  daemon: "Auto scope: src/daemon/** changed.",
  doctor: "Auto scope: src/doctor/** changed.",
  gateway: "Auto scope: src/gateway/** changed.",
  health: "Auto scope: src/health/** changed.",
  heartbeat: "Auto scope: src/heartbeat/** changed.",
  integration: "Auto scope: src/integrations/** changed.",
  memory: "Auto scope: src/memory/** changed.",
  observability: "Auto scope: src/observability/** changed.",
  onboard: "Auto scope: src/onboard/** changed.",
  provider: "Auto scope: src/providers/** changed.",
  runtime: "Auto scope: src/runtime/** changed.",
  security: "Auto scope: src/security/** changed.",
  service: "Auto scope: src/service/** changed.",
  skillforge: "Auto scope: src/skillforge/** changed.",
  skills: "Auto scope: src/skills/** changed.",
  tool: "Auto scope: src/tools/** changed.",
  tunnel: "Auto scope: src/tunnel/** changed.",
  tests: "Auto scope: tests/** changed.",
  scripts: "Auto scope: scripts/** changed.",
  dev: "Auto scope: dev/** changed.",
};
|
|
||||||
// Register metadata for contributor-tier labels: all tiers share one color,
// and each tier's description is derived from its merged-PR threshold
// (rule lookup is by exact label name; tiers without a rule keep no description).
for (const label of contributorTierLabels) {
  staticLabelColors[label] = contributorTierColor;
  const rule = contributorTierRules.find((entry) => entry.label === label);
  if (rule) {
    staticLabelDescriptions[label] = `Contributor with ${rule.minMergedPRs}+ merged PRs.`;
  }
}
|
|
||||||
|
|
||||||
// Colors for dynamically generated module labels, keyed by "<prefix>:" so a
// startsWith match (see colorForLabel) resolves e.g. "provider: openai".
// Prefixes without a scope color fall back to the default teal.
const modulePrefixColors = Object.fromEntries(
  modulePrefixPriority.map((prefix) => [
    `${prefix}:`,
    otherLabelColors[prefix] || "BFDADC",
  ])
);
|
|
||||||
|
|
||||||
// Provider names searched for (as whole words, case-insensitive) in PR
// title/body/filenames/patches to emit "provider: <name>" labels.
const providerKeywordHints = [
  "deepseek",
  "moonshot",
  "kimi",
  "qwen",
  "mistral",
  "doubao",
  "baichuan",
  "yi",
  "siliconflow",
  "vertex",
  "azure",
  "perplexity",
  "venice",
  "vercel",
  "cloudflare",
  "synthetic",
  "opencode",
  "zai",
  "glm",
  "minimax",
  "bedrock",
  "qianfan",
  "groq",
  "together",
  "fireworks",
  "cohere",
  "openai",
  "openrouter",
  "anthropic",
  "gemini",
  "ollama",
];
|
|
||||||
|
|
||||||
// Chat-channel names searched for (as whole words, case-insensitive) in PR
// title/body/filenames/patches to emit "channel: <name>" labels.
const channelKeywordHints = [
  "telegram",
  "discord",
  "slack",
  "whatsapp",
  "matrix",
  "irc",
  "imessage",
  "email",
  "cli",
];
|
|
||||||
|
|
||||||
/**
 * Report whether a changed-file path counts as documentation-like content
 * (excluded from size counting and treated as low risk).
 *
 * @param {string} path - repository-relative file path.
 * @returns {boolean} true for docs trees, markdown files, the license, the
 *   markdownlint config, and the PR/issue templates.
 */
function isDocsLike(path) {
  // Exact-match documentation files.
  const docExactPaths = [
    "LICENSE",
    ".markdownlint-cli2.yaml",
    ".github/pull_request_template.md",
  ];
  if (docExactPaths.includes(path)) return true;
  // Whole directories that only hold documentation.
  if (path.startsWith("docs/") || path.startsWith(".github/ISSUE_TEMPLATE/")) {
    return true;
  }
  // Markdown anywhere in the tree.
  return path.endsWith(".md") || path.endsWith(".mdx");
}
|
|
||||||
|
|
||||||
/**
 * Normalize a raw path component into a label-safe segment: lowercase,
 * ".rs" extension stripped, runs of disallowed characters collapsed to "-",
 * leading/trailing dashes and underscores removed, capped at 40 characters.
 *
 * @param {string|null|undefined} segment - raw segment (may be falsy).
 * @returns {string} normalized segment; empty string when nothing remains.
 */
function normalizeLabelSegment(segment) {
  const lowered = (segment || "").toLowerCase();
  const withoutRustExt = lowered.replace(/\.rs$/g, "");
  const dashed = withoutRustExt.replace(/[^a-z0-9_-]+/g, "-");
  const trimmed = dashed.replace(/^[-_]+|[-_]+$/g, "");
  return trimmed.slice(0, 40);
}
|
|
||||||
|
|
||||||
/**
 * Case-insensitive whole-word search: true when `keyword` appears in `text`
 * bounded by non-word characters (or the string edges). Underscores count as
 * word characters, so "my_openai" does NOT match "openai".
 *
 * @param {string} text - haystack to search.
 * @param {string} keyword - literal keyword; regex metacharacters are escaped.
 * @returns {boolean} whether a bounded occurrence exists.
 */
function containsKeyword(text, keyword) {
  const escapedKeyword = keyword.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const boundary = "[^a-z0-9_]";
  const matcher = new RegExp(`(^|${boundary})${escapedKeyword}(${boundary}|$)`, "i");
  return matcher.test(text);
}
|
|
||||||
|
|
||||||
// Compose a namespaced module label, e.g. ("provider", "openai") -> "provider: openai".
function formatModuleLabel(prefix, segment) {
  return `${prefix}: ${segment}`;
}
|
|
||||||
|
|
||||||
/**
 * Parse a "prefix: segment" module label into its lowercase parts.
 *
 * @param {*} label - candidate label (non-strings are rejected).
 * @returns {{prefix: string, segment: string}|null} parsed parts, or null
 *   when the label is not a string, lacks a "prefix: segment" shape, or
 *   either part is empty after trimming.
 */
function parseModuleLabel(label) {
  if (typeof label !== "string") return null;
  const parts = /^([^:]+):\s*(.+)$/.exec(label);
  if (parts === null) return null;
  const [, rawPrefix, rawSegment] = parts;
  const prefix = rawPrefix.trim().toLowerCase();
  const segment = (rawSegment || "").trim().toLowerCase();
  return prefix && segment ? { prefix, segment } : null;
}
|
|
||||||
|
|
||||||
/**
 * Deduplicate labels and sort them by a priority map: ranked labels come
 * first in map order, unranked labels follow alphabetically.
 *
 * @param {Iterable<string>} labels - labels to sort (duplicates removed).
 * @param {Map<string, number>} priorityIndex - label -> rank; absent labels
 *   sort after every ranked label.
 * @returns {string[]} new sorted array (input is not mutated).
 */
function sortByPriority(labels, priorityIndex) {
  const rankOf = (label) =>
    priorityIndex.has(label) ? priorityIndex.get(label) : Number.MAX_SAFE_INTEGER;
  const unique = [...new Set(labels)];
  unique.sort((a, b) => {
    const rankDelta = rankOf(a) - rankOf(b);
    return rankDelta !== 0 ? rankDelta : a.localeCompare(b);
  });
  return unique;
}
|
|
||||||
|
|
||||||
// Sort "prefix: segment" module labels for display: first by the prefix's
// display priority, then alphabetically by prefix for unranked prefixes,
// and within a prefix the "core" segment sorts after all specific segments.
// Unparseable (non-module) labels fall back to plain lexicographic order.
function sortModuleLabels(labels) {
  return [...new Set(labels)].sort((left, right) => {
    const leftParsed = parseModuleLabel(left);
    const rightParsed = parseModuleLabel(right);
    // If either side is not a module label, compare the raw strings.
    if (!leftParsed || !rightParsed) return left.localeCompare(right);

    // Unranked prefixes get MAX_SAFE_INTEGER so they sort after ranked ones.
    const leftPrefixPriority = modulePrefixPriorityIndex.has(leftParsed.prefix)
      ? modulePrefixPriorityIndex.get(leftParsed.prefix)
      : Number.MAX_SAFE_INTEGER;
    const rightPrefixPriority = modulePrefixPriorityIndex.has(rightParsed.prefix)
      ? modulePrefixPriorityIndex.get(rightParsed.prefix)
      : Number.MAX_SAFE_INTEGER;

    if (leftPrefixPriority !== rightPrefixPriority) {
      return leftPrefixPriority - rightPrefixPriority;
    }
    // Equal (likely both unranked) priority: order by prefix name.
    if (leftParsed.prefix !== rightParsed.prefix) {
      return leftParsed.prefix.localeCompare(rightParsed.prefix);
    }

    // Same prefix: push "<prefix>: core" behind the specific segments.
    const leftIsCore = leftParsed.segment === "core";
    const rightIsCore = rightParsed.segment === "core";
    if (leftIsCore !== rightIsCore) return leftIsCore ? 1 : -1;

    return leftParsed.segment.localeCompare(rightParsed.segment);
  });
}
|
|
||||||
|
|
||||||
/**
 * Drop redundant "<prefix>: core" labels: when a prefix also carries at
 * least one specific (non-"core") segment, the core label is removed.
 *
 * @param {Iterable<string>} rawLabels - detected module labels.
 * @returns {Set<string>} refined label set (input is not mutated).
 */
function refineModuleLabels(rawLabels) {
  const refined = new Set(rawLabels);
  const segmentsByPrefix = new Map();

  // Group every parseable module label's segments under its prefix.
  for (const label of rawLabels) {
    const parsed = parseModuleLabel(label);
    if (!parsed) continue;
    const bucket = segmentsByPrefix.get(parsed.prefix) ?? new Set();
    bucket.add(parsed.segment);
    segmentsByPrefix.set(parsed.prefix, bucket);
  }

  // A prefix with any specific segment loses its generic core label.
  for (const [prefix, segments] of segmentsByPrefix) {
    let hasSpecificSegment = false;
    for (const segment of segments) {
      if (segment !== "core") {
        hasSpecificSegment = true;
        break;
      }
    }
    if (hasSpecificSegment) {
      refined.delete(formatModuleLabel(prefix, "core"));
    }
  }

  return refined;
}
|
|
||||||
|
|
||||||
// Collapse module labels per prefix: a prefix with exactly one detected
// segment keeps its specific "<prefix>: <segment>" label, while a prefix
// with several segments drops them all and instead reports the prefix in
// `forcePathPrefixes` so the caller applies the broad path-scope label —
// this avoids label spam on PRs that touch many submodules.
// Non-module (unparseable) labels pass through unchanged.
function compactModuleLabels(labels) {
  const groupedSegments = new Map();
  const compactedModuleLabels = new Set();
  const forcePathPrefixes = new Set();

  for (const label of labels) {
    const parsed = parseModuleLabel(label);
    if (!parsed) {
      // Not a "prefix: segment" label — keep as-is.
      compactedModuleLabels.add(label);
      continue;
    }
    if (!groupedSegments.has(parsed.prefix)) {
      groupedSegments.set(parsed.prefix, new Set());
    }
    groupedSegments.get(parsed.prefix).add(parsed.segment);
  }

  for (const [prefix, segments] of groupedSegments) {
    // Guard against empty-string segments before counting.
    const uniqueSegments = [...new Set([...segments].filter(Boolean))];
    if (uniqueSegments.length === 0) continue;

    if (uniqueSegments.length === 1) {
      compactedModuleLabels.add(formatModuleLabel(prefix, uniqueSegments[0]));
    } else {
      forcePathPrefixes.add(prefix);
    }
  }

  return {
    moduleLabels: compactedModuleLabels,
    forcePathPrefixes,
  };
}
|
|
||||||
|
|
||||||
/**
 * Resolve the display color for a label: exact static entry first, then the
 * first module-prefix ("<prefix>:") whose name starts the label, then the
 * default teal.
 *
 * @param {string} label - label name.
 * @returns {string} six-digit hex color without a leading '#'.
 */
function colorForLabel(label) {
  const staticColor = staticLabelColors[label];
  if (staticColor) return staticColor;
  for (const [prefix, color] of Object.entries(modulePrefixColors)) {
    if (label.startsWith(prefix)) return color;
  }
  return "BFDADC";
}
|
|
||||||
|
|
||||||
/**
 * Resolve the description for a label: exact static entry first, then a
 * generated description for "prefix: segment" module labels ("core" segments
 * get special wording), else a generic fallback.
 *
 * @param {string} label - label name.
 * @returns {string} description text for the repository label metadata.
 */
function descriptionForLabel(label) {
  const staticDescription = staticLabelDescriptions[label];
  if (staticDescription) return staticDescription;

  const parsed = parseModuleLabel(label);
  if (!parsed) return "Auto-managed label.";

  return parsed.segment === "core"
    ? `Auto module: ${parsed.prefix} core files changed.`
    : `Auto module: ${parsed.prefix}/${parsed.segment} changed.`;
}
|
|
||||||
|
|
||||||
// Create the repository label if missing, or update its color/description
// when they drift from the computed metadata. Passing `existing` (a label
// object from a prior list call) skips the getLabel round trip.
// Non-404 API errors propagate to the caller.
async function ensureLabel(name, existing = null) {
  const expectedColor = colorForLabel(name);
  const expectedDescription = descriptionForLabel(name);
  try {
    const current = existing || (await github.rest.issues.getLabel({ owner, repo, name })).data;
    // Normalize before comparing: colors upper-cased, descriptions trimmed.
    const currentColor = (current.color || "").toUpperCase();
    const currentDescription = (current.description || "").trim();
    if (currentColor !== expectedColor || currentDescription !== expectedDescription) {
      await github.rest.issues.updateLabel({
        owner,
        repo,
        name,
        new_name: name,
        color: expectedColor,
        description: expectedDescription,
      });
    }
  } catch (error) {
    // Only "label not found" (404) falls through to creation.
    if (error.status !== 404) throw error;
    await github.rest.issues.createLabel({
      owner,
      repo,
      name,
      color: expectedColor,
      description: expectedDescription,
    });
  }
}
|
|
||||||
|
|
||||||
/**
 * Report whether this automation owns a label: the manual-risk override,
 * any size/risk/contributor-tier label, any managed path-scope label, or
 * any label under a managed module prefix.
 *
 * @param {string} label - label name.
 * @returns {boolean} true when the label's metadata is managed here.
 */
function isManagedLabel(label) {
  if (label === manualRiskOverrideLabel) return true;
  const managedFlatLists = [sizeLabels, computedRiskLabels, contributorTierLabels];
  if (managedFlatLists.some((list) => list.includes(label))) return true;
  if (managedPathLabelSet.has(label)) return true;
  return managedModulePrefixes.some((prefix) => label.startsWith(prefix));
}
|
|
||||||
|
|
||||||
// Walk every label in the repository and re-sync color/description metadata
// for the ones this automation manages; unmanaged labels are left untouched.
async function ensureManagedRepoLabelsMetadata() {
  const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, {
    owner,
    repo,
    per_page: 100,
  });

  for (const existingLabel of repoLabels) {
    const labelName = existingLabel.name || "";
    if (!isManagedLabel(labelName)) continue;
    // Hand the listed label object through to avoid a getLabel call per label.
    await ensureLabel(labelName, existingLabel);
  }
}
|
|
||||||
|
|
||||||
// Pick the contributor tier for a merged-PR count, or null when no
// threshold is met.
// NOTE(review): `find` returns the first rule whose threshold is satisfied,
// so this assumes contributorTierRules is ordered from highest minMergedPRs
// to lowest — confirm the rule table preserves that ordering.
function selectContributorTier(mergedCount) {
  const matchedTier = contributorTierRules.find((rule) => mergedCount >= rule.minMergedPRs);
  return matchedTier ? matchedTier.label : null;
}
|
|
||||||
|
|
||||||
// Manual governance entry point: a `workflow_dispatch` run audits (default)
// or repairs managed-label metadata across the repository instead of
// labeling a PR, then returns without touching any PR.
if (context.eventName === "workflow_dispatch") {
  const mode = (context.payload.inputs?.mode || "audit").toLowerCase();
  const shouldRepair = mode === "repair";
  const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, {
    owner,
    repo,
    per_page: 100,
  });

  let managedScanned = 0;
  // Labels whose stored color/description differ from the computed metadata.
  const drifts = [];

  for (const existingLabel of repoLabels) {
    const labelName = existingLabel.name || "";
    if (!isManagedLabel(labelName)) continue;
    managedScanned += 1;

    const expectedColor = colorForLabel(labelName);
    const expectedDescription = descriptionForLabel(labelName);
    // Same normalization as ensureLabel: upper-case color, trimmed description.
    const currentColor = (existingLabel.color || "").toUpperCase();
    const currentDescription = (existingLabel.description || "").trim();
    if (currentColor !== expectedColor || currentDescription !== expectedDescription) {
      drifts.push({
        name: labelName,
        currentColor,
        expectedColor,
        currentDescription,
        expectedDescription,
      });
      // Repair mode fixes drift immediately; audit mode only reports it.
      if (shouldRepair) {
        await ensureLabel(labelName, existingLabel);
      }
    }
  }

  // Publish the scan results as an Actions job summary.
  core.summary
    .addHeading("Managed Label Governance", 2)
    .addRaw(`Mode: ${shouldRepair ? "repair" : "audit"}`)
    .addEOL()
    .addRaw(`Managed labels scanned: ${managedScanned}`)
    .addEOL()
    .addRaw(`Drifts found: ${drifts.length}`)
    .addEOL();

  if (drifts.length > 0) {
    // Cap the summary table at 30 rows to keep the job summary readable.
    const sample = drifts.slice(0, 30).map((entry) => [
      entry.name,
      `${entry.currentColor} -> ${entry.expectedColor}`,
      `${entry.currentDescription || "(blank)"} -> ${entry.expectedDescription}`,
    ]);
    core.summary.addTable([
      [{ data: "Label", header: true }, { data: "Color", header: true }, { data: "Description", header: true }],
      ...sample,
    ]);
    if (drifts.length > sample.length) {
      core.summary
        .addRaw(`Additional drifts not shown: ${drifts.length - sample.length}`)
        .addEOL();
    }
  }

  await core.summary.write();

  if (!shouldRepair && drifts.length > 0) {
    core.info(`Managed-label metadata drifts detected: ${drifts.length}. Re-run with mode=repair to auto-fix.`);
  } else if (shouldRepair) {
    core.info(`Managed-label metadata repair applied to ${drifts.length} labels.`);
  } else {
    core.info("No managed-label metadata drift detected.");
  }

  // Dispatch runs never touch PR labels; stop here.
  return;
}
|
|
||||||
|
|
||||||
// --- PR labeling flow (all non-dispatch events) ---

// Every changed file in the PR, fetched across all pages.
const files = await github.paginate(github.rest.pulls.listFiles, {
  owner,
  repo,
  pull_number: pr.number,
  per_page: 100,
});

// Derive "<prefix>: <segment>" module labels from changed paths using the
// moduleNamespaceRules table (each rule: root directory, prefix, core entries).
const detectedModuleLabels = new Set();
for (const file of files) {
  const path = (file.filename || "").toLowerCase();
  for (const rule of moduleNamespaceRules) {
    if (!path.startsWith(rule.root)) continue;

    const relative = path.slice(rule.root.length);
    if (!relative) continue;

    // First path component under the rule root names the segment; strip a
    // ".rs" extension so "foo.rs" and "foo/" map to the same label.
    const first = relative.split("/")[0];
    const firstStem = first.endsWith(".rs") ? first.slice(0, -3) : first;
    let segment = firstStem;

    // Entries declared as core for this rule collapse to the "core" segment.
    if (rule.coreEntries.has(first) || rule.coreEntries.has(firstStem)) {
      segment = "core";
    }

    segment = normalizeLabelSegment(segment);
    if (!segment) continue;

    detectedModuleLabels.add(formatModuleLabel(rule.prefix, segment));
  }
}
|
|
||||||
|
|
||||||
// Provider-specific labels: when provider-adjacent files changed, scan the
// PR title/body plus the changed filenames and patches for known provider
// names and add "provider: <name>" for each hit.
const providerRelevantFiles = files.filter((file) => {
  const path = file.filename || "";
  return (
    path.startsWith("src/providers/") ||
    path.startsWith("src/integrations/") ||
    path.startsWith("src/onboard/") ||
    path.startsWith("src/config/")
  );
});

if (providerRelevantFiles.length > 0) {
  // One lowercase haystack covering title, body, filenames, and diffs.
  const searchableText = [
    pr.title || "",
    pr.body || "",
    ...providerRelevantFiles.map((file) => file.filename || ""),
    ...providerRelevantFiles.map((file) => file.patch || ""),
  ]
    .join("\n")
    .toLowerCase();

  for (const keyword of providerKeywordHints) {
    if (containsKeyword(searchableText, keyword)) {
      detectedModuleLabels.add(formatModuleLabel("provider", keyword));
    }
  }
}

// Same keyword scan for chat-channel names over channel-adjacent files,
// adding "channel: <name>" labels.
const channelRelevantFiles = files.filter((file) => {
  const path = file.filename || "";
  return (
    path.startsWith("src/channels/") ||
    path.startsWith("src/onboard/") ||
    path.startsWith("src/config/")
  );
});

if (channelRelevantFiles.length > 0) {
  const searchableText = [
    pr.title || "",
    pr.body || "",
    ...channelRelevantFiles.map((file) => file.filename || ""),
    ...channelRelevantFiles.map((file) => file.patch || ""),
  ]
    .join("\n")
    .toLowerCase();

  for (const keyword of channelKeywordHints) {
    if (containsKeyword(searchableText, keyword)) {
      detectedModuleLabels.add(formatModuleLabel("channel", keyword));
    }
  }
}
|
|
||||||
|
|
||||||
// Refine the detected module labels (drop redundant "core"), then collapse
// multi-segment prefixes into forced broad path labels.
const refinedModuleLabels = refineModuleLabels(detectedModuleLabels);
const compactedModuleState = compactModuleLabels(refinedModuleLabels);
const selectedModuleLabels = compactedModuleState.moduleLabels;
const forcePathPrefixes = compactedModuleState.forcePathPrefixes;
// Prefixes still represented by at least one specific module label.
const modulePrefixesWithLabels = new Set(
  [...selectedModuleLabels]
    .map((label) => parseModuleLabel(label)?.prefix)
    .filter(Boolean)
);

// Labels currently on the PR.
const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({
  owner,
  repo,
  issue_number: pr.number,
});
const currentLabelNames = currentLabels.map((label) => label.name);
const currentPathLabels = currentLabelNames.filter((label) => managedPathLabelSet.has(label));
// Candidate path labels: ones already on the PR plus ones forced by compaction.
const candidatePathLabels = new Set([...currentPathLabels, ...forcePathPrefixes]);

// Keep a path label when it is "core", was forced above, or has no more
// specific module label covering the same prefix.
const dedupedPathLabels = [...candidatePathLabels].filter((label) => {
  if (label === "core") return true;
  if (forcePathPrefixes.has(label)) return true;
  return !modulePrefixesWithLabels.has(label);
});

// Size: sum added+deleted lines, excluding docs-like files and Cargo.lock.
const excludedLockfiles = new Set(["Cargo.lock"]);
const changedLines = files.reduce((total, file) => {
  const path = file.filename || "";
  if (isDocsLike(path) || excludedLockfiles.has(path)) {
    return total;
  }
  return total + (file.additions || 0) + (file.deletions || 0);
}, 0);

// Thresholds match the size-label descriptions above (XS <=80 ... XL >1000).
let sizeLabel = "size: XL";
if (changedLines <= 80) sizeLabel = "size: XS";
else if (changedLines <= 250) sizeLabel = "size: S";
else if (changedLines <= 500) sizeLabel = "size: M";
else if (changedLines <= 1000) sizeLabel = "size: L";

// Risk: high for security/runtime/gateway/tools/workflow paths; medium for
// any other src/dependency/hook change; low otherwise.
const hasHighRiskPath = files.some((file) => {
  const path = file.filename || "";
  return (
    path.startsWith("src/security/") ||
    path.startsWith("src/runtime/") ||
    path.startsWith("src/gateway/") ||
    path.startsWith("src/tools/") ||
    path.startsWith(".github/workflows/")
  );
});

const hasMediumRiskPath = files.some((file) => {
  const path = file.filename || "";
  return (
    path.startsWith("src/") ||
    path === "Cargo.toml" ||
    path === "Cargo.lock" ||
    path === "deny.toml" ||
    path.startsWith(".githooks/")
  );
});

let riskLabel = "risk: low";
if (hasHighRiskPath) {
  riskLabel = "risk: high";
} else if (hasMediumRiskPath) {
  riskLabel = "risk: medium";
}
|
|
||||||
|
|
||||||
// Re-sync metadata on all managed repository labels before applying any.
await ensureManagedRepoLabelsMetadata();

// Make sure every label this run might apply exists with correct metadata.
const labelsToEnsure = new Set([
  ...sizeLabels,
  ...computedRiskLabels,
  manualRiskOverrideLabel,
  ...managedPathLabels,
  ...contributorTierLabels,
  ...selectedModuleLabels,
]);

for (const label of labelsToEnsure) {
  await ensureLabel(label);
}

// Contributor tier: count the author's merged PRs in this repo via search.
// Bot authors are skipped; search failures only log a warning (best-effort).
let contributorTierLabel = null;
const authorLogin = pr.user?.login;
if (authorLogin && pr.user?.type !== "Bot") {
  try {
    // Only total_count is needed, so a single-result page is enough.
    const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({
      q: `repo:${owner}/${repo} is:pr is:merged author:${authorLogin}`,
      per_page: 1,
    });
    const mergedCount = mergedSearch.total_count || 0;
    contributorTierLabel = selectContributorTier(mergedCount);
  } catch (error) {
    core.warning(`failed to compute contributor tier label: ${error.message}`);
  }
}

// Final assembly: managed label groups are recomputed from scratch; every
// label this automation does not manage is preserved as-is.
const hasManualRiskOverride = currentLabelNames.includes(manualRiskOverrideLabel);
const keepNonManagedLabels = currentLabelNames.filter((label) => {
  if (label === manualRiskOverrideLabel) return true;
  if (contributorTierLabels.includes(label)) return false;
  if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return false;
  if (managedPathLabelSet.has(label)) return false;
  if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return false;
  return true;
});

// With a manual override, keep whichever risk label is already on the PR,
// falling back to the computed one when none is present.
const manualRiskSelection =
  currentLabelNames.find((label) => computedRiskLabels.includes(label)) || riskLabel;

const moduleLabelList = sortModuleLabels([...selectedModuleLabels]);
const contributorLabelList = contributorTierLabel ? [contributorTierLabel] : [];
const selectedRiskLabels = hasManualRiskOverride
  ? sortByPriority([manualRiskSelection, manualRiskOverrideLabel], riskPriorityIndex)
  : sortByPriority([riskLabel], riskPriorityIndex);
const selectedSizeLabels = sortByPriority([sizeLabel], sizePriorityIndex);
const sortedContributorLabels = sortByPriority(contributorLabelList, contributorPriorityIndex);
const sortedPathLabels = sortByPriority(dedupedPathLabels, pathLabelPriorityIndex);
const sortedKeepNonManagedLabels = [...new Set(keepNonManagedLabels)].sort((left, right) =>
  left.localeCompare(right)
);

// Final ordered, deduplicated label list: risk, size, contributor tier,
// module labels, path labels, then any preserved unmanaged labels.
const nextLabels = [
  ...new Set([
    ...selectedRiskLabels,
    ...selectedSizeLabels,
    ...sortedContributorLabels,
    ...moduleLabelList,
    ...sortedPathLabels,
    ...sortedKeepNonManagedLabels,
  ]),
];

// Replace the PR's label set in one call with the computed list.
await github.rest.issues.setLabels({
  owner,
  repo,
  issue_number: pr.number,
  labels: nextLabels,
});
|
|
||||||
211
.github/workflows/main-branch-flow.md
vendored
Normal file
211
.github/workflows/main-branch-flow.md
vendored
Normal file
|
|
@ -0,0 +1,211 @@
|
||||||
|
# Main Branch Delivery Flows
|
||||||
|
|
||||||
|
This document explains what runs when code is proposed to `main`, merged into `main`, and released via tags.
|
||||||
|
|
||||||
|
Use this with:
|
||||||
|
|
||||||
|
- [`docs/ci-map.md`](../../docs/ci-map.md)
|
||||||
|
- [`docs/pr-workflow.md`](../../docs/pr-workflow.md)
|
||||||
|
|
||||||
|
## Event Summary
|
||||||
|
|
||||||
|
| Event | Main workflows |
|
||||||
|
| --- | --- |
|
||||||
|
| PR activity (`pull_request_target`) | `pr-intake-checks.yml`, `pr-labeler.yml`, `pr-auto-response.yml` |
|
||||||
|
| PR activity (`pull_request`) | `ci-run.yml`, `sec-audit.yml`, plus path-scoped `pub-docker-img.yml`, `workflow-sanity.yml`, `pr-label-policy-check.yml` |
|
||||||
|
| Push to `main` | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows |
|
||||||
|
| Tag push (`v*`) | `pub-release.yml`, `pub-docker-img.yml` publish job |
|
||||||
|
| Scheduled/manual | `sec-codeql.yml`, `feature-matrix.yml`, `test-fuzz.yml`, `pr-check-stale.yml`, `pr-check-status.yml`, `sync-contributors.yml`, `test-benchmarks.yml`, `test-e2e.yml` |
|
||||||
|
|
||||||
|
## Runtime and Docker Matrix
|
||||||
|
|
||||||
|
Observed averages below are from recent completed runs (sampled from GitHub Actions on February 17, 2026). Values are directional, not SLA.
|
||||||
|
|
||||||
|
| Workflow | Typical trigger in main flow | Avg runtime | Docker build? | Docker run? | Docker push? |
|
||||||
|
| --- | --- | ---:| --- | --- | --- |
|
||||||
|
| `pr-intake-checks.yml` | PR open/update (`pull_request_target`) | 14.5s | No | No | No |
|
||||||
|
| `pr-labeler.yml` | PR open/update (`pull_request_target`) | 53.7s | No | No | No |
|
||||||
|
| `pr-auto-response.yml` | PR/issue automation | 24.3s | No | No | No |
|
||||||
|
| `ci-run.yml` | PR + push to `main` | 74.7s | No | No | No |
|
||||||
|
| `sec-audit.yml` | PR + push to `main` | 127.2s | No | No | No |
|
||||||
|
| `workflow-sanity.yml` | Workflow-file changes | 34.2s | No | No | No |
|
||||||
|
| `pr-label-policy-check.yml` | Label policy/automation changes | 14.7s | No | No | No |
|
||||||
|
| `pub-docker-img.yml` (`pull_request`) | Docker-related PR changes | 240.4s | Yes | Yes | No |
|
||||||
|
| `pub-docker-img.yml` (`push` tag) | Tag push `v*` | 139.9s | Yes | No | Yes |
|
||||||
|
| `pub-release.yml` | Tag push `v*` | N/A in recent sample | No | No | No |
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
1. `pub-docker-img.yml` is the only workflow in the main PR/push path that builds Docker images.
|
||||||
|
2. Container runtime verification (`docker run`) occurs in PR smoke only.
|
||||||
|
3. Container registry push occurs on tag pushes only.
|
||||||
|
4. `ci-run.yml` "Build (Smoke)" builds Rust binaries, not Docker images.
|
||||||
|
|
||||||
|
## Step-By-Step
|
||||||
|
|
||||||
|
### 1) PR from branch in this repository -> `main`
|
||||||
|
|
||||||
|
1. Contributor opens or updates PR against `main`.
|
||||||
|
2. `pull_request_target` automation runs (typical runtimes are listed in the table above):
|
||||||
|
- `pr-intake-checks.yml` posts intake warnings/errors.
|
||||||
|
- `pr-labeler.yml` sets size/risk/scope labels.
|
||||||
|
- `pr-auto-response.yml` runs first-interaction and label routes.
|
||||||
|
3. `pull_request` CI workflows start:
|
||||||
|
- `ci-run.yml`
|
||||||
|
- `sec-audit.yml`
|
||||||
|
- path-scoped workflows if matching files changed:
|
||||||
|
- `pub-docker-img.yml` (Docker-related paths only)
|
||||||
|
- `workflow-sanity.yml` (workflow files only)
|
||||||
|
- `pr-label-policy-check.yml` (label-policy files only)
|
||||||
|
4. In `ci-run.yml`, `changes` computes:
|
||||||
|
- `docs_only`
|
||||||
|
- `docs_changed`
|
||||||
|
- `rust_changed`
|
||||||
|
- `workflow_changed`
|
||||||
|
5. `build` runs for Rust-impacting changes.
|
||||||
|
6. On PRs, full lint/test/docs checks run when PR has label `ci:full`:
|
||||||
|
- `lint`
|
||||||
|
- `lint-strict-delta`
|
||||||
|
- `test`
|
||||||
|
- `docs-quality`
|
||||||
|
7. If `.github/workflows/**` changed, `workflow-owner-approval` must pass.
|
||||||
|
8. `lint-feedback` posts actionable comment if lint/docs gates fail.
|
||||||
|
9. `CI Required Gate` aggregates results to final pass/fail.
|
||||||
|
10. Maintainer merges PR once checks and review policy are satisfied.
|
||||||
|
11. Merge emits a `push` event on `main` (see scenario 3).
|
||||||
|
|
||||||
|
### 2) PR from fork -> `main`
|
||||||
|
|
||||||
|
1. External contributor opens PR from `fork/<branch>` into `zeroclaw:main`.
|
||||||
|
2. Immediately on `opened`:
|
||||||
|
- `pull_request_target` workflows start with base-repo context and base-repo token:
|
||||||
|
- `pr-intake-checks.yml`
|
||||||
|
- `pr-labeler.yml`
|
||||||
|
- `pr-auto-response.yml`
|
||||||
|
- `pull_request` workflows are queued for the fork head commit:
|
||||||
|
- `ci-run.yml`
|
||||||
|
- `sec-audit.yml`
|
||||||
|
- path-scoped workflows (`pub-docker-img.yml`, `workflow-sanity.yml`, `pr-label-policy-check.yml`) if changed files match.
|
||||||
|
3. Fork-specific permission behavior in `pull_request` workflows:
|
||||||
|
- the workflow token is restricted (largely read-only), so jobs that attempt to write PR comments or statuses can be limited.
|
||||||
|
- secrets from the base repo are not exposed to fork PR `pull_request` jobs.
|
||||||
|
4. Approval gate possibility:
|
||||||
|
- if Actions settings require maintainer approval for fork workflows, the `pull_request` run stays in `action_required`/waiting state until approved.
|
||||||
|
5. Event fan-out after labeling:
|
||||||
|
- `pr-labeler.yml` and manual label changes emit `labeled`/`unlabeled` events.
|
||||||
|
- those events retrigger `pull_request_target` automation (`pr-labeler.yml` and `pr-auto-response.yml`), creating extra run volume/noise.
|
||||||
|
6. When contributor pushes new commits to fork branch (`synchronize`):
|
||||||
|
- reruns: `pr-intake-checks.yml`, `pr-labeler.yml`, `ci-run.yml`, `sec-audit.yml`, and matching path-scoped PR workflows.
|
||||||
|
- does not rerun `pr-auto-response.yml` unless label/open events occur.
|
||||||
|
7. `ci-run.yml` execution details for fork PR:
|
||||||
|
- `changes` computes `docs_only`, `docs_changed`, `rust_changed`, `workflow_changed`.
|
||||||
|
- `build` runs for Rust-impacting changes.
|
||||||
|
- `lint`/`lint-strict-delta`/`test`/`docs-quality` run on PR when `ci:full` label exists.
|
||||||
|
- `workflow-owner-approval` runs when `.github/workflows/**` changed.
|
||||||
|
- `CI Required Gate` emits final pass/fail for the PR head.
|
||||||
|
8. Fork PR merge blockers to check first when diagnosing stalls:
|
||||||
|
- run approval pending for fork workflows.
|
||||||
|
- `workflow-owner-approval` failing on workflow-file changes.
|
||||||
|
- `CI Required Gate` failure caused by upstream jobs.
|
||||||
|
- repeated `pull_request_target` reruns from label churn causing noisy signals.
|
||||||
|
9. After merge, normal `push` workflows on `main` execute (scenario 3).
|
||||||
|
|
||||||
|
### 3) Push to `main` (including after merge)
|
||||||
|
|
||||||
|
1. Commit reaches `main` (usually from a merged PR).
|
||||||
|
2. `ci-run.yml` runs on `push`.
|
||||||
|
3. `sec-audit.yml` runs on `push`.
|
||||||
|
4. Path-filtered workflows run only if touched files match their filters.
|
||||||
|
5. In `ci-run.yml`, push behavior differs from PR behavior:
|
||||||
|
- Rust path: `lint`, `lint-strict-delta`, `test`, `build` are expected.
|
||||||
|
- Docs/non-rust paths: fast-path behavior applies.
|
||||||
|
6. `CI Required Gate` computes overall push result.
|
||||||
|
|
||||||
|
## Docker Publish Logic
|
||||||
|
|
||||||
|
Workflow: `.github/workflows/pub-docker-img.yml`
|
||||||
|
|
||||||
|
### PR behavior
|
||||||
|
|
||||||
|
1. Triggered on `pull_request` to `main` when Docker-related paths change.
|
||||||
|
2. Runs `PR Docker Smoke` job:
|
||||||
|
- Builds local smoke image with Blacksmith builder.
|
||||||
|
- Verifies container with `docker run ... --version`.
|
||||||
|
3. Typical runtime in recent sample: ~240.4s.
|
||||||
|
4. No registry push happens on PR events.
|
||||||
|
|
||||||
|
### Push behavior
|
||||||
|
|
||||||
|
1. `publish` job runs only when `github.event_name == 'push'` and ref starts with `refs/tags/`.
|
||||||
|
2. Tag format expected by workflow trigger is `v*`.
|
||||||
|
3. Login to `ghcr.io` uses `${{ github.actor }}` and `${{ secrets.GITHUB_TOKEN }}`.
|
||||||
|
4. Tag computation includes:
|
||||||
|
- semantic tag from pushed git tag (`vX.Y.Z`)
|
||||||
|
- SHA tag (`sha-<12 chars>`)
|
||||||
|
5. Multi-platform publish is used for tag pushes (`linux/amd64,linux/arm64`).
|
||||||
|
6. Typical runtime in recent sample: ~139.9s.
|
||||||
|
7. Result: pushed image tags under `ghcr.io/<owner>/<repo>`.
|
||||||
|
|
||||||
|
Important: Docker publish does not run on plain `main` pushes anymore.
|
||||||
|
|
||||||
|
## Release Logic
|
||||||
|
|
||||||
|
Workflow: `.github/workflows/pub-release.yml`
|
||||||
|
|
||||||
|
1. Triggered only on tag push `v*`.
|
||||||
|
2. Builds release artifacts across matrix targets.
|
||||||
|
3. Generates SBOM (`CycloneDX` + `SPDX`).
|
||||||
|
4. Generates `SHA256SUMS`.
|
||||||
|
5. Signs artifacts with keyless cosign.
|
||||||
|
6. Publishes GitHub Release with artifacts.
|
||||||
|
|
||||||
|
## Merge/Policy Notes
|
||||||
|
|
||||||
|
1. Workflow-file changes (`.github/workflows/**`) activate owner-approval gate in `ci-run.yml`.
|
||||||
|
2. PR lint/test strictness is intentionally controlled by `ci:full` label.
|
||||||
|
3. `sec-audit.yml` runs on both PR and push, plus scheduled weekly.
|
||||||
|
4. Some workflows are operational and non-merge-path (`pr-check-stale`, `pr-check-status`, `sync-contributors`, etc.).
|
||||||
|
5. Workflow-specific JavaScript helpers are organized under `.github/workflows/scripts/`.
|
||||||
|
|
||||||
|
## Mermaid Diagrams
|
||||||
|
|
||||||
|
### PR to Main (Internal/Fork)
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
flowchart TD
|
||||||
|
A["PR opened or updated -> main"] --> B["pull_request_target lane"]
|
||||||
|
B --> B1["pr-intake-checks.yml"]
|
||||||
|
B --> B2["pr-labeler.yml"]
|
||||||
|
B --> B3["pr-auto-response.yml"]
|
||||||
|
A --> C["pull_request CI lane"]
|
||||||
|
C --> C1["ci-run.yml"]
|
||||||
|
C --> C2["sec-audit.yml"]
|
||||||
|
C --> C3["pub-docker-img.yml (if Docker paths changed)"]
|
||||||
|
C --> C4["workflow-sanity.yml (if workflow files changed)"]
|
||||||
|
C --> C5["pr-label-policy-check.yml (if policy files changed)"]
|
||||||
|
C1 --> D["CI Required Gate"]
|
||||||
|
D --> E{"Checks + review policy pass?"}
|
||||||
|
E -->|No| F["PR stays open"]
|
||||||
|
E -->|Yes| G["Merge PR"]
|
||||||
|
G --> H["push event on main"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Push/Tag Delivery
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
flowchart TD
|
||||||
|
A["Commit reaches main"] --> B["ci-run.yml"]
|
||||||
|
A --> C["sec-audit.yml"]
|
||||||
|
A --> D["path-scoped workflows (if matched)"]
|
||||||
|
T["Tag push v*"] --> R["pub-release.yml"]
|
||||||
|
T --> P["pub-docker-img.yml publish job"]
|
||||||
|
R --> R1["Artifacts + SBOM + checksums + signatures + GitHub Release"]
|
||||||
|
P --> P1["Push ghcr image tags (version + sha)"]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Quick Troubleshooting
|
||||||
|
|
||||||
|
1. Unexpected skipped jobs: inspect `scripts/ci/detect_change_scope.sh` outputs.
|
||||||
|
2. Workflow-change PR blocked: verify `WORKFLOW_OWNER_LOGINS` and approvals.
|
||||||
|
3. Fork PR appears stalled: check whether Actions run approval is pending.
|
||||||
|
4. Docker not published: confirm a `v*` tag push happened (plain `main` push will not publish).
|
||||||
80
.github/workflows/pr-auto-response.yml
vendored
Normal file
80
.github/workflows/pr-auto-response.yml
vendored
Normal file
|
|
@ -0,0 +1,80 @@
|
||||||
|
name: PR Auto Responder
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types: [opened, reopened, labeled, unlabeled]
|
||||||
|
pull_request_target:
|
||||||
|
types: [opened, labeled, unlabeled]
|
||||||
|
|
||||||
|
permissions: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
contributor-tier-issues:
|
||||||
|
if: >-
|
||||||
|
(github.event_name == 'issues' &&
|
||||||
|
(github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) ||
|
||||||
|
(github.event_name == 'pull_request_target' &&
|
||||||
|
(github.event.action == 'labeled' || github.event.action == 'unlabeled'))
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
|
- name: Apply contributor tier label for issue author
|
||||||
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/workflows/scripts/pr_auto_response_contributor_tier.js');
|
||||||
|
await script({ github, context, core });
|
||||||
|
first-interaction:
|
||||||
|
if: github.event.action == 'opened'
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- name: Greet first-time contributors
|
||||||
|
uses: actions/first-interaction@a1db7729b356323c7988c20ed6f0d33fe31297be # v1
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
issue-message: |
|
||||||
|
Thanks for opening this issue.
|
||||||
|
|
||||||
|
Before maintainers triage it, please confirm:
|
||||||
|
- Repro steps are complete and run on latest `main`
|
||||||
|
- Environment details are included (OS, Rust version, ZeroClaw version)
|
||||||
|
- Sensitive values are redacted
|
||||||
|
|
||||||
|
This helps us keep issue throughput high and response latency low.
|
||||||
|
pr-message: |
|
||||||
|
Thanks for contributing to ZeroClaw.
|
||||||
|
|
||||||
|
For faster review, please ensure:
|
||||||
|
- PR template sections are fully completed
|
||||||
|
- `cargo fmt --all -- --check`, `cargo clippy --all-targets -- -D warnings`, and `cargo test` are included
|
||||||
|
- If automation/agents were used heavily, add brief workflow notes
|
||||||
|
- Scope is focused (prefer one concern per PR)
|
||||||
|
|
||||||
|
See `CONTRIBUTING.md` and `docs/pr-workflow.md` for full collaboration rules.
|
||||||
|
|
||||||
|
labeled-routes:
|
||||||
|
if: github.event.action == 'labeled'
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
|
- name: Handle label-driven responses
|
||||||
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/workflows/scripts/pr_auto_response_labeled_routes.js');
|
||||||
|
await script({ github, context, core });
|
||||||
44
.github/workflows/pr-check-stale.yml
vendored
Normal file
44
.github/workflows/pr-check-stale.yml
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
name: Stale PR Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "20 2 * * *"
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
permissions: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
stale:
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
steps:
|
||||||
|
- name: Mark stale issues and pull requests
|
||||||
|
uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
days-before-issue-stale: 21
|
||||||
|
days-before-issue-close: 7
|
||||||
|
days-before-pr-stale: 14
|
||||||
|
days-before-pr-close: 7
|
||||||
|
stale-issue-label: stale
|
||||||
|
stale-pr-label: stale
|
||||||
|
exempt-issue-labels: security,pinned,no-stale,no-pr-hygiene,maintainer
|
||||||
|
exempt-pr-labels: no-stale,no-pr-hygiene,maintainer
|
||||||
|
remove-stale-when-updated: true
|
||||||
|
exempt-all-assignees: true
|
||||||
|
operations-per-run: 300
|
||||||
|
stale-issue-message: |
|
||||||
|
This issue was automatically marked as stale due to inactivity.
|
||||||
|
Please provide an update, reproduction details, or current status to keep it open.
|
||||||
|
close-issue-message: |
|
||||||
|
Closing this issue due to inactivity.
|
||||||
|
If the problem still exists on the latest `main`, please open a new issue with fresh repro steps.
|
||||||
|
close-issue-reason: not_planned
|
||||||
|
stale-pr-message: |
|
||||||
|
This PR was automatically marked as stale due to inactivity.
|
||||||
|
Please rebase/update and post the latest validation results.
|
||||||
|
close-pr-message: |
|
||||||
|
Closing this PR due to inactivity.
|
||||||
|
Maintainers can reopen once the branch is updated and validation is provided.
|
||||||
32
.github/workflows/pr-check-status.yml
vendored
Normal file
32
.github/workflows/pr-check-status.yml
vendored
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
name: PR Hygiene
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "15 */12 * * *"
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
permissions: {}
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: pr-check-status
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
nudge-stale-prs:
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
issues: write
|
||||||
|
env:
|
||||||
|
STALE_HOURS: "48"
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
|
- name: Nudge PRs that need rebase or CI refresh
|
||||||
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/workflows/scripts/pr_check_status_nudge.js');
|
||||||
|
await script({ github, context, core });
|
||||||
184
.github/workflows/pr-hygiene.yml
vendored
184
.github/workflows/pr-hygiene.yml
vendored
|
|
@ -1,184 +0,0 @@
|
||||||
name: PR Hygiene
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "15 */12 * * *"
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: pr-hygiene
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
nudge-stale-prs:
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
issues: write
|
|
||||||
env:
|
|
||||||
STALE_HOURS: "48"
|
|
||||||
steps:
|
|
||||||
- name: Nudge PRs that need rebase or CI refresh
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const staleHours = Number(process.env.STALE_HOURS || "48");
|
|
||||||
const ignoreLabels = new Set(["no-stale", "stale", "maintainer", "no-pr-hygiene"]);
|
|
||||||
const marker = "<!-- pr-hygiene-nudge -->";
|
|
||||||
const owner = context.repo.owner;
|
|
||||||
const repo = context.repo.repo;
|
|
||||||
|
|
||||||
const openPrs = await github.paginate(github.rest.pulls.list, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
state: "open",
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const activePrs = openPrs.filter((pr) => {
|
|
||||||
if (pr.draft) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const labels = new Set((pr.labels || []).map((label) => label.name));
|
|
||||||
return ![...ignoreLabels].some((label) => labels.has(label));
|
|
||||||
});
|
|
||||||
|
|
||||||
core.info(`Scanning ${activePrs.length} open PR(s) for hygiene nudges.`);
|
|
||||||
|
|
||||||
let nudged = 0;
|
|
||||||
let skipped = 0;
|
|
||||||
|
|
||||||
for (const pr of activePrs) {
|
|
||||||
const { data: headCommit } = await github.rest.repos.getCommit({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
ref: pr.head.sha,
|
|
||||||
});
|
|
||||||
|
|
||||||
const headCommitAt =
|
|
||||||
headCommit.commit?.committer?.date || headCommit.commit?.author?.date;
|
|
||||||
if (!headCommitAt) {
|
|
||||||
skipped += 1;
|
|
||||||
core.info(`#${pr.number}: missing head commit timestamp, skipping.`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const ageHours = (Date.now() - new Date(headCommitAt).getTime()) / 3600000;
|
|
||||||
if (ageHours < staleHours) {
|
|
||||||
skipped += 1;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { data: prDetail } = await github.rest.pulls.get({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
pull_number: pr.number,
|
|
||||||
});
|
|
||||||
|
|
||||||
const isBehindBase = prDetail.mergeable_state === "behind";
|
|
||||||
|
|
||||||
const { data: checkRunsData } = await github.rest.checks.listForRef({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
ref: pr.head.sha,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const ciGateRuns = (checkRunsData.check_runs || [])
|
|
||||||
.filter((run) => run.name === "CI Required Gate")
|
|
||||||
.sort((a, b) => {
|
|
||||||
const aTime = new Date(a.started_at || a.completed_at || a.created_at).getTime();
|
|
||||||
const bTime = new Date(b.started_at || b.completed_at || b.created_at).getTime();
|
|
||||||
return bTime - aTime;
|
|
||||||
});
|
|
||||||
|
|
||||||
let ciState = "missing";
|
|
||||||
if (ciGateRuns.length > 0) {
|
|
||||||
const latest = ciGateRuns[0];
|
|
||||||
if (latest.status !== "completed") {
|
|
||||||
ciState = "in_progress";
|
|
||||||
} else if (["success", "neutral", "skipped"].includes(latest.conclusion || "")) {
|
|
||||||
ciState = "success";
|
|
||||||
} else {
|
|
||||||
ciState = String(latest.conclusion || "failure");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const ciMissing = ciState === "missing";
|
|
||||||
const ciFailing = !["success", "in_progress", "missing"].includes(ciState);
|
|
||||||
|
|
||||||
if (!isBehindBase && !ciMissing && !ciFailing) {
|
|
||||||
skipped += 1;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const reasons = [];
|
|
||||||
if (isBehindBase) {
|
|
||||||
reasons.push("- Branch is behind `main` (please rebase or merge the latest base branch).");
|
|
||||||
}
|
|
||||||
if (ciMissing) {
|
|
||||||
reasons.push("- No `CI Required Gate` run was found for the current head commit.");
|
|
||||||
}
|
|
||||||
if (ciFailing) {
|
|
||||||
reasons.push(`- Latest \`CI Required Gate\` result is \`${ciState}\`.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const shortSha = pr.head.sha.slice(0, 12);
|
|
||||||
const body = [
|
|
||||||
marker,
|
|
||||||
`Hi @${pr.user.login}, friendly automation nudge from PR hygiene.`,
|
|
||||||
"",
|
|
||||||
`This PR has had no new commits for **${Math.floor(ageHours)}h** and still needs an update before merge:`,
|
|
||||||
"",
|
|
||||||
...reasons,
|
|
||||||
"",
|
|
||||||
"### Recommended next steps",
|
|
||||||
"1. Rebase your branch on `main`.",
|
|
||||||
"2. Push the updated branch and re-run checks (or use **Re-run failed jobs**).",
|
|
||||||
"3. Post fresh validation output in this PR thread.",
|
|
||||||
"",
|
|
||||||
"Maintainers: apply `no-stale` to opt out for accepted-but-blocked work.",
|
|
||||||
`Head SHA: \`${shortSha}\``,
|
|
||||||
].join("\n");
|
|
||||||
|
|
||||||
const { data: comments } = await github.rest.issues.listComments({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: pr.number,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const existing = comments.find(
|
|
||||||
(comment) => comment.user?.type === "Bot" && comment.body?.includes(marker),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
if (existing.body === body) {
|
|
||||||
skipped += 1;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
await github.rest.issues.updateComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
comment_id: existing.id,
|
|
||||||
body,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await github.rest.issues.createComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: pr.number,
|
|
||||||
body,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
nudged += 1;
|
|
||||||
core.info(`#${pr.number}: hygiene nudge posted/updated.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info(`Done. Nudged=${nudged}, skipped=${skipped}`);
|
|
||||||
30
.github/workflows/pr-intake-checks.yml
vendored
Normal file
30
.github/workflows/pr-intake-checks.yml
vendored
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
name: PR Intake Checks
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
types: [opened, reopened, synchronize, edited, ready_for_review]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: pr-intake-checks-${{ github.event.pull_request.number || github.run_id }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
intake:
|
||||||
|
name: Intake Checks
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
timeout-minutes: 10
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
|
- name: Run safe PR intake checks
|
||||||
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/workflows/scripts/pr_intake_checks.js');
|
||||||
|
await script({ github, context, core });
|
||||||
206
.github/workflows/pr-intake-sanity.yml
vendored
206
.github/workflows/pr-intake-sanity.yml
vendored
|
|
@ -1,206 +0,0 @@
|
||||||
name: PR Intake Sanity
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, reopened, synchronize, edited, ready_for_review]
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: pr-intake-sanity-${{ github.event.pull_request.number || github.run_id }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
intake:
|
|
||||||
name: Intake Sanity
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
timeout-minutes: 10
|
|
||||||
steps:
|
|
||||||
- name: Run safe PR intake checks
|
|
||||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const owner = context.repo.owner;
|
|
||||||
const repo = context.repo.repo;
|
|
||||||
const pr = context.payload.pull_request;
|
|
||||||
if (!pr) return;
|
|
||||||
|
|
||||||
const marker = "<!-- pr-intake-sanity -->";
|
|
||||||
const requiredSections = [
|
|
||||||
"## Summary",
|
|
||||||
"## Validation Evidence (required)",
|
|
||||||
"## Security Impact (required)",
|
|
||||||
"## Privacy and Data Hygiene (required)",
|
|
||||||
"## Rollback Plan (required)",
|
|
||||||
];
|
|
||||||
const body = pr.body || "";
|
|
||||||
|
|
||||||
const missingSections = requiredSections.filter((section) => !body.includes(section));
|
|
||||||
const missingFields = [];
|
|
||||||
const requiredFieldChecks = [
|
|
||||||
["summary problem", /- Problem:\s*\S+/m],
|
|
||||||
["summary why it matters", /- Why it matters:\s*\S+/m],
|
|
||||||
["summary what changed", /- What changed:\s*\S+/m],
|
|
||||||
["validation commands", /Commands and result summary:\s*[\s\S]*```/m],
|
|
||||||
["security risk/mitigation", /- New permissions\/capabilities\?\s*\(`Yes\/No`\):\s*\S+/m],
|
|
||||||
["privacy status", /- Data-hygiene status\s*\(`pass\|needs-follow-up`\):\s*\S+/m],
|
|
||||||
["rollback plan", /- Fast rollback command\/path:\s*\S+/m],
|
|
||||||
];
|
|
||||||
for (const [name, pattern] of requiredFieldChecks) {
|
|
||||||
if (!pattern.test(body)) {
|
|
||||||
missingFields.push(name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const files = await github.paginate(github.rest.pulls.listFiles, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
pull_number: pr.number,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
|
|
||||||
const formatWarnings = [];
|
|
||||||
const dangerousProblems = [];
|
|
||||||
for (const file of files) {
|
|
||||||
const patch = file.patch || "";
|
|
||||||
if (!patch) continue;
|
|
||||||
const lines = patch.split("\n");
|
|
||||||
for (let idx = 0; idx < lines.length; idx += 1) {
|
|
||||||
const line = lines[idx];
|
|
||||||
if (!line.startsWith("+") || line.startsWith("+++")) continue;
|
|
||||||
const added = line.slice(1);
|
|
||||||
const lineNo = idx + 1;
|
|
||||||
if (/\t/.test(added)) {
|
|
||||||
formatWarnings.push(`${file.filename}:patch#${lineNo} contains tab characters`);
|
|
||||||
}
|
|
||||||
if (/[ \t]+$/.test(added)) {
|
|
||||||
formatWarnings.push(`${file.filename}:patch#${lineNo} contains trailing whitespace`);
|
|
||||||
}
|
|
||||||
if (/^(<<<<<<<|=======|>>>>>>>)/.test(added)) {
|
|
||||||
dangerousProblems.push(`${file.filename}:patch#${lineNo} contains merge conflict markers`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowFilesChanged = files
|
|
||||||
.map((file) => file.filename)
|
|
||||||
.filter((name) => name.startsWith(".github/workflows/"));
|
|
||||||
|
|
||||||
const advisoryFindings = [];
|
|
||||||
const blockingFindings = [];
|
|
||||||
if (missingSections.length > 0) {
|
|
||||||
advisoryFindings.push(`Missing required PR template sections: ${missingSections.join(", ")}`);
|
|
||||||
}
|
|
||||||
if (missingFields.length > 0) {
|
|
||||||
advisoryFindings.push(`Incomplete required PR template fields: ${missingFields.join(", ")}`);
|
|
||||||
}
|
|
||||||
if (formatWarnings.length > 0) {
|
|
||||||
advisoryFindings.push(`Formatting issues in added lines (${formatWarnings.length})`);
|
|
||||||
}
|
|
||||||
if (dangerousProblems.length > 0) {
|
|
||||||
blockingFindings.push(`Dangerous patch markers found (${dangerousProblems.length})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const comments = await github.paginate(github.rest.issues.listComments, {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: pr.number,
|
|
||||||
per_page: 100,
|
|
||||||
});
|
|
||||||
const existing = comments.find((comment) => (comment.body || "").includes(marker));
|
|
||||||
|
|
||||||
if (advisoryFindings.length === 0 && blockingFindings.length === 0) {
|
|
||||||
if (existing) {
|
|
||||||
await github.rest.issues.deleteComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
comment_id: existing.id,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
core.info("PR intake sanity checks passed.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`;
|
|
||||||
const advisoryDetails = [];
|
|
||||||
if (formatWarnings.length > 0) {
|
|
||||||
advisoryDetails.push(...formatWarnings.slice(0, 20).map((entry) => `- ${entry}`));
|
|
||||||
if (formatWarnings.length > 20) {
|
|
||||||
advisoryDetails.push(`- ...and ${formatWarnings.length - 20} more issue(s)`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const blockingDetails = [];
|
|
||||||
if (dangerousProblems.length > 0) {
|
|
||||||
blockingDetails.push(...dangerousProblems.slice(0, 20).map((entry) => `- ${entry}`));
|
|
||||||
if (dangerousProblems.length > 20) {
|
|
||||||
blockingDetails.push(`- ...and ${dangerousProblems.length - 20} more issue(s)`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const isBlocking = blockingFindings.length > 0;
|
|
||||||
|
|
||||||
const ownerApprovalNote = workflowFilesChanged.length > 0
|
|
||||||
? [
|
|
||||||
"",
|
|
||||||
"Workflow files changed in this PR:",
|
|
||||||
...workflowFilesChanged.map((name) => `- \`${name}\``),
|
|
||||||
"",
|
|
||||||
"Reminder: workflow changes require owner approval via `CI Required Gate`.",
|
|
||||||
].join("\n")
|
|
||||||
: "";
|
|
||||||
|
|
||||||
const commentBody = [
|
|
||||||
marker,
|
|
||||||
isBlocking
|
|
||||||
? "### PR intake checks failed (blocking)"
|
|
||||||
: "### PR intake checks found warnings (non-blocking)",
|
|
||||||
"",
|
|
||||||
isBlocking
|
|
||||||
? "Fast safe checks found blocking safety issues:"
|
|
||||||
: "Fast safe checks found advisory issues. CI lint/test/build gates still enforce merge quality.",
|
|
||||||
...(blockingFindings.length > 0 ? blockingFindings.map((entry) => `- ${entry}`) : []),
|
|
||||||
...(advisoryFindings.length > 0 ? advisoryFindings.map((entry) => `- ${entry}`) : []),
|
|
||||||
"",
|
|
||||||
"Action items:",
|
|
||||||
"1. Complete required PR template sections/fields.",
|
|
||||||
"2. Remove tabs, trailing whitespace, and merge conflict markers from added lines.",
|
|
||||||
"3. Re-run local checks before pushing:",
|
|
||||||
" - `./scripts/ci/rust_quality_gate.sh`",
|
|
||||||
" - `./scripts/ci/rust_strict_delta_gate.sh`",
|
|
||||||
" - `./scripts/ci/docs_quality_gate.sh`",
|
|
||||||
"",
|
|
||||||
`Run logs: ${runUrl}`,
|
|
||||||
"",
|
|
||||||
"Detected blocking line issues (sample):",
|
|
||||||
...(blockingDetails.length > 0 ? blockingDetails : ["- none"]),
|
|
||||||
"",
|
|
||||||
"Detected advisory line issues (sample):",
|
|
||||||
...(advisoryDetails.length > 0 ? advisoryDetails : ["- none"]),
|
|
||||||
ownerApprovalNote,
|
|
||||||
].join("\n");
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
await github.rest.issues.updateComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
comment_id: existing.id,
|
|
||||||
body: commentBody,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await github.rest.issues.createComment({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: pr.number,
|
|
||||||
body: commentBody,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isBlocking) {
|
|
||||||
core.setFailed("PR intake sanity checks found blocking issues. See sticky comment for details.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info("PR intake sanity checks found advisory issues only.");
|
|
||||||
|
|
@ -4,16 +4,16 @@ on:
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- ".github/label-policy.json"
|
- ".github/label-policy.json"
|
||||||
- ".github/workflows/labeler.yml"
|
- ".github/workflows/pr-labeler.yml"
|
||||||
- ".github/workflows/auto-response.yml"
|
- ".github/workflows/pr-auto-response.yml"
|
||||||
push:
|
push:
|
||||||
paths:
|
paths:
|
||||||
- ".github/label-policy.json"
|
- ".github/label-policy.json"
|
||||||
- ".github/workflows/labeler.yml"
|
- ".github/workflows/pr-labeler.yml"
|
||||||
- ".github/workflows/auto-response.yml"
|
- ".github/workflows/pr-auto-response.yml"
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: label-policy-sanity-${{ github.event.pull_request.number || github.sha }}
|
group: pr-label-policy-check-${{ github.event.pull_request.number || github.sha }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
|
|
@ -60,8 +60,8 @@ jobs:
|
||||||
prev_min = min_merged
|
prev_min = min_merged
|
||||||
|
|
||||||
workflow_paths = [
|
workflow_paths = [
|
||||||
Path('.github/workflows/labeler.yml'),
|
Path('.github/workflows/pr-labeler.yml'),
|
||||||
Path('.github/workflows/auto-response.yml'),
|
Path('.github/workflows/pr-auto-response.yml'),
|
||||||
]
|
]
|
||||||
for workflow in workflow_paths:
|
for workflow in workflow_paths:
|
||||||
text = workflow.read_text(encoding='utf-8')
|
text = workflow.read_text(encoding='utf-8')
|
||||||
48
.github/workflows/pr-labeler.yml
vendored
Normal file
48
.github/workflows/pr-labeler.yml
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
name: PR Labeler
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
types: [opened, reopened, synchronize, edited, labeled, unlabeled]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
mode:
|
||||||
|
description: "Run mode for managed-label governance"
|
||||||
|
required: true
|
||||||
|
default: "audit"
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- audit
|
||||||
|
- repair
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: pr-labeler-${{ github.event.pull_request.number || github.run_id }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
label:
|
||||||
|
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||||
|
timeout-minutes: 10
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||||
|
|
||||||
|
- name: Apply path labels
|
||||||
|
if: github.event_name == 'pull_request_target'
|
||||||
|
uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
sync-labels: true
|
||||||
|
|
||||||
|
- name: Apply size/risk/module labels
|
||||||
|
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/workflows/scripts/pr_labeler.js');
|
||||||
|
await script({ github, context, core });
|
||||||
|
|
@ -10,7 +10,7 @@ on:
|
||||||
- "docker-compose.yml"
|
- "docker-compose.yml"
|
||||||
- "dev/docker-compose.yml"
|
- "dev/docker-compose.yml"
|
||||||
- "dev/sandbox/**"
|
- "dev/sandbox/**"
|
||||||
- ".github/workflows/docker.yml"
|
- ".github/workflows/pub-docker-img.yml"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
|
|
@ -105,9 +105,11 @@ jobs:
|
||||||
- name: Generate SBOM (CycloneDX)
|
- name: Generate SBOM (CycloneDX)
|
||||||
run: |
|
run: |
|
||||||
syft dir:. --source-name zeroclaw -o cyclonedx-json=artifacts/zeroclaw.cdx.json -o spdx-json=artifacts/zeroclaw.spdx.json
|
syft dir:. --source-name zeroclaw -o cyclonedx-json=artifacts/zeroclaw.cdx.json -o spdx-json=artifacts/zeroclaw.spdx.json
|
||||||
echo "### SBOM Generated" >> "$GITHUB_STEP_SUMMARY"
|
{
|
||||||
echo "- CycloneDX: zeroclaw.cdx.json" >> "$GITHUB_STEP_SUMMARY"
|
echo "### SBOM Generated"
|
||||||
echo "- SPDX: zeroclaw.spdx.json" >> "$GITHUB_STEP_SUMMARY"
|
echo "- CycloneDX: zeroclaw.cdx.json"
|
||||||
|
echo "- SPDX: zeroclaw.spdx.json"
|
||||||
|
} >> "$GITHUB_STEP_SUMMARY"
|
||||||
|
|
||||||
- name: Generate SHA256 checksums
|
- name: Generate SHA256 checksums
|
||||||
run: |
|
run: |
|
||||||
83
.github/workflows/scripts/ci_workflow_owner_approval.js
vendored
Normal file
83
.github/workflows/scripts/ci_workflow_owner_approval.js
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
// Extracted from ci-run.yml step: Require owner approval for workflow file changes
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
const prNumber = context.payload.pull_request?.number;
|
||||||
|
const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || "";
|
||||||
|
if (!prNumber) {
|
||||||
|
core.setFailed("Missing pull_request context.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseOwners = ["theonlyhennygod", "willsarg"];
|
||||||
|
const configuredOwners = (process.env.WORKFLOW_OWNER_LOGINS || "")
|
||||||
|
.split(",")
|
||||||
|
.map((login) => login.trim().toLowerCase())
|
||||||
|
.filter(Boolean);
|
||||||
|
const ownerAllowlist = [...new Set([...baseOwners, ...configuredOwners])];
|
||||||
|
|
||||||
|
if (ownerAllowlist.length === 0) {
|
||||||
|
core.setFailed("Workflow owner allowlist is empty.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
core.info(`Workflow owner allowlist: ${ownerAllowlist.join(", ")}`);
|
||||||
|
|
||||||
|
const files = await github.paginate(github.rest.pulls.listFiles, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: prNumber,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const workflowFiles = files
|
||||||
|
.map((file) => file.filename)
|
||||||
|
.filter((name) => name.startsWith(".github/workflows/"));
|
||||||
|
|
||||||
|
if (workflowFiles.length === 0) {
|
||||||
|
core.info("No workflow files changed in this PR.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
core.info(`Workflow files changed:\n- ${workflowFiles.join("\n- ")}`);
|
||||||
|
|
||||||
|
if (prAuthor && ownerAllowlist.includes(prAuthor)) {
|
||||||
|
core.info(`Workflow PR authored by allowlisted owner: @${prAuthor}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const reviews = await github.paginate(github.rest.pulls.listReviews, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: prNumber,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const latestReviewByUser = new Map();
|
||||||
|
for (const review of reviews) {
|
||||||
|
const login = review.user?.login;
|
||||||
|
if (!login) continue;
|
||||||
|
latestReviewByUser.set(login.toLowerCase(), review.state);
|
||||||
|
}
|
||||||
|
|
||||||
|
const approvedUsers = [...latestReviewByUser.entries()]
|
||||||
|
.filter(([, state]) => state === "APPROVED")
|
||||||
|
.map(([login]) => login);
|
||||||
|
|
||||||
|
if (approvedUsers.length === 0) {
|
||||||
|
core.setFailed("Workflow files changed but no approving review is present.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ownerApprover = approvedUsers.find((login) => ownerAllowlist.includes(login));
|
||||||
|
if (!ownerApprover) {
|
||||||
|
core.setFailed(
|
||||||
|
`Workflow files changed. Approvals found (${approvedUsers.join(", ")}), but none match workflow owner allowlist.`,
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
core.info(`Workflow owner approval present: @${ownerApprover}`);
|
||||||
|
|
||||||
|
};
|
||||||
131
.github/workflows/scripts/pr_auto_response_contributor_tier.js
vendored
Normal file
131
.github/workflows/scripts/pr_auto_response_contributor_tier.js
vendored
Normal file
|
|
@ -0,0 +1,131 @@
|
||||||
|
// Extracted from pr-auto-response.yml step: Apply contributor tier label for issue author
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
const issue = context.payload.issue;
|
||||||
|
const pullRequest = context.payload.pull_request;
|
||||||
|
const target = issue ?? pullRequest;
|
||||||
|
async function loadContributorTierPolicy() {
|
||||||
|
const fallback = {
|
||||||
|
contributorTierColor: "2ED9FF",
|
||||||
|
contributorTierRules: [
|
||||||
|
{ label: "distinguished contributor", minMergedPRs: 50 },
|
||||||
|
{ label: "principal contributor", minMergedPRs: 20 },
|
||||||
|
{ label: "experienced contributor", minMergedPRs: 10 },
|
||||||
|
{ label: "trusted contributor", minMergedPRs: 5 },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
const { data } = await github.rest.repos.getContent({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
path: ".github/label-policy.json",
|
||||||
|
ref: context.payload.repository?.default_branch || "main",
|
||||||
|
});
|
||||||
|
const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8"));
|
||||||
|
const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({
|
||||||
|
label: String(entry.label || "").trim(),
|
||||||
|
minMergedPRs: Number(entry.min_merged_prs || 0),
|
||||||
|
}));
|
||||||
|
const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase();
|
||||||
|
if (!contributorTierColor || contributorTierRules.length === 0) {
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
return { contributorTierColor, contributorTierRules };
|
||||||
|
} catch (error) {
|
||||||
|
core.warning(`failed to load .github/label-policy.json, using fallback policy: ${error.message}`);
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy();
|
||||||
|
const contributorTierLabels = contributorTierRules.map((rule) => rule.label);
|
||||||
|
const managedContributorLabels = new Set(contributorTierLabels);
|
||||||
|
const action = context.payload.action;
|
||||||
|
const changedLabel = context.payload.label?.name;
|
||||||
|
|
||||||
|
if (!target) return;
|
||||||
|
if ((action === "labeled" || action === "unlabeled") && !managedContributorLabels.has(changedLabel)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const author = target.user;
|
||||||
|
if (!author || author.type === "Bot") return;
|
||||||
|
|
||||||
|
function contributorTierDescription(rule) {
|
||||||
|
return `Contributor with ${rule.minMergedPRs}+ merged PRs.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function ensureContributorTierLabels() {
|
||||||
|
for (const rule of contributorTierRules) {
|
||||||
|
const label = rule.label;
|
||||||
|
const expectedDescription = contributorTierDescription(rule);
|
||||||
|
try {
|
||||||
|
const { data: existing } = await github.rest.issues.getLabel({ owner, repo, name: label });
|
||||||
|
const currentColor = (existing.color || "").toUpperCase();
|
||||||
|
const currentDescription = (existing.description || "").trim();
|
||||||
|
if (currentColor !== contributorTierColor || currentDescription !== expectedDescription) {
|
||||||
|
await github.rest.issues.updateLabel({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
name: label,
|
||||||
|
new_name: label,
|
||||||
|
color: contributorTierColor,
|
||||||
|
description: expectedDescription,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (error.status !== 404) throw error;
|
||||||
|
await github.rest.issues.createLabel({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
name: label,
|
||||||
|
color: contributorTierColor,
|
||||||
|
description: expectedDescription,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function selectContributorTier(mergedCount) {
|
||||||
|
const matchedTier = contributorTierRules.find((rule) => mergedCount >= rule.minMergedPRs);
|
||||||
|
return matchedTier ? matchedTier.label : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
let contributorTierLabel = null;
|
||||||
|
try {
|
||||||
|
const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({
|
||||||
|
q: `repo:${owner}/${repo} is:pr is:merged author:${author.login}`,
|
||||||
|
per_page: 1,
|
||||||
|
});
|
||||||
|
const mergedCount = mergedSearch.total_count || 0;
|
||||||
|
contributorTierLabel = selectContributorTier(mergedCount);
|
||||||
|
} catch (error) {
|
||||||
|
core.warning(`failed to evaluate contributor tier status: ${error.message}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await ensureContributorTierLabels();
|
||||||
|
|
||||||
|
const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: target.number,
|
||||||
|
});
|
||||||
|
const keepLabels = currentLabels
|
||||||
|
.map((label) => label.name)
|
||||||
|
.filter((label) => !contributorTierLabels.includes(label));
|
||||||
|
|
||||||
|
if (contributorTierLabel) {
|
||||||
|
keepLabels.push(contributorTierLabel);
|
||||||
|
}
|
||||||
|
|
||||||
|
await github.rest.issues.setLabels({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: target.number,
|
||||||
|
labels: [...new Set(keepLabels)],
|
||||||
|
});
|
||||||
|
|
||||||
|
};
|
||||||
94
.github/workflows/scripts/pr_auto_response_labeled_routes.js
vendored
Normal file
94
.github/workflows/scripts/pr_auto_response_labeled_routes.js
vendored
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
// Extracted from pr-auto-response.yml step: Handle label-driven responses
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const label = context.payload.label?.name;
|
||||||
|
if (!label) return;
|
||||||
|
|
||||||
|
const issue = context.payload.issue;
|
||||||
|
const pullRequest = context.payload.pull_request;
|
||||||
|
const target = issue ?? pullRequest;
|
||||||
|
if (!target) return;
|
||||||
|
|
||||||
|
const isIssue = Boolean(issue);
|
||||||
|
const issueNumber = target.number;
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
|
||||||
|
const rules = [
|
||||||
|
{
|
||||||
|
label: "r:support",
|
||||||
|
close: true,
|
||||||
|
closeIssuesOnly: true,
|
||||||
|
closeReason: "not_planned",
|
||||||
|
message:
|
||||||
|
"This looks like a usage/support request. Please use README + docs first, then open a focused bug with repro details if behavior is incorrect.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "r:needs-repro",
|
||||||
|
close: false,
|
||||||
|
message:
|
||||||
|
"Thanks for the report. Please add deterministic repro steps, exact environment, and redacted logs so maintainers can triage quickly.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "invalid",
|
||||||
|
close: true,
|
||||||
|
closeIssuesOnly: true,
|
||||||
|
closeReason: "not_planned",
|
||||||
|
message:
|
||||||
|
"Closing as invalid based on current information. If this is still relevant, open a new issue with updated evidence and reproducible steps.",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "duplicate",
|
||||||
|
close: true,
|
||||||
|
closeIssuesOnly: true,
|
||||||
|
closeReason: "not_planned",
|
||||||
|
message:
|
||||||
|
"Closing as duplicate. Please continue discussion in the canonical linked issue/PR.",
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const rule = rules.find((entry) => entry.label === label);
|
||||||
|
if (!rule) return;
|
||||||
|
|
||||||
|
const marker = `<!-- auto-response:${rule.label} -->`;
|
||||||
|
const comments = await github.paginate(github.rest.issues.listComments, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: issueNumber,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const alreadyCommented = comments.some((comment) =>
|
||||||
|
(comment.body || "").includes(marker)
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!alreadyCommented) {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: issueNumber,
|
||||||
|
body: `${rule.message}\n\n${marker}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!rule.close) return;
|
||||||
|
if (rule.closeIssuesOnly && !isIssue) return;
|
||||||
|
if (target.state === "closed") return;
|
||||||
|
|
||||||
|
if (isIssue) {
|
||||||
|
await github.rest.issues.update({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: issueNumber,
|
||||||
|
state: "closed",
|
||||||
|
state_reason: rule.closeReason || "not_planned",
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.update({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: issueNumber,
|
||||||
|
state: "closed",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
161
.github/workflows/scripts/pr_check_status_nudge.js
vendored
Normal file
161
.github/workflows/scripts/pr_check_status_nudge.js
vendored
Normal file
|
|
@ -0,0 +1,161 @@
|
||||||
|
// Extracted from pr-check-status.yml step: Nudge PRs that need rebase or CI refresh
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const staleHours = Number(process.env.STALE_HOURS || "48");
|
||||||
|
const ignoreLabels = new Set(["no-stale", "stale", "maintainer", "no-pr-hygiene"]);
|
||||||
|
const marker = "<!-- pr-hygiene-nudge -->";
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
|
||||||
|
const openPrs = await github.paginate(github.rest.pulls.list, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
state: "open",
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const activePrs = openPrs.filter((pr) => {
|
||||||
|
if (pr.draft) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const labels = new Set((pr.labels || []).map((label) => label.name));
|
||||||
|
return ![...ignoreLabels].some((label) => labels.has(label));
|
||||||
|
});
|
||||||
|
|
||||||
|
core.info(`Scanning ${activePrs.length} open PR(s) for hygiene nudges.`);
|
||||||
|
|
||||||
|
let nudged = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
|
||||||
|
for (const pr of activePrs) {
|
||||||
|
const { data: headCommit } = await github.rest.repos.getCommit({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
ref: pr.head.sha,
|
||||||
|
});
|
||||||
|
|
||||||
|
const headCommitAt =
|
||||||
|
headCommit.commit?.committer?.date || headCommit.commit?.author?.date;
|
||||||
|
if (!headCommitAt) {
|
||||||
|
skipped += 1;
|
||||||
|
core.info(`#${pr.number}: missing head commit timestamp, skipping.`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ageHours = (Date.now() - new Date(headCommitAt).getTime()) / 3600000;
|
||||||
|
if (ageHours < staleHours) {
|
||||||
|
skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { data: prDetail } = await github.rest.pulls.get({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: pr.number,
|
||||||
|
});
|
||||||
|
|
||||||
|
const isBehindBase = prDetail.mergeable_state === "behind";
|
||||||
|
|
||||||
|
const { data: checkRunsData } = await github.rest.checks.listForRef({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
ref: pr.head.sha,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const ciGateRuns = (checkRunsData.check_runs || [])
|
||||||
|
.filter((run) => run.name === "CI Required Gate")
|
||||||
|
.sort((a, b) => {
|
||||||
|
const aTime = new Date(a.started_at || a.completed_at || a.created_at).getTime();
|
||||||
|
const bTime = new Date(b.started_at || b.completed_at || b.created_at).getTime();
|
||||||
|
return bTime - aTime;
|
||||||
|
});
|
||||||
|
|
||||||
|
let ciState = "missing";
|
||||||
|
if (ciGateRuns.length > 0) {
|
||||||
|
const latest = ciGateRuns[0];
|
||||||
|
if (latest.status !== "completed") {
|
||||||
|
ciState = "in_progress";
|
||||||
|
} else if (["success", "neutral", "skipped"].includes(latest.conclusion || "")) {
|
||||||
|
ciState = "success";
|
||||||
|
} else {
|
||||||
|
ciState = String(latest.conclusion || "failure");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const ciMissing = ciState === "missing";
|
||||||
|
const ciFailing = !["success", "in_progress", "missing"].includes(ciState);
|
||||||
|
|
||||||
|
if (!isBehindBase && !ciMissing && !ciFailing) {
|
||||||
|
skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const reasons = [];
|
||||||
|
if (isBehindBase) {
|
||||||
|
reasons.push("- Branch is behind `main` (please rebase or merge the latest base branch).");
|
||||||
|
}
|
||||||
|
if (ciMissing) {
|
||||||
|
reasons.push("- No `CI Required Gate` run was found for the current head commit.");
|
||||||
|
}
|
||||||
|
if (ciFailing) {
|
||||||
|
reasons.push(`- Latest \`CI Required Gate\` result is \`${ciState}\`.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const shortSha = pr.head.sha.slice(0, 12);
|
||||||
|
const body = [
|
||||||
|
marker,
|
||||||
|
`Hi @${pr.user.login}, friendly automation nudge from PR hygiene.`,
|
||||||
|
"",
|
||||||
|
`This PR has had no new commits for **${Math.floor(ageHours)}h** and still needs an update before merge:`,
|
||||||
|
"",
|
||||||
|
...reasons,
|
||||||
|
"",
|
||||||
|
"### Recommended next steps",
|
||||||
|
"1. Rebase your branch on `main`.",
|
||||||
|
"2. Push the updated branch and re-run checks (or use **Re-run failed jobs**).",
|
||||||
|
"3. Post fresh validation output in this PR thread.",
|
||||||
|
"",
|
||||||
|
"Maintainers: apply `no-stale` to opt out for accepted-but-blocked work.",
|
||||||
|
`Head SHA: \`${shortSha}\``,
|
||||||
|
].join("\n");
|
||||||
|
|
||||||
|
const { data: comments } = await github.rest.issues.listComments({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const existing = comments.find(
|
||||||
|
(comment) => comment.user?.type === "Bot" && comment.body?.includes(marker),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
if (existing.body === body) {
|
||||||
|
skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
comment_id: existing.id,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
nudged += 1;
|
||||||
|
core.info(`#${pr.number}: hygiene nudge posted/updated.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
core.info(`Done. Nudged=${nudged}, skipped=${skipped}`);
|
||||||
|
};
|
||||||
190
.github/workflows/scripts/pr_intake_checks.js
vendored
Normal file
190
.github/workflows/scripts/pr_intake_checks.js
vendored
Normal file
|
|
@ -0,0 +1,190 @@
|
||||||
|
// Run safe intake checks for PR events and maintain a single sticky comment.
|
||||||
|
// Used by .github/workflows/pr-intake-checks.yml via actions/github-script.
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
const pr = context.payload.pull_request;
|
||||||
|
if (!pr) return;
|
||||||
|
|
||||||
|
const marker = "<!-- pr-intake-checks -->";
|
||||||
|
const legacyMarker = "<!-- pr-intake-sanity -->";
|
||||||
|
const requiredSections = [
|
||||||
|
"## Summary",
|
||||||
|
"## Validation Evidence (required)",
|
||||||
|
"## Security Impact (required)",
|
||||||
|
"## Privacy and Data Hygiene (required)",
|
||||||
|
"## Rollback Plan (required)",
|
||||||
|
];
|
||||||
|
const body = pr.body || "";
|
||||||
|
|
||||||
|
const missingSections = requiredSections.filter((section) => !body.includes(section));
|
||||||
|
const missingFields = [];
|
||||||
|
const requiredFieldChecks = [
|
||||||
|
["summary problem", /- Problem:\s*\S+/m],
|
||||||
|
["summary why it matters", /- Why it matters:\s*\S+/m],
|
||||||
|
["summary what changed", /- What changed:\s*\S+/m],
|
||||||
|
["validation commands", /Commands and result summary:\s*[\s\S]*```/m],
|
||||||
|
["security risk/mitigation", /- New permissions\/capabilities\?\s*\(`Yes\/No`\):\s*\S+/m],
|
||||||
|
["privacy status", /- Data-hygiene status\s*\(`pass\|needs-follow-up`\):\s*\S+/m],
|
||||||
|
["rollback plan", /- Fast rollback command\/path:\s*\S+/m],
|
||||||
|
];
|
||||||
|
for (const [name, pattern] of requiredFieldChecks) {
|
||||||
|
if (!pattern.test(body)) {
|
||||||
|
missingFields.push(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const files = await github.paginate(github.rest.pulls.listFiles, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: pr.number,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const formatWarnings = [];
|
||||||
|
const dangerousProblems = [];
|
||||||
|
for (const file of files) {
|
||||||
|
const patch = file.patch || "";
|
||||||
|
if (!patch) continue;
|
||||||
|
const lines = patch.split("\n");
|
||||||
|
for (let idx = 0; idx < lines.length; idx += 1) {
|
||||||
|
const line = lines[idx];
|
||||||
|
if (!line.startsWith("+") || line.startsWith("+++")) continue;
|
||||||
|
const added = line.slice(1);
|
||||||
|
const lineNo = idx + 1;
|
||||||
|
if (/\t/.test(added)) {
|
||||||
|
formatWarnings.push(`${file.filename}:patch#${lineNo} contains tab characters`);
|
||||||
|
}
|
||||||
|
if (/[ \t]+$/.test(added)) {
|
||||||
|
formatWarnings.push(`${file.filename}:patch#${lineNo} contains trailing whitespace`);
|
||||||
|
}
|
||||||
|
if (/^(<<<<<<<|=======|>>>>>>>)/.test(added)) {
|
||||||
|
dangerousProblems.push(`${file.filename}:patch#${lineNo} contains merge conflict markers`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowFilesChanged = files
|
||||||
|
.map((file) => file.filename)
|
||||||
|
.filter((name) => name.startsWith(".github/workflows/"));
|
||||||
|
|
||||||
|
const advisoryFindings = [];
|
||||||
|
const blockingFindings = [];
|
||||||
|
if (missingSections.length > 0) {
|
||||||
|
advisoryFindings.push(`Missing required PR template sections: ${missingSections.join(", ")}`);
|
||||||
|
}
|
||||||
|
if (missingFields.length > 0) {
|
||||||
|
advisoryFindings.push(`Incomplete required PR template fields: ${missingFields.join(", ")}`);
|
||||||
|
}
|
||||||
|
if (formatWarnings.length > 0) {
|
||||||
|
advisoryFindings.push(`Formatting issues in added lines (${formatWarnings.length})`);
|
||||||
|
}
|
||||||
|
if (dangerousProblems.length > 0) {
|
||||||
|
blockingFindings.push(`Dangerous patch markers found (${dangerousProblems.length})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const comments = await github.paginate(github.rest.issues.listComments, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
const existing = comments.find((comment) => {
|
||||||
|
const body = comment.body || "";
|
||||||
|
return body.includes(marker) || body.includes(legacyMarker);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (advisoryFindings.length === 0 && blockingFindings.length === 0) {
|
||||||
|
if (existing) {
|
||||||
|
await github.rest.issues.deleteComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
comment_id: existing.id,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
core.info("PR intake sanity checks passed.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`;
|
||||||
|
const advisoryDetails = [];
|
||||||
|
if (formatWarnings.length > 0) {
|
||||||
|
advisoryDetails.push(...formatWarnings.slice(0, 20).map((entry) => `- ${entry}`));
|
||||||
|
if (formatWarnings.length > 20) {
|
||||||
|
advisoryDetails.push(`- ...and ${formatWarnings.length - 20} more issue(s)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const blockingDetails = [];
|
||||||
|
if (dangerousProblems.length > 0) {
|
||||||
|
blockingDetails.push(...dangerousProblems.slice(0, 20).map((entry) => `- ${entry}`));
|
||||||
|
if (dangerousProblems.length > 20) {
|
||||||
|
blockingDetails.push(`- ...and ${dangerousProblems.length - 20} more issue(s)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isBlocking = blockingFindings.length > 0;
|
||||||
|
|
||||||
|
const ownerApprovalNote = workflowFilesChanged.length > 0
|
||||||
|
? [
|
||||||
|
"",
|
||||||
|
"Workflow files changed in this PR:",
|
||||||
|
...workflowFilesChanged.map((name) => `- \`${name}\``),
|
||||||
|
"",
|
||||||
|
"Reminder: workflow changes require owner approval via `CI Required Gate`.",
|
||||||
|
].join("\n")
|
||||||
|
: "";
|
||||||
|
|
||||||
|
const commentBody = [
|
||||||
|
marker,
|
||||||
|
isBlocking
|
||||||
|
? "### PR intake checks failed (blocking)"
|
||||||
|
: "### PR intake checks found warnings (non-blocking)",
|
||||||
|
"",
|
||||||
|
isBlocking
|
||||||
|
? "Fast safe checks found blocking safety issues:"
|
||||||
|
: "Fast safe checks found advisory issues. CI lint/test/build gates still enforce merge quality.",
|
||||||
|
...(blockingFindings.length > 0 ? blockingFindings.map((entry) => `- ${entry}`) : []),
|
||||||
|
...(advisoryFindings.length > 0 ? advisoryFindings.map((entry) => `- ${entry}`) : []),
|
||||||
|
"",
|
||||||
|
"Action items:",
|
||||||
|
"1. Complete required PR template sections/fields.",
|
||||||
|
"2. Remove tabs, trailing whitespace, and merge conflict markers from added lines.",
|
||||||
|
"3. Re-run local checks before pushing:",
|
||||||
|
" - `./scripts/ci/rust_quality_gate.sh`",
|
||||||
|
" - `./scripts/ci/rust_strict_delta_gate.sh`",
|
||||||
|
" - `./scripts/ci/docs_quality_gate.sh`",
|
||||||
|
"",
|
||||||
|
`Run logs: ${runUrl}`,
|
||||||
|
"",
|
||||||
|
"Detected blocking line issues (sample):",
|
||||||
|
...(blockingDetails.length > 0 ? blockingDetails : ["- none"]),
|
||||||
|
"",
|
||||||
|
"Detected advisory line issues (sample):",
|
||||||
|
...(advisoryDetails.length > 0 ? advisoryDetails : ["- none"]),
|
||||||
|
ownerApprovalNote,
|
||||||
|
].join("\n");
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
comment_id: existing.id,
|
||||||
|
body: commentBody,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
body: commentBody,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isBlocking) {
|
||||||
|
core.setFailed("PR intake sanity checks found blocking issues. See sticky comment for details.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
core.info("PR intake sanity checks found advisory issues only.");
|
||||||
|
};
|
||||||
803
.github/workflows/scripts/pr_labeler.js
vendored
Normal file
803
.github/workflows/scripts/pr_labeler.js
vendored
Normal file
|
|
@ -0,0 +1,803 @@
|
||||||
|
// Apply managed PR labels (size/risk/path/module/contributor tiers).
|
||||||
|
// Extracted from pr-labeler workflow inline github-script for maintainability.
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const pr = context.payload.pull_request;
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
const action = context.payload.action;
|
||||||
|
const changedLabel = context.payload.label?.name;
|
||||||
|
|
||||||
|
const sizeLabels = ["size: XS", "size: S", "size: M", "size: L", "size: XL"];
|
||||||
|
const computedRiskLabels = ["risk: low", "risk: medium", "risk: high"];
|
||||||
|
const manualRiskOverrideLabel = "risk: manual";
|
||||||
|
const managedEnforcedLabels = new Set([
|
||||||
|
...sizeLabels,
|
||||||
|
manualRiskOverrideLabel,
|
||||||
|
...computedRiskLabels,
|
||||||
|
]);
|
||||||
|
if ((action === "labeled" || action === "unlabeled") && !managedEnforcedLabels.has(changedLabel)) {
|
||||||
|
core.info(`skip non-size/risk label event: ${changedLabel || "unknown"}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadContributorTierPolicy() {
|
||||||
|
const fallback = {
|
||||||
|
contributorTierColor: "2ED9FF",
|
||||||
|
contributorTierRules: [
|
||||||
|
{ label: "distinguished contributor", minMergedPRs: 50 },
|
||||||
|
{ label: "principal contributor", minMergedPRs: 20 },
|
||||||
|
{ label: "experienced contributor", minMergedPRs: 10 },
|
||||||
|
{ label: "trusted contributor", minMergedPRs: 5 },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
const { data } = await github.rest.repos.getContent({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
path: ".github/label-policy.json",
|
||||||
|
ref: context.payload.repository?.default_branch || "main",
|
||||||
|
});
|
||||||
|
const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8"));
|
||||||
|
const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({
|
||||||
|
label: String(entry.label || "").trim(),
|
||||||
|
minMergedPRs: Number(entry.min_merged_prs || 0),
|
||||||
|
}));
|
||||||
|
const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase();
|
||||||
|
if (!contributorTierColor || contributorTierRules.length === 0) {
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
return { contributorTierColor, contributorTierRules };
|
||||||
|
} catch (error) {
|
||||||
|
core.warning(`failed to load .github/label-policy.json, using fallback policy: ${error.message}`);
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy();
|
||||||
|
const contributorTierLabels = contributorTierRules.map((rule) => rule.label);
|
||||||
|
|
||||||
|
const managedPathLabels = [
|
||||||
|
"docs",
|
||||||
|
"dependencies",
|
||||||
|
"ci",
|
||||||
|
"core",
|
||||||
|
"agent",
|
||||||
|
"channel",
|
||||||
|
"config",
|
||||||
|
"cron",
|
||||||
|
"daemon",
|
||||||
|
"doctor",
|
||||||
|
"gateway",
|
||||||
|
"health",
|
||||||
|
"heartbeat",
|
||||||
|
"integration",
|
||||||
|
"memory",
|
||||||
|
"observability",
|
||||||
|
"onboard",
|
||||||
|
"provider",
|
||||||
|
"runtime",
|
||||||
|
"security",
|
||||||
|
"service",
|
||||||
|
"skillforge",
|
||||||
|
"skills",
|
||||||
|
"tool",
|
||||||
|
"tunnel",
|
||||||
|
"tests",
|
||||||
|
"scripts",
|
||||||
|
"dev",
|
||||||
|
];
|
||||||
|
const managedPathLabelSet = new Set(managedPathLabels);
|
||||||
|
|
||||||
|
const moduleNamespaceRules = [
|
||||||
|
{ root: "src/agent/", prefix: "agent", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/channels/", prefix: "channel", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/config/", prefix: "config", coreEntries: new Set(["mod.rs", "schema.rs"]) },
|
||||||
|
{ root: "src/cron/", prefix: "cron", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/daemon/", prefix: "daemon", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/doctor/", prefix: "doctor", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/gateway/", prefix: "gateway", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/health/", prefix: "health", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/heartbeat/", prefix: "heartbeat", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/integrations/", prefix: "integration", coreEntries: new Set(["mod.rs", "registry.rs"]) },
|
||||||
|
{ root: "src/memory/", prefix: "memory", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/observability/", prefix: "observability", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/onboard/", prefix: "onboard", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/providers/", prefix: "provider", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/runtime/", prefix: "runtime", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/security/", prefix: "security", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/service/", prefix: "service", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/skillforge/", prefix: "skillforge", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/skills/", prefix: "skills", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
{ root: "src/tools/", prefix: "tool", coreEntries: new Set(["mod.rs", "traits.rs"]) },
|
||||||
|
{ root: "src/tunnel/", prefix: "tunnel", coreEntries: new Set(["mod.rs"]) },
|
||||||
|
];
|
||||||
|
const managedModulePrefixes = [...new Set(moduleNamespaceRules.map((rule) => `${rule.prefix}:`))];
|
||||||
|
const orderedOtherLabelStyles = [
|
||||||
|
{ label: "health", color: "8EC9B8" },
|
||||||
|
{ label: "tool", color: "7FC4B6" },
|
||||||
|
{ label: "agent", color: "86C4A2" },
|
||||||
|
{ label: "memory", color: "8FCB99" },
|
||||||
|
{ label: "channel", color: "7EB6F2" },
|
||||||
|
{ label: "service", color: "95C7B6" },
|
||||||
|
{ label: "integration", color: "8DC9AE" },
|
||||||
|
{ label: "tunnel", color: "9FC8B3" },
|
||||||
|
{ label: "config", color: "AABCD0" },
|
||||||
|
{ label: "observability", color: "84C9D0" },
|
||||||
|
{ label: "docs", color: "8FBBE0" },
|
||||||
|
{ label: "dev", color: "B9C1CC" },
|
||||||
|
{ label: "tests", color: "9DC8C7" },
|
||||||
|
{ label: "skills", color: "BFC89B" },
|
||||||
|
{ label: "skillforge", color: "C9C39B" },
|
||||||
|
{ label: "provider", color: "958DF0" },
|
||||||
|
{ label: "runtime", color: "A3ADD8" },
|
||||||
|
{ label: "heartbeat", color: "C0C88D" },
|
||||||
|
{ label: "daemon", color: "C8C498" },
|
||||||
|
{ label: "doctor", color: "C1CF9D" },
|
||||||
|
{ label: "onboard", color: "D2BF86" },
|
||||||
|
{ label: "cron", color: "D2B490" },
|
||||||
|
{ label: "ci", color: "AEB4CE" },
|
||||||
|
{ label: "dependencies", color: "9FB1DE" },
|
||||||
|
{ label: "gateway", color: "B5A8E5" },
|
||||||
|
{ label: "security", color: "E58D85" },
|
||||||
|
{ label: "core", color: "C8A99B" },
|
||||||
|
{ label: "scripts", color: "C9B49F" },
|
||||||
|
];
|
||||||
|
const otherLabelDisplayOrder = orderedOtherLabelStyles.map((entry) => entry.label);
|
||||||
|
const modulePrefixSet = new Set(moduleNamespaceRules.map((rule) => rule.prefix));
|
||||||
|
const modulePrefixPriority = otherLabelDisplayOrder.filter((label) => modulePrefixSet.has(label));
|
||||||
|
const pathLabelPriority = [...otherLabelDisplayOrder];
|
||||||
|
const riskDisplayOrder = ["risk: high", "risk: medium", "risk: low", "risk: manual"];
|
||||||
|
const sizeDisplayOrder = ["size: XS", "size: S", "size: M", "size: L", "size: XL"];
|
||||||
|
const contributorDisplayOrder = [
|
||||||
|
"distinguished contributor",
|
||||||
|
"principal contributor",
|
||||||
|
"experienced contributor",
|
||||||
|
"trusted contributor",
|
||||||
|
];
|
||||||
|
const modulePrefixPriorityIndex = new Map(
|
||||||
|
modulePrefixPriority.map((prefix, index) => [prefix, index])
|
||||||
|
);
|
||||||
|
const pathLabelPriorityIndex = new Map(
|
||||||
|
pathLabelPriority.map((label, index) => [label, index])
|
||||||
|
);
|
||||||
|
const riskPriorityIndex = new Map(
|
||||||
|
riskDisplayOrder.map((label, index) => [label, index])
|
||||||
|
);
|
||||||
|
const sizePriorityIndex = new Map(
|
||||||
|
sizeDisplayOrder.map((label, index) => [label, index])
|
||||||
|
);
|
||||||
|
const contributorPriorityIndex = new Map(
|
||||||
|
contributorDisplayOrder.map((label, index) => [label, index])
|
||||||
|
);
|
||||||
|
|
||||||
|
const otherLabelColors = Object.fromEntries(
|
||||||
|
orderedOtherLabelStyles.map((entry) => [entry.label, entry.color])
|
||||||
|
);
|
||||||
|
const staticLabelColors = {
|
||||||
|
"size: XS": "E7CDD3",
|
||||||
|
"size: S": "E1BEC7",
|
||||||
|
"size: M": "DBB0BB",
|
||||||
|
"size: L": "D4A2AF",
|
||||||
|
"size: XL": "CE94A4",
|
||||||
|
"risk: low": "97D3A6",
|
||||||
|
"risk: medium": "E4C47B",
|
||||||
|
"risk: high": "E98E88",
|
||||||
|
"risk: manual": "B7A4E0",
|
||||||
|
...otherLabelColors,
|
||||||
|
};
|
||||||
|
const staticLabelDescriptions = {
|
||||||
|
"size: XS": "Auto size: <=80 non-doc changed lines.",
|
||||||
|
"size: S": "Auto size: 81-250 non-doc changed lines.",
|
||||||
|
"size: M": "Auto size: 251-500 non-doc changed lines.",
|
||||||
|
"size: L": "Auto size: 501-1000 non-doc changed lines.",
|
||||||
|
"size: XL": "Auto size: >1000 non-doc changed lines.",
|
||||||
|
"risk: low": "Auto risk: docs/chore-only paths.",
|
||||||
|
"risk: medium": "Auto risk: src/** or dependency/config changes.",
|
||||||
|
"risk: high": "Auto risk: security/runtime/gateway/tools/workflows.",
|
||||||
|
"risk: manual": "Maintainer override: keep selected risk label.",
|
||||||
|
docs: "Auto scope: docs/markdown/template files changed.",
|
||||||
|
dependencies: "Auto scope: dependency manifest/lock/policy changed.",
|
||||||
|
ci: "Auto scope: CI/workflow/hook files changed.",
|
||||||
|
core: "Auto scope: root src/*.rs files changed.",
|
||||||
|
agent: "Auto scope: src/agent/** changed.",
|
||||||
|
channel: "Auto scope: src/channels/** changed.",
|
||||||
|
config: "Auto scope: src/config/** changed.",
|
||||||
|
cron: "Auto scope: src/cron/** changed.",
|
||||||
|
daemon: "Auto scope: src/daemon/** changed.",
|
||||||
|
doctor: "Auto scope: src/doctor/** changed.",
|
||||||
|
gateway: "Auto scope: src/gateway/** changed.",
|
||||||
|
health: "Auto scope: src/health/** changed.",
|
||||||
|
heartbeat: "Auto scope: src/heartbeat/** changed.",
|
||||||
|
integration: "Auto scope: src/integrations/** changed.",
|
||||||
|
memory: "Auto scope: src/memory/** changed.",
|
||||||
|
observability: "Auto scope: src/observability/** changed.",
|
||||||
|
onboard: "Auto scope: src/onboard/** changed.",
|
||||||
|
provider: "Auto scope: src/providers/** changed.",
|
||||||
|
runtime: "Auto scope: src/runtime/** changed.",
|
||||||
|
security: "Auto scope: src/security/** changed.",
|
||||||
|
service: "Auto scope: src/service/** changed.",
|
||||||
|
skillforge: "Auto scope: src/skillforge/** changed.",
|
||||||
|
skills: "Auto scope: src/skills/** changed.",
|
||||||
|
tool: "Auto scope: src/tools/** changed.",
|
||||||
|
tunnel: "Auto scope: src/tunnel/** changed.",
|
||||||
|
tests: "Auto scope: tests/** changed.",
|
||||||
|
scripts: "Auto scope: scripts/** changed.",
|
||||||
|
dev: "Auto scope: dev/** changed.",
|
||||||
|
};
|
||||||
|
for (const label of contributorTierLabels) {
|
||||||
|
staticLabelColors[label] = contributorTierColor;
|
||||||
|
const rule = contributorTierRules.find((entry) => entry.label === label);
|
||||||
|
if (rule) {
|
||||||
|
staticLabelDescriptions[label] = `Contributor with ${rule.minMergedPRs}+ merged PRs.`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const modulePrefixColors = Object.fromEntries(
|
||||||
|
modulePrefixPriority.map((prefix) => [
|
||||||
|
`${prefix}:`,
|
||||||
|
otherLabelColors[prefix] || "BFDADC",
|
||||||
|
])
|
||||||
|
);
|
||||||
|
|
||||||
|
const providerKeywordHints = [
|
||||||
|
"deepseek",
|
||||||
|
"moonshot",
|
||||||
|
"kimi",
|
||||||
|
"qwen",
|
||||||
|
"mistral",
|
||||||
|
"doubao",
|
||||||
|
"baichuan",
|
||||||
|
"yi",
|
||||||
|
"siliconflow",
|
||||||
|
"vertex",
|
||||||
|
"azure",
|
||||||
|
"perplexity",
|
||||||
|
"venice",
|
||||||
|
"vercel",
|
||||||
|
"cloudflare",
|
||||||
|
"synthetic",
|
||||||
|
"opencode",
|
||||||
|
"zai",
|
||||||
|
"glm",
|
||||||
|
"minimax",
|
||||||
|
"bedrock",
|
||||||
|
"qianfan",
|
||||||
|
"groq",
|
||||||
|
"together",
|
||||||
|
"fireworks",
|
||||||
|
"cohere",
|
||||||
|
"openai",
|
||||||
|
"openrouter",
|
||||||
|
"anthropic",
|
||||||
|
"gemini",
|
||||||
|
"ollama",
|
||||||
|
];
|
||||||
|
|
||||||
|
const channelKeywordHints = [
|
||||||
|
"telegram",
|
||||||
|
"discord",
|
||||||
|
"slack",
|
||||||
|
"whatsapp",
|
||||||
|
"matrix",
|
||||||
|
"irc",
|
||||||
|
"imessage",
|
||||||
|
"email",
|
||||||
|
"cli",
|
||||||
|
];
|
||||||
|
|
||||||
|
// Heuristic: does this changed path count as documentation-like content?
// Covers the docs tree, markdown files, the license, markdownlint config,
// and the GitHub PR/issue templates.
function isDocsLike(path) {
  const exactMatches = [
    "LICENSE",
    ".markdownlint-cli2.yaml",
    ".github/pull_request_template.md",
  ];
  if (exactMatches.includes(path)) return true;
  if (path.startsWith("docs/") || path.startsWith(".github/ISSUE_TEMPLATE/")) return true;
  return path.endsWith(".md") || path.endsWith(".mdx");
}
|
||||||
|
|
||||||
|
// Normalize an arbitrary path segment into a safe label segment:
// lowercase, strip a ".rs" extension, collapse any run of disallowed
// characters into "-", trim leading/trailing separators, and cap at 40
// characters. The final trim runs AFTER the length cap so truncation can
// never leave a dangling "-" or "_" at the end of the label.
function normalizeLabelSegment(segment) {
  return (segment || "")
    .toLowerCase()
    .replace(/\.rs$/, "")
    .replace(/[^a-z0-9_-]+/g, "-")
    .replace(/^[-_]+|[-_]+$/g, "")
    .slice(0, 40)
    .replace(/[-_]+$/, "");
}
|
||||||
|
|
||||||
|
// True when `keyword` appears in `text` as a whole token rather than
// embedded inside a longer identifier. Token edges are the string
// boundaries or any character outside [a-z0-9_]; matching is
// case-insensitive. The keyword is regex-escaped before use.
function containsKeyword(text, keyword) {
  const literal = keyword.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const boundary = "[^a-z0-9_]";
  const tokenPattern = new RegExp(`(^|${boundary})${literal}(${boundary}|$)`, "i");
  return tokenPattern.test(text);
}
|
||||||
|
|
||||||
|
/** Build the canonical display form of a module label, e.g. "provider: openai". */
function formatModuleLabel(prefix, segment) {
  return [prefix, segment].join(": ");
}
|
||||||
|
|
||||||
|
// Split a "prefix: segment" label into normalized (trimmed, lowercased)
// parts. Returns null for non-strings, labels without a colon, or labels
// whose prefix/segment collapses to empty after trimming.
function parseModuleLabel(label) {
  if (typeof label !== "string") return null;
  const parts = label.match(/^([^:]+):\s*(.+)$/);
  if (parts === null) return null;
  const prefix = parts[1].trim().toLowerCase();
  const segment = (parts[2] || "").trim().toLowerCase();
  return prefix && segment ? { prefix, segment } : null;
}
|
||||||
|
|
||||||
|
// Deduplicate `labels` and order them by their numeric rank in
// `priorityIndex` (a Map of label -> index); unranked labels sink to the
// end, and ties break lexicographically for a stable display order.
function sortByPriority(labels, priorityIndex) {
  const rankOf = (label) =>
    priorityIndex.has(label) ? priorityIndex.get(label) : Number.MAX_SAFE_INTEGER;
  return [...new Set(labels)].sort(
    (a, b) => rankOf(a) - rankOf(b) || a.localeCompare(b)
  );
}
|
||||||
|
|
||||||
|
// Order module labels for display: known prefixes first (by their rank in
// modulePrefixPriorityIndex), unranked prefixes after that alphabetically;
// within a prefix, specific segments come before the "core" segment, and
// otherwise segments sort alphabetically. Labels that do not parse as
// module labels fall back to plain lexicographic comparison.
function sortModuleLabels(labels) {
  const prefixRank = (prefix) =>
    modulePrefixPriorityIndex.has(prefix)
      ? modulePrefixPriorityIndex.get(prefix)
      : Number.MAX_SAFE_INTEGER;

  return [...new Set(labels)].sort((a, b) => {
    const left = parseModuleLabel(a);
    const right = parseModuleLabel(b);
    if (!left || !right) return a.localeCompare(b);

    const byRank = prefixRank(left.prefix) - prefixRank(right.prefix);
    if (byRank !== 0) return byRank;
    if (left.prefix !== right.prefix) {
      return left.prefix.localeCompare(right.prefix);
    }

    // "core" sorts after any specific segment within the same prefix.
    const leftIsCore = left.segment === "core";
    const rightIsCore = right.segment === "core";
    if (leftIsCore !== rightIsCore) return leftIsCore ? 1 : -1;

    return left.segment.localeCompare(right.segment);
  });
}
|
||||||
|
|
||||||
|
// Drop a prefix's "core" label whenever the same prefix also carries a
// more specific segment label — the specific label strictly subsumes the
// information in "prefix: core". Non-module labels pass through untouched.
function refineModuleLabels(rawLabels) {
  const result = new Set(rawLabels);
  const prefixSegments = new Map();

  // Group every parsed module label's segment under its prefix.
  for (const label of rawLabels) {
    const parsed = parseModuleLabel(label);
    if (!parsed) continue;
    const bucket = prefixSegments.get(parsed.prefix) ?? new Set();
    bucket.add(parsed.segment);
    prefixSegments.set(parsed.prefix, bucket);
  }

  for (const [prefix, segments] of prefixSegments) {
    if ([...segments].some((segment) => segment !== "core")) {
      result.delete(formatModuleLabel(prefix, "core"));
    }
  }

  return result;
}
|
||||||
|
|
||||||
|
// Collapse module labels per prefix: a prefix that accumulated exactly one
// segment keeps its single "prefix: segment" label, while a prefix with
// several distinct segments is demoted to a bare path label instead
// (reported via forcePathPrefixes). Labels that do not parse as module
// labels pass straight through into the result set.
function compactModuleLabels(labels) {
  const segmentsByPrefix = new Map();
  const moduleLabels = new Set();
  const forcePathPrefixes = new Set();

  for (const label of labels) {
    const parsed = parseModuleLabel(label);
    if (parsed === null) {
      moduleLabels.add(label);
      continue;
    }
    const bucket = segmentsByPrefix.get(parsed.prefix) ?? new Set();
    bucket.add(parsed.segment);
    segmentsByPrefix.set(parsed.prefix, bucket);
  }

  for (const [prefix, segments] of segmentsByPrefix) {
    const distinct = [...segments].filter(Boolean);
    if (distinct.length === 0) continue;
    if (distinct.length === 1) {
      moduleLabels.add(formatModuleLabel(prefix, distinct[0]));
    } else {
      forcePathPrefixes.add(prefix);
    }
  }

  return { moduleLabels, forcePathPrefixes };
}
|
||||||
|
|
||||||
|
// Resolve a label's hex color: exact static mapping first, then the color
// of the first module prefix the label starts with (insertion order of
// modulePrefixColors), else the neutral default.
function colorForLabel(label) {
  const staticColor = staticLabelColors[label];
  if (staticColor) return staticColor;
  for (const [prefix, color] of Object.entries(modulePrefixColors)) {
    if (label.startsWith(prefix)) return color;
  }
  return "BFDADC";
}
|
||||||
|
|
||||||
|
// Resolve a label's description: static table first, then a generated
// description for parseable module labels, else a generic fallback.
function descriptionForLabel(label) {
  const staticDescription = staticLabelDescriptions[label];
  if (staticDescription) return staticDescription;

  const parsed = parseModuleLabel(label);
  if (!parsed) return "Auto-managed label.";

  return parsed.segment === "core"
    ? `Auto module: ${parsed.prefix} core files changed.`
    : `Auto module: ${parsed.prefix}/${parsed.segment} changed.`;
}
|
||||||
|
|
||||||
|
// Reconcile one repo label with its computed color/description. Pass
// `existing` (a label object from a list call) to skip the extra getLabel
// round-trip. A 404 from any call in the try block means the label is
// missing and gets created; every other API error propagates.
async function ensureLabel(name, existing = null) {
  const expectedColor = colorForLabel(name);
  const expectedDescription = descriptionForLabel(name);
  try {
    let current = existing;
    if (!current) {
      const response = await github.rest.issues.getLabel({ owner, repo, name });
      current = response.data;
    }
    const colorMatches = (current.color || "").toUpperCase() === expectedColor;
    const descriptionMatches = (current.description || "").trim() === expectedDescription;
    if (colorMatches && descriptionMatches) return;
    await github.rest.issues.updateLabel({
      owner,
      repo,
      name,
      new_name: name,
      color: expectedColor,
      description: expectedDescription,
    });
  } catch (error) {
    // Only "label not found" is recoverable; anything else is a real failure.
    if (error.status !== 404) throw error;
    await github.rest.issues.createLabel({
      owner,
      repo,
      name,
      color: expectedColor,
      description: expectedDescription,
    });
  }
}
|
||||||
|
|
||||||
|
// A label is "managed" when this workflow owns its lifecycle: the manual
// risk override, size/risk tiers, managed path scopes, contributor tiers,
// or any module-prefixed label.
function isManagedLabel(label) {
  if (label === manualRiskOverrideLabel) return true;
  if (sizeLabels.includes(label)) return true;
  if (computedRiskLabels.includes(label)) return true;
  if (managedPathLabelSet.has(label)) return true;
  if (contributorTierLabels.includes(label)) return true;
  return managedModulePrefixes.some((prefix) => label.startsWith(prefix));
}
|
||||||
|
|
||||||
|
// Walk every label in the repo (paginated) and reconcile the metadata of
// the ones this workflow manages. Reconciliation is intentionally
// sequential — each ensureLabel call is awaited before the next.
async function ensureManagedRepoLabelsMetadata() {
  const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, {
    owner,
    repo,
    per_page: 100,
  });

  for (const existingLabel of repoLabels) {
    const labelName = existingLabel.name || "";
    if (isManagedLabel(labelName)) {
      await ensureLabel(labelName, existingLabel);
    }
  }
}
|
||||||
|
|
||||||
|
// Pick the contributor tier label for a merged-PR count: rules are scanned
// in order and the first satisfied threshold wins, so contributorTierRules
// is presumably sorted from highest to lowest minMergedPRs — verify at the
// policy source. Returns null when no threshold is met.
function selectContributorTier(mergedCount) {
  for (const rule of contributorTierRules) {
    if (mergedCount >= rule.minMergedPRs) return rule.label;
  }
  return null;
}
|
||||||
|
|
||||||
|
if (context.eventName === "workflow_dispatch") {
|
||||||
|
const mode = (context.payload.inputs?.mode || "audit").toLowerCase();
|
||||||
|
const shouldRepair = mode === "repair";
|
||||||
|
const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
let managedScanned = 0;
|
||||||
|
const drifts = [];
|
||||||
|
|
||||||
|
for (const existingLabel of repoLabels) {
|
||||||
|
const labelName = existingLabel.name || "";
|
||||||
|
if (!isManagedLabel(labelName)) continue;
|
||||||
|
managedScanned += 1;
|
||||||
|
|
||||||
|
const expectedColor = colorForLabel(labelName);
|
||||||
|
const expectedDescription = descriptionForLabel(labelName);
|
||||||
|
const currentColor = (existingLabel.color || "").toUpperCase();
|
||||||
|
const currentDescription = (existingLabel.description || "").trim();
|
||||||
|
if (currentColor !== expectedColor || currentDescription !== expectedDescription) {
|
||||||
|
drifts.push({
|
||||||
|
name: labelName,
|
||||||
|
currentColor,
|
||||||
|
expectedColor,
|
||||||
|
currentDescription,
|
||||||
|
expectedDescription,
|
||||||
|
});
|
||||||
|
if (shouldRepair) {
|
||||||
|
await ensureLabel(labelName, existingLabel);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
core.summary
|
||||||
|
.addHeading("Managed Label Governance", 2)
|
||||||
|
.addRaw(`Mode: ${shouldRepair ? "repair" : "audit"}`)
|
||||||
|
.addEOL()
|
||||||
|
.addRaw(`Managed labels scanned: ${managedScanned}`)
|
||||||
|
.addEOL()
|
||||||
|
.addRaw(`Drifts found: ${drifts.length}`)
|
||||||
|
.addEOL();
|
||||||
|
|
||||||
|
if (drifts.length > 0) {
|
||||||
|
const sample = drifts.slice(0, 30).map((entry) => [
|
||||||
|
entry.name,
|
||||||
|
`${entry.currentColor} -> ${entry.expectedColor}`,
|
||||||
|
`${entry.currentDescription || "(blank)"} -> ${entry.expectedDescription}`,
|
||||||
|
]);
|
||||||
|
core.summary.addTable([
|
||||||
|
[{ data: "Label", header: true }, { data: "Color", header: true }, { data: "Description", header: true }],
|
||||||
|
...sample,
|
||||||
|
]);
|
||||||
|
if (drifts.length > sample.length) {
|
||||||
|
core.summary
|
||||||
|
.addRaw(`Additional drifts not shown: ${drifts.length - sample.length}`)
|
||||||
|
.addEOL();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await core.summary.write();
|
||||||
|
|
||||||
|
if (!shouldRepair && drifts.length > 0) {
|
||||||
|
core.info(`Managed-label metadata drifts detected: ${drifts.length}. Re-run with mode=repair to auto-fix.`);
|
||||||
|
} else if (shouldRepair) {
|
||||||
|
core.info(`Managed-label metadata repair applied to ${drifts.length} labels.`);
|
||||||
|
} else {
|
||||||
|
core.info("No managed-label metadata drift detected.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const files = await github.paginate(github.rest.pulls.listFiles, {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: pr.number,
|
||||||
|
per_page: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const detectedModuleLabels = new Set();
|
||||||
|
for (const file of files) {
|
||||||
|
const path = (file.filename || "").toLowerCase();
|
||||||
|
for (const rule of moduleNamespaceRules) {
|
||||||
|
if (!path.startsWith(rule.root)) continue;
|
||||||
|
|
||||||
|
const relative = path.slice(rule.root.length);
|
||||||
|
if (!relative) continue;
|
||||||
|
|
||||||
|
const first = relative.split("/")[0];
|
||||||
|
const firstStem = first.endsWith(".rs") ? first.slice(0, -3) : first;
|
||||||
|
let segment = firstStem;
|
||||||
|
|
||||||
|
if (rule.coreEntries.has(first) || rule.coreEntries.has(firstStem)) {
|
||||||
|
segment = "core";
|
||||||
|
}
|
||||||
|
|
||||||
|
segment = normalizeLabelSegment(segment);
|
||||||
|
if (!segment) continue;
|
||||||
|
|
||||||
|
detectedModuleLabels.add(formatModuleLabel(rule.prefix, segment));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const providerRelevantFiles = files.filter((file) => {
|
||||||
|
const path = file.filename || "";
|
||||||
|
return (
|
||||||
|
path.startsWith("src/providers/") ||
|
||||||
|
path.startsWith("src/integrations/") ||
|
||||||
|
path.startsWith("src/onboard/") ||
|
||||||
|
path.startsWith("src/config/")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (providerRelevantFiles.length > 0) {
|
||||||
|
const searchableText = [
|
||||||
|
pr.title || "",
|
||||||
|
pr.body || "",
|
||||||
|
...providerRelevantFiles.map((file) => file.filename || ""),
|
||||||
|
...providerRelevantFiles.map((file) => file.patch || ""),
|
||||||
|
]
|
||||||
|
.join("\n")
|
||||||
|
.toLowerCase();
|
||||||
|
|
||||||
|
for (const keyword of providerKeywordHints) {
|
||||||
|
if (containsKeyword(searchableText, keyword)) {
|
||||||
|
detectedModuleLabels.add(formatModuleLabel("provider", keyword));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const channelRelevantFiles = files.filter((file) => {
|
||||||
|
const path = file.filename || "";
|
||||||
|
return (
|
||||||
|
path.startsWith("src/channels/") ||
|
||||||
|
path.startsWith("src/onboard/") ||
|
||||||
|
path.startsWith("src/config/")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (channelRelevantFiles.length > 0) {
|
||||||
|
const searchableText = [
|
||||||
|
pr.title || "",
|
||||||
|
pr.body || "",
|
||||||
|
...channelRelevantFiles.map((file) => file.filename || ""),
|
||||||
|
...channelRelevantFiles.map((file) => file.patch || ""),
|
||||||
|
]
|
||||||
|
.join("\n")
|
||||||
|
.toLowerCase();
|
||||||
|
|
||||||
|
for (const keyword of channelKeywordHints) {
|
||||||
|
if (containsKeyword(searchableText, keyword)) {
|
||||||
|
detectedModuleLabels.add(formatModuleLabel("channel", keyword));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const refinedModuleLabels = refineModuleLabels(detectedModuleLabels);
|
||||||
|
const compactedModuleState = compactModuleLabels(refinedModuleLabels);
|
||||||
|
const selectedModuleLabels = compactedModuleState.moduleLabels;
|
||||||
|
const forcePathPrefixes = compactedModuleState.forcePathPrefixes;
|
||||||
|
const modulePrefixesWithLabels = new Set(
|
||||||
|
[...selectedModuleLabels]
|
||||||
|
.map((label) => parseModuleLabel(label)?.prefix)
|
||||||
|
.filter(Boolean)
|
||||||
|
);
|
||||||
|
|
||||||
|
const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
});
|
||||||
|
const currentLabelNames = currentLabels.map((label) => label.name);
|
||||||
|
const currentPathLabels = currentLabelNames.filter((label) => managedPathLabelSet.has(label));
|
||||||
|
const candidatePathLabels = new Set([...currentPathLabels, ...forcePathPrefixes]);
|
||||||
|
|
||||||
|
const dedupedPathLabels = [...candidatePathLabels].filter((label) => {
|
||||||
|
if (label === "core") return true;
|
||||||
|
if (forcePathPrefixes.has(label)) return true;
|
||||||
|
return !modulePrefixesWithLabels.has(label);
|
||||||
|
});
|
||||||
|
|
||||||
|
const excludedLockfiles = new Set(["Cargo.lock"]);
|
||||||
|
const changedLines = files.reduce((total, file) => {
|
||||||
|
const path = file.filename || "";
|
||||||
|
if (isDocsLike(path) || excludedLockfiles.has(path)) {
|
||||||
|
return total;
|
||||||
|
}
|
||||||
|
return total + (file.additions || 0) + (file.deletions || 0);
|
||||||
|
}, 0);
|
||||||
|
|
||||||
|
let sizeLabel = "size: XL";
|
||||||
|
if (changedLines <= 80) sizeLabel = "size: XS";
|
||||||
|
else if (changedLines <= 250) sizeLabel = "size: S";
|
||||||
|
else if (changedLines <= 500) sizeLabel = "size: M";
|
||||||
|
else if (changedLines <= 1000) sizeLabel = "size: L";
|
||||||
|
|
||||||
|
const hasHighRiskPath = files.some((file) => {
|
||||||
|
const path = file.filename || "";
|
||||||
|
return (
|
||||||
|
path.startsWith("src/security/") ||
|
||||||
|
path.startsWith("src/runtime/") ||
|
||||||
|
path.startsWith("src/gateway/") ||
|
||||||
|
path.startsWith("src/tools/") ||
|
||||||
|
path.startsWith(".github/workflows/")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
const hasMediumRiskPath = files.some((file) => {
|
||||||
|
const path = file.filename || "";
|
||||||
|
return (
|
||||||
|
path.startsWith("src/") ||
|
||||||
|
path === "Cargo.toml" ||
|
||||||
|
path === "Cargo.lock" ||
|
||||||
|
path === "deny.toml" ||
|
||||||
|
path.startsWith(".githooks/")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
let riskLabel = "risk: low";
|
||||||
|
if (hasHighRiskPath) {
|
||||||
|
riskLabel = "risk: high";
|
||||||
|
} else if (hasMediumRiskPath) {
|
||||||
|
riskLabel = "risk: medium";
|
||||||
|
}
|
||||||
|
|
||||||
|
await ensureManagedRepoLabelsMetadata();
|
||||||
|
|
||||||
|
const labelsToEnsure = new Set([
|
||||||
|
...sizeLabels,
|
||||||
|
...computedRiskLabels,
|
||||||
|
manualRiskOverrideLabel,
|
||||||
|
...managedPathLabels,
|
||||||
|
...contributorTierLabels,
|
||||||
|
...selectedModuleLabels,
|
||||||
|
]);
|
||||||
|
|
||||||
|
for (const label of labelsToEnsure) {
|
||||||
|
await ensureLabel(label);
|
||||||
|
}
|
||||||
|
|
||||||
|
let contributorTierLabel = null;
|
||||||
|
const authorLogin = pr.user?.login;
|
||||||
|
if (authorLogin && pr.user?.type !== "Bot") {
|
||||||
|
try {
|
||||||
|
const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({
|
||||||
|
q: `repo:${owner}/${repo} is:pr is:merged author:${authorLogin}`,
|
||||||
|
per_page: 1,
|
||||||
|
});
|
||||||
|
const mergedCount = mergedSearch.total_count || 0;
|
||||||
|
contributorTierLabel = selectContributorTier(mergedCount);
|
||||||
|
} catch (error) {
|
||||||
|
core.warning(`failed to compute contributor tier label: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const hasManualRiskOverride = currentLabelNames.includes(manualRiskOverrideLabel);
|
||||||
|
const keepNonManagedLabels = currentLabelNames.filter((label) => {
|
||||||
|
if (label === manualRiskOverrideLabel) return true;
|
||||||
|
if (contributorTierLabels.includes(label)) return false;
|
||||||
|
if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return false;
|
||||||
|
if (managedPathLabelSet.has(label)) return false;
|
||||||
|
if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return false;
|
||||||
|
return true;
|
||||||
|
});
|
||||||
|
|
||||||
|
const manualRiskSelection =
|
||||||
|
currentLabelNames.find((label) => computedRiskLabels.includes(label)) || riskLabel;
|
||||||
|
|
||||||
|
const moduleLabelList = sortModuleLabels([...selectedModuleLabels]);
|
||||||
|
const contributorLabelList = contributorTierLabel ? [contributorTierLabel] : [];
|
||||||
|
const selectedRiskLabels = hasManualRiskOverride
|
||||||
|
? sortByPriority([manualRiskSelection, manualRiskOverrideLabel], riskPriorityIndex)
|
||||||
|
: sortByPriority([riskLabel], riskPriorityIndex);
|
||||||
|
const selectedSizeLabels = sortByPriority([sizeLabel], sizePriorityIndex);
|
||||||
|
const sortedContributorLabels = sortByPriority(contributorLabelList, contributorPriorityIndex);
|
||||||
|
const sortedPathLabels = sortByPriority(dedupedPathLabels, pathLabelPriorityIndex);
|
||||||
|
const sortedKeepNonManagedLabels = [...new Set(keepNonManagedLabels)].sort((left, right) =>
|
||||||
|
left.localeCompare(right)
|
||||||
|
);
|
||||||
|
|
||||||
|
const nextLabels = [
|
||||||
|
...new Set([
|
||||||
|
...selectedRiskLabels,
|
||||||
|
...selectedSizeLabels,
|
||||||
|
...sortedContributorLabels,
|
||||||
|
...moduleLabelList,
|
||||||
|
...sortedPathLabels,
|
||||||
|
...sortedKeepNonManagedLabels,
|
||||||
|
]),
|
||||||
|
];
|
||||||
|
|
||||||
|
await github.rest.issues.setLabels({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: pr.number,
|
||||||
|
labels: nextLabels,
|
||||||
|
});
|
||||||
|
};
|
||||||
57
.github/workflows/scripts/test_benchmarks_pr_comment.js
vendored
Normal file
57
.github/workflows/scripts/test_benchmarks_pr_comment.js
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
// Extracted from test-benchmarks.yml step: Post benchmark summary on PR
|
||||||
|
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
const fs = require('fs');
|
||||||
|
const output = fs.readFileSync('benchmark_output.txt', 'utf8');
|
||||||
|
|
||||||
|
// Extract Criterion result lines
|
||||||
|
const lines = output.split('\n').filter(l =>
|
||||||
|
l.includes('time:') || l.includes('change:') || l.includes('Performance')
|
||||||
|
);
|
||||||
|
|
||||||
|
if (lines.length === 0) {
|
||||||
|
core.info('No benchmark results to post.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = [
|
||||||
|
'## 📊 Benchmark Results',
|
||||||
|
'',
|
||||||
|
'```',
|
||||||
|
lines.join('\n'),
|
||||||
|
'```',
|
||||||
|
'',
|
||||||
|
'<details><summary>Full output</summary>',
|
||||||
|
'',
|
||||||
|
'```',
|
||||||
|
output.substring(0, 60000),
|
||||||
|
'```',
|
||||||
|
'</details>',
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
|
// Find and update or create comment
|
||||||
|
const { data: comments } = await github.rest.issues.listComments({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: context.payload.pull_request.number,
|
||||||
|
});
|
||||||
|
|
||||||
|
const marker = '## 📊 Benchmark Results';
|
||||||
|
const existing = comments.find(c => c.body && c.body.startsWith(marker));
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
comment_id: existing.id,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: context.payload.pull_request.number,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
44
.github/workflows/stale.yml
vendored
44
.github/workflows/stale.yml
vendored
|
|
@ -1,44 +0,0 @@
|
||||||
name: Stale
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "20 2 * * *"
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions: {}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
stale:
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
|
||||||
steps:
|
|
||||||
- name: Mark stale issues and pull requests
|
|
||||||
uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
|
|
||||||
with:
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
days-before-issue-stale: 21
|
|
||||||
days-before-issue-close: 7
|
|
||||||
days-before-pr-stale: 14
|
|
||||||
days-before-pr-close: 7
|
|
||||||
stale-issue-label: stale
|
|
||||||
stale-pr-label: stale
|
|
||||||
exempt-issue-labels: security,pinned,no-stale,no-pr-hygiene,maintainer
|
|
||||||
exempt-pr-labels: no-stale,no-pr-hygiene,maintainer
|
|
||||||
remove-stale-when-updated: true
|
|
||||||
exempt-all-assignees: true
|
|
||||||
operations-per-run: 300
|
|
||||||
stale-issue-message: |
|
|
||||||
This issue was automatically marked as stale due to inactivity.
|
|
||||||
Please provide an update, reproduction details, or current status to keep it open.
|
|
||||||
close-issue-message: |
|
|
||||||
Closing this issue due to inactivity.
|
|
||||||
If the problem still exists on the latest `main`, please open a new issue with fresh repro steps.
|
|
||||||
close-issue-reason: not_planned
|
|
||||||
stale-pr-message: |
|
|
||||||
This PR was automatically marked as stale due to inactivity.
|
|
||||||
Please rebase/update and post the latest validation results.
|
|
||||||
close-pr-message: |
|
|
||||||
Closing this PR due to inactivity.
|
|
||||||
Maintainers can reopen once the branch is updated and validation is provided.
|
|
||||||
50
.github/workflows/test-benchmarks.yml
vendored
Normal file
50
.github/workflows/test-benchmarks.yml
vendored
Normal file
|
|
@ -0,0 +1,50 @@
# Performance benchmark workflow: runs Criterion benches, archives results,
# and posts a summary comment on pull requests.
name: Performance Benchmarks

on:
  push:
    branches: [main]
  # Required for the "Post benchmark summary on PR" step below (gated on
  # github.event_name == 'pull_request'); without this trigger that step,
  # the pull-requests: write permission, and the PR-number concurrency key
  # are all dead configuration.
  pull_request:
    branches: [main]
  workflow_dispatch:

concurrency:
  group: bench-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

permissions:
  contents: read
  pull-requests: write

env:
  CARGO_TERM_COLOR: always

jobs:
  benchmarks:
    name: Criterion Benchmarks
    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
      - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
        with:
          toolchain: 1.92.0
      - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3

      # pipefail is set by the default bash shell on Linux runners, so a
      # failing `cargo bench` still fails the step despite the tee pipe.
      - name: Run benchmarks
        run: cargo bench --locked 2>&1 | tee benchmark_output.txt

      - name: Upload benchmark results
        if: always()
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        with:
          name: benchmark-results
          path: |
            target/criterion/
            benchmark_output.txt
          retention-days: 30

      - name: Post benchmark summary on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            const script = require('./.github/workflows/scripts/test_benchmarks_pr_comment.js');
            await script({ github, context, core });
|
|
@ -2,13 +2,15 @@
|
||||||
|
|
||||||
This document explains what each GitHub workflow does, when it runs, and whether it should block merges.
|
This document explains what each GitHub workflow does, when it runs, and whether it should block merges.
|
||||||
|
|
||||||
|
For event-by-event delivery behavior across PR, merge, push, and release, see [`.github/workflows/main-branch-flow.md`](../.github/workflows/main-branch-flow.md).
|
||||||
|
|
||||||
## Merge-Blocking vs Optional
|
## Merge-Blocking vs Optional
|
||||||
|
|
||||||
Merge-blocking checks should stay small and deterministic. Optional checks are useful for automation and maintenance, but should not block normal development.
|
Merge-blocking checks should stay small and deterministic. Optional checks are useful for automation and maintenance, but should not block normal development.
|
||||||
|
|
||||||
### Merge-Blocking
|
### Merge-Blocking
|
||||||
|
|
||||||
- `.github/workflows/ci.yml` (`CI`)
|
- `.github/workflows/ci-run.yml` (`CI`)
|
||||||
- Purpose: Rust validation (`cargo fmt --all -- --check`, `cargo clippy --locked --all-targets -- -D clippy::correctness`, strict delta lint gate on changed Rust lines, `test`, release build smoke) + docs quality checks when docs change (`markdownlint` blocks only issues on changed lines; link check scans only links added on changed lines)
|
- Purpose: Rust validation (`cargo fmt --all -- --check`, `cargo clippy --locked --all-targets -- -D clippy::correctness`, strict delta lint gate on changed Rust lines, `test`, release build smoke) + docs quality checks when docs change (`markdownlint` blocks only issues on changed lines; link check scans only links added on changed lines)
|
||||||
- Additional behavior: PRs that change `.github/workflows/**` require at least one approving review from a login in `WORKFLOW_OWNER_LOGINS` (repository variable fallback: `theonlyhennygod,willsarg`)
|
- Additional behavior: PRs that change `.github/workflows/**` require at least one approving review from a login in `WORKFLOW_OWNER_LOGINS` (repository variable fallback: `theonlyhennygod,willsarg`)
|
||||||
- Additional behavior: lint gates run before `test`/`build`; when lint/docs gates fail on PRs, CI posts an actionable feedback comment with failing gate names and local fix commands
|
- Additional behavior: lint gates run before `test`/`build`; when lint/docs gates fail on PRs, CI posts an actionable feedback comment with failing gate names and local fix commands
|
||||||
|
|
@ -16,25 +18,27 @@ Merge-blocking checks should stay small and deterministic. Optional checks are u
|
||||||
- `.github/workflows/workflow-sanity.yml` (`Workflow Sanity`)
|
- `.github/workflows/workflow-sanity.yml` (`Workflow Sanity`)
|
||||||
- Purpose: lint GitHub workflow files (`actionlint`, tab checks)
|
- Purpose: lint GitHub workflow files (`actionlint`, tab checks)
|
||||||
- Recommended for workflow-changing PRs
|
- Recommended for workflow-changing PRs
|
||||||
- `.github/workflows/pr-intake-sanity.yml` (`PR Intake Sanity`)
|
- `.github/workflows/pr-intake-checks.yml` (`PR Intake Checks`)
|
||||||
- Purpose: safe pre-CI PR checks (template completeness, added-line tabs/trailing-whitespace/conflict markers) with immediate sticky feedback comment
|
- Purpose: safe pre-CI PR checks (template completeness, added-line tabs/trailing-whitespace/conflict markers) with immediate sticky feedback comment
|
||||||
|
|
||||||
### Non-Blocking but Important
|
### Non-Blocking but Important
|
||||||
|
|
||||||
- `.github/workflows/docker.yml` (`Docker`)
|
- `.github/workflows/pub-docker-img.yml` (`Docker`)
|
||||||
- Purpose: PR docker smoke check and publish images on `main`/tag pushes
|
- Purpose: PR Docker smoke check and publish images on tag pushes (`v*`)
|
||||||
- `.github/workflows/security.yml` (`Security Audit`)
|
- `.github/workflows/sec-audit.yml` (`Security Audit`)
|
||||||
- Purpose: dependency advisories (`rustsec/audit-check`, pinned SHA) and policy/license checks (`cargo deny`)
|
- Purpose: dependency advisories (`rustsec/audit-check`, pinned SHA) and policy/license checks (`cargo deny`)
|
||||||
- `.github/workflows/release.yml` (`Release`)
|
- `.github/workflows/sec-codeql.yml` (`CodeQL Analysis`)
|
||||||
|
- Purpose: scheduled/manual static analysis for security findings
|
||||||
|
- `.github/workflows/pub-release.yml` (`Release`)
|
||||||
- Purpose: build tagged release artifacts and publish GitHub releases
|
- Purpose: build tagged release artifacts and publish GitHub releases
|
||||||
- `.github/workflows/label-policy-sanity.yml` (`Label Policy Sanity`)
|
- `.github/workflows/pr-label-policy-check.yml` (`Label Policy Sanity`)
|
||||||
- Purpose: validate shared contributor-tier policy in `.github/label-policy.json` and ensure label workflows consume that policy
|
- Purpose: validate shared contributor-tier policy in `.github/label-policy.json` and ensure label workflows consume that policy
|
||||||
- `.github/workflows/rust-reusable.yml` (`Rust Reusable Job`)
|
- `.github/workflows/test-rust-build.yml` (`Rust Reusable Job`)
|
||||||
- Purpose: reusable Rust setup/cache + command runner for workflow-call consumers
|
- Purpose: reusable Rust setup/cache + command runner for workflow-call consumers
|
||||||
|
|
||||||
### Optional Repository Automation
|
### Optional Repository Automation
|
||||||
|
|
||||||
- `.github/workflows/labeler.yml` (`PR Labeler`)
|
- `.github/workflows/pr-labeler.yml` (`PR Labeler`)
|
||||||
- Purpose: scope/path labels + size/risk labels + fine-grained module labels (`<module>: <component>`)
|
- Purpose: scope/path labels + size/risk labels + fine-grained module labels (`<module>: <component>`)
|
||||||
- Additional behavior: label descriptions are auto-managed as hover tooltips to explain each auto-judgment rule
|
- Additional behavior: label descriptions are auto-managed as hover tooltips to explain each auto-judgment rule
|
||||||
- Additional behavior: provider-related keywords in provider/config/onboard/integration changes are promoted to `provider:*` labels (for example `provider:kimi`, `provider:deepseek`)
|
- Additional behavior: provider-related keywords in provider/config/onboard/integration changes are promoted to `provider:*` labels (for example `provider:kimi`, `provider:deepseek`)
|
||||||
|
|
@ -47,49 +51,49 @@ Merge-blocking checks should stay small and deterministic. Optional checks are u
|
||||||
- Additional behavior: risk + size labels are auto-corrected on manual PR label edits (`labeled`/`unlabeled` events); apply `risk: manual` when maintainers intentionally override automated risk selection
|
- Additional behavior: risk + size labels are auto-corrected on manual PR label edits (`labeled`/`unlabeled` events); apply `risk: manual` when maintainers intentionally override automated risk selection
|
||||||
- High-risk heuristic paths: `src/security/**`, `src/runtime/**`, `src/gateway/**`, `src/tools/**`, `.github/workflows/**`
|
- High-risk heuristic paths: `src/security/**`, `src/runtime/**`, `src/gateway/**`, `src/tools/**`, `.github/workflows/**`
|
||||||
- Guardrail: maintainers can apply `risk: manual` to freeze automated risk recalculation
|
- Guardrail: maintainers can apply `risk: manual` to freeze automated risk recalculation
|
||||||
- `.github/workflows/auto-response.yml` (`PR Auto Responder`)
|
- `.github/workflows/pr-auto-response.yml` (`PR Auto Responder`)
|
||||||
- Purpose: first-time contributor onboarding + label-driven response routing (`r:support`, `r:needs-repro`, etc.)
|
- Purpose: first-time contributor onboarding + label-driven response routing (`r:support`, `r:needs-repro`, etc.)
|
||||||
- Additional behavior: applies contributor tiers on issues by merged PR count (`trusted` >=5, `experienced` >=10, `principal` >=20, `distinguished` >=50), matching PR tier thresholds exactly
|
- Additional behavior: applies contributor tiers on issues by merged PR count (`trusted` >=5, `experienced` >=10, `principal` >=20, `distinguished` >=50), matching PR tier thresholds exactly
|
||||||
- Additional behavior: contributor-tier labels are treated as automation-managed (manual add/remove on PR/issue is auto-corrected)
|
- Additional behavior: contributor-tier labels are treated as automation-managed (manual add/remove on PR/issue is auto-corrected)
|
||||||
- Guardrail: label-based close routes are issue-only; PRs are never auto-closed by route labels
|
- Guardrail: label-based close routes are issue-only; PRs are never auto-closed by route labels
|
||||||
- `.github/workflows/stale.yml` (`Stale`)
|
- `.github/workflows/pr-check-stale.yml` (`Stale`)
|
||||||
- Purpose: stale issue/PR lifecycle automation
|
- Purpose: stale issue/PR lifecycle automation
|
||||||
- `.github/dependabot.yml` (`Dependabot`)
|
- `.github/dependabot.yml` (`Dependabot`)
|
||||||
- Purpose: grouped, rate-limited dependency update PRs (Cargo + GitHub Actions)
|
- Purpose: grouped, rate-limited dependency update PRs (Cargo + GitHub Actions)
|
||||||
- `.github/workflows/pr-hygiene.yml` (`PR Hygiene`)
|
- `.github/workflows/pr-check-status.yml` (`PR Hygiene`)
|
||||||
- Purpose: nudge stale-but-active PRs to rebase/re-run required checks before queue starvation
|
- Purpose: nudge stale-but-active PRs to rebase/re-run required checks before queue starvation
|
||||||
|
|
||||||
## Trigger Map
|
## Trigger Map
|
||||||
|
|
||||||
- `CI`: push to `main`, PRs to `main`
|
- `CI`: push to `main`, PRs to `main`
|
||||||
- `Docker`: push to `main`, tag push (`v*`), PRs touching docker/workflow files, manual dispatch
|
- `Docker`: tag push (`v*`), PRs touching docker/workflow files, manual dispatch
|
||||||
- `Release`: tag push (`v*`)
|
- `Release`: tag push (`v*`)
|
||||||
- `Security Audit`: push to `main`, PRs to `main`, weekly schedule
|
- `Security Audit`: push to `main`, PRs to `main`, weekly schedule
|
||||||
- `Workflow Sanity`: PR/push when `.github/workflows/**`, `.github/*.yml`, or `.github/*.yaml` change
|
- `Workflow Sanity`: PR/push when `.github/workflows/**`, `.github/*.yml`, or `.github/*.yaml` change
|
||||||
- `PR Intake Sanity`: `pull_request_target` on opened/reopened/synchronize/edited/ready_for_review
|
- `PR Intake Checks`: `pull_request_target` on opened/reopened/synchronize/edited/ready_for_review
|
||||||
- `Label Policy Sanity`: PR/push when `.github/label-policy.json`, `.github/workflows/labeler.yml`, or `.github/workflows/auto-response.yml` changes
|
- `Label Policy Sanity`: PR/push when `.github/label-policy.json`, `.github/workflows/pr-labeler.yml`, or `.github/workflows/pr-auto-response.yml` changes
|
||||||
- `PR Labeler`: `pull_request_target` lifecycle events
|
- `PR Labeler`: `pull_request_target` lifecycle events
|
||||||
- `PR Auto Responder`: issue opened/labeled, `pull_request_target` opened/labeled
|
- `PR Auto Responder`: issue opened/labeled, `pull_request_target` opened/labeled
|
||||||
- `Stale`: daily schedule, manual dispatch
|
- `Stale PR Check`: daily schedule, manual dispatch
|
||||||
- `Dependabot`: weekly dependency maintenance windows
|
- `Dependabot`: weekly dependency maintenance windows
|
||||||
- `PR Hygiene`: every 12 hours schedule, manual dispatch
|
- `PR Hygiene`: every 12 hours schedule, manual dispatch
|
||||||
|
|
||||||
## Fast Triage Guide
|
## Fast Triage Guide
|
||||||
|
|
||||||
1. `CI Required Gate` failing: start with `.github/workflows/ci.yml`.
|
1. `CI Required Gate` failing: start with `.github/workflows/ci-run.yml`.
|
||||||
2. Docker failures on PRs: inspect `.github/workflows/docker.yml` `pr-smoke` job.
|
2. Docker failures on PRs: inspect `.github/workflows/pub-docker-img.yml` `pr-smoke` job.
|
||||||
3. Release failures on tags: inspect `.github/workflows/release.yml`.
|
3. Release failures on tags: inspect `.github/workflows/pub-release.yml`.
|
||||||
4. Security failures: inspect `.github/workflows/security.yml` and `deny.toml`.
|
4. Security failures: inspect `.github/workflows/sec-audit.yml` and `deny.toml`.
|
||||||
5. Workflow syntax/lint failures: inspect `.github/workflows/workflow-sanity.yml`.
|
5. Workflow syntax/lint failures: inspect `.github/workflows/workflow-sanity.yml`.
|
||||||
6. PR intake failures: inspect `.github/workflows/pr-intake-sanity.yml` sticky comment and run logs.
|
6. PR intake failures: inspect `.github/workflows/pr-intake-checks.yml` sticky comment and run logs.
|
||||||
7. Label policy parity failures: inspect `.github/workflows/label-policy-sanity.yml`.
|
7. Label policy parity failures: inspect `.github/workflows/pr-label-policy-check.yml`.
|
||||||
8. Docs failures in CI: inspect `docs-quality` job logs in `.github/workflows/ci.yml`.
|
8. Docs failures in CI: inspect `docs-quality` job logs in `.github/workflows/ci-run.yml`.
|
||||||
9. Strict delta lint failures in CI: inspect `lint-strict-delta` job logs and compare with `BASE_SHA` diff scope.
|
9. Strict delta lint failures in CI: inspect `lint-strict-delta` job logs and compare with `BASE_SHA` diff scope.
|
||||||
|
|
||||||
## Maintenance Rules
|
## Maintenance Rules
|
||||||
|
|
||||||
- Keep merge-blocking checks deterministic and reproducible (`--locked` where applicable).
|
- Keep merge-blocking checks deterministic and reproducible (`--locked` where applicable).
|
||||||
- Keep merge-blocking rust quality policy aligned across `.github/workflows/ci.yml`, `dev/ci.sh`, and `.githooks/pre-push` (`./scripts/ci/rust_quality_gate.sh` + `./scripts/ci/rust_strict_delta_gate.sh`).
|
- Keep merge-blocking rust quality policy aligned across `.github/workflows/ci-run.yml`, `dev/ci.sh`, and `.githooks/pre-push` (`./scripts/ci/rust_quality_gate.sh` + `./scripts/ci/rust_strict_delta_gate.sh`).
|
||||||
- Use `./scripts/ci/rust_strict_delta_gate.sh` (or `./dev/ci.sh lint-delta`) as the incremental strict merge gate for changed Rust lines.
|
- Use `./scripts/ci/rust_strict_delta_gate.sh` (or `./dev/ci.sh lint-delta`) as the incremental strict merge gate for changed Rust lines.
|
||||||
- Run full strict lint audits regularly via `./scripts/ci/rust_quality_gate.sh --strict` (for example through `./dev/ci.sh lint-strict`) and track cleanup in focused PRs.
|
- Run full strict lint audits regularly via `./scripts/ci/rust_quality_gate.sh --strict` (for example through `./dev/ci.sh lint-strict`) and track cleanup in focused PRs.
|
||||||
- Keep docs markdown gating incremental via `./scripts/ci/docs_quality_gate.sh` (block changed-line issues, report baseline issues separately).
|
- Keep docs markdown gating incremental via `./scripts/ci/docs_quality_gate.sh` (block changed-line issues, report baseline issues separately).
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue