ci: unify rust quality gate and add incremental docs/link checks
This commit is contained in:
parent
8a6273b988
commit
6528613c8d
12 changed files with 514 additions and 47 deletions
|
|
@ -6,29 +6,38 @@
|
|||
|
||||
set -euo pipefail
|
||||
|
||||
echo "==> pre-push: checking formatting..."
|
||||
cargo fmt --all -- --check || {
|
||||
echo "FAIL: cargo fmt --all -- --check found unformatted code."
|
||||
echo "Run 'cargo fmt' and try again."
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "==> pre-push: running clippy..."
|
||||
cargo clippy --all-targets -- -D clippy::correctness || {
|
||||
echo "FAIL: clippy correctness gate reported issues."
|
||||
echo "==> pre-push: running rust quality gate..."
|
||||
./scripts/ci/rust_quality_gate.sh || {
|
||||
echo "FAIL: rust quality gate failed."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ "${ZEROCLAW_STRICT_LINT:-0}" = "1" ]; then
|
||||
echo "==> pre-push: running strict clippy warnings gate (ZEROCLAW_STRICT_LINT=1)..."
|
||||
cargo clippy --all-targets -- -D warnings || {
|
||||
./scripts/ci/rust_quality_gate.sh --strict || {
|
||||
echo "FAIL: strict clippy warnings gate reported issues."
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
if [ "${ZEROCLAW_DOCS_LINT:-0}" = "1" ]; then
|
||||
echo "==> pre-push: running docs quality gate (ZEROCLAW_DOCS_LINT=1)..."
|
||||
./scripts/ci/docs_quality_gate.sh || {
|
||||
echo "FAIL: docs quality gate reported issues."
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
if [ "${ZEROCLAW_DOCS_LINKS:-0}" = "1" ]; then
|
||||
echo "==> pre-push: running docs links gate (ZEROCLAW_DOCS_LINKS=1)..."
|
||||
./scripts/ci/docs_links_gate.sh || {
|
||||
echo "FAIL: docs links gate reported issues."
|
||||
exit 1
|
||||
}
|
||||
fi
|
||||
|
||||
echo "==> pre-push: running tests..."
|
||||
cargo test || {
|
||||
cargo test --locked || {
|
||||
echo "FAIL: some tests did not pass."
|
||||
exit 1
|
||||
}
|
||||
|
|
|
|||
57
.github/workflows/ci.yml
vendored
57
.github/workflows/ci.yml
vendored
|
|
@ -25,6 +25,7 @@ jobs:
|
|||
docs_changed: ${{ steps.scope.outputs.docs_changed }}
|
||||
rust_changed: ${{ steps.scope.outputs.rust_changed }}
|
||||
docs_files: ${{ steps.scope.outputs.docs_files }}
|
||||
base_sha: ${{ steps.scope.outputs.base_sha }}
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
with:
|
||||
|
|
@ -54,6 +55,7 @@ jobs:
|
|||
echo "docs_only=false"
|
||||
echo "docs_changed=false"
|
||||
echo "rust_changed=true"
|
||||
echo "base_sha="
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
write_empty_docs_files
|
||||
exit 0
|
||||
|
|
@ -65,6 +67,7 @@ jobs:
|
|||
echo "docs_only=false"
|
||||
echo "docs_changed=false"
|
||||
echo "rust_changed=false"
|
||||
echo "base_sha=$BASE"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
write_empty_docs_files
|
||||
exit 0
|
||||
|
|
@ -109,6 +112,7 @@ jobs:
|
|||
echo "docs_only=$docs_only"
|
||||
echo "docs_changed=$docs_changed"
|
||||
echo "rust_changed=$rust_changed"
|
||||
echo "base_sha=$BASE"
|
||||
echo "docs_files<<EOF"
|
||||
printf '%s\n' "${docs_files[@]}"
|
||||
echo "EOF"
|
||||
|
|
@ -126,13 +130,11 @@ jobs:
|
|||
fetch-depth: 0
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92
|
||||
toolchain: 1.92.0
|
||||
components: rustfmt, clippy
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2
|
||||
- name: Run rustfmt
|
||||
run: cargo fmt --all -- --check
|
||||
- name: Run clippy
|
||||
run: cargo clippy --locked --all-targets -- -D clippy::correctness
|
||||
- name: Run rust quality gate
|
||||
run: ./scripts/ci/rust_quality_gate.sh
|
||||
|
||||
test:
|
||||
name: Test
|
||||
|
|
@ -144,7 +146,7 @@ jobs:
|
|||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92
|
||||
toolchain: 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2
|
||||
- name: Run tests
|
||||
run: cargo test --locked --verbose
|
||||
|
|
@ -160,7 +162,7 @@ jobs:
|
|||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
- uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable
|
||||
with:
|
||||
toolchain: 1.92
|
||||
toolchain: 1.92.0
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2
|
||||
- name: Build release binary
|
||||
run: cargo build --release --locked --verbose
|
||||
|
|
@ -191,13 +193,38 @@ jobs:
|
|||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
|
||||
|
||||
- name: Markdown lint
|
||||
uses: DavidAnson/markdownlint-cli2-action@07035fd053f7be764496c0f8d8f9f41f98305101 # v22
|
||||
with:
|
||||
globs: ${{ needs.changes.outputs.docs_files }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Link check (offline)
|
||||
- name: Markdown lint (changed lines only)
|
||||
env:
|
||||
BASE_SHA: ${{ needs.changes.outputs.base_sha }}
|
||||
DOCS_FILES: ${{ needs.changes.outputs.docs_files }}
|
||||
run: ./scripts/ci/docs_quality_gate.sh
|
||||
|
||||
- name: Collect added links
|
||||
id: collect_links
|
||||
shell: bash
|
||||
env:
|
||||
BASE_SHA: ${{ needs.changes.outputs.base_sha }}
|
||||
DOCS_FILES: ${{ needs.changes.outputs.docs_files }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
python3 ./scripts/ci/collect_changed_links.py \
|
||||
--base "$BASE_SHA" \
|
||||
--docs-files "$DOCS_FILES" \
|
||||
--output .ci-added-links.txt
|
||||
count=$(wc -l < .ci-added-links.txt | tr -d ' ')
|
||||
echo "count=$count" >> "$GITHUB_OUTPUT"
|
||||
if [ "$count" -gt 0 ]; then
|
||||
echo "Added links queued for check:"
|
||||
cat .ci-added-links.txt
|
||||
else
|
||||
echo "No added links found in changed docs lines."
|
||||
fi
|
||||
|
||||
- name: Link check (offline, added links only)
|
||||
if: steps.collect_links.outputs.count != '0'
|
||||
uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2
|
||||
with:
|
||||
fail: true
|
||||
|
|
@ -205,10 +232,14 @@ jobs:
|
|||
--offline
|
||||
--no-progress
|
||||
--format detailed
|
||||
${{ needs.changes.outputs.docs_files }}
|
||||
.ci-added-links.txt
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Skip link check (no added links)
|
||||
if: steps.collect_links.outputs.count == '0'
|
||||
run: echo "No added links in changed docs lines. Link check skipped."
|
||||
|
||||
ci-required:
|
||||
name: CI Required Gate
|
||||
if: always()
|
||||
|
|
|
|||
|
|
@ -16,22 +16,27 @@ git config core.hooksPath .githooks
|
|||
cargo build
|
||||
|
||||
# Run tests (all must pass)
|
||||
cargo test
|
||||
cargo test --locked
|
||||
|
||||
# Format & lint (required before PR)
|
||||
cargo fmt --all -- --check
|
||||
cargo clippy --all-targets -- -D clippy::correctness
|
||||
./scripts/ci/rust_quality_gate.sh
|
||||
|
||||
# Optional strict lint audit (recommended periodically)
|
||||
cargo clippy --all-targets -- -D warnings
|
||||
./scripts/ci/rust_quality_gate.sh --strict
|
||||
|
||||
# Optional docs lint gate (blocks only markdown issues on changed lines)
|
||||
./scripts/ci/docs_quality_gate.sh
|
||||
|
||||
# Optional docs links gate (checks only links added on changed lines)
|
||||
./scripts/ci/docs_links_gate.sh
|
||||
|
||||
# Release build (~3.4MB)
|
||||
cargo build --release
|
||||
cargo build --release --locked
|
||||
```
|
||||
|
||||
### Pre-push hook
|
||||
|
||||
The repo includes a pre-push hook in `.githooks/` that enforces `cargo fmt --all -- --check`, `cargo clippy --all-targets -- -D clippy::correctness`, and `cargo test` before every push. Enable it with `git config core.hooksPath .githooks`.
|
||||
The repo includes a pre-push hook in `.githooks/` that enforces `./scripts/ci/rust_quality_gate.sh` and `cargo test --locked` before every push. Enable it with `git config core.hooksPath .githooks`.
|
||||
|
||||
For an opt-in strict lint pass during pre-push, set:
|
||||
|
||||
|
|
@ -39,6 +44,18 @@ For an opt-in strict lint pass during pre-push, set:
|
|||
ZEROCLAW_STRICT_LINT=1 git push
|
||||
```
|
||||
|
||||
For an opt-in docs quality pass during pre-push (changed-line markdown gate), set:
|
||||
|
||||
```bash
|
||||
ZEROCLAW_DOCS_LINT=1 git push
|
||||
```
|
||||
|
||||
For an opt-in docs links pass during pre-push (added-links gate), set:
|
||||
|
||||
```bash
|
||||
ZEROCLAW_DOCS_LINKS=1 git push
|
||||
```
|
||||
|
||||
For full CI parity in Docker, run:
|
||||
|
||||
```bash
|
||||
|
|
@ -340,10 +357,9 @@ impl Tool for YourTool {
|
|||
## Pull Request Checklist
|
||||
|
||||
- [ ] PR template sections are completed (including security + rollback)
|
||||
- [ ] `cargo fmt --all -- --check` — code is formatted
|
||||
- [ ] `cargo clippy --all-targets -- -D clippy::correctness` — merge gate lint baseline passes
|
||||
- [ ] `cargo test` — all tests pass locally or skipped tests are explained
|
||||
- [ ] Optional strict audit: `cargo clippy --all-targets -- -D warnings` (run when doing lint cleanup or before release-hardening work)
|
||||
- [ ] `./scripts/ci/rust_quality_gate.sh` — merge gate formatter/lint baseline passes
|
||||
- [ ] `cargo test --locked` — all tests pass locally or skipped tests are explained
|
||||
- [ ] Optional strict audit: `./scripts/ci/rust_quality_gate.sh --strict` (run when doing lint cleanup or before release-hardening work)
|
||||
- [ ] New code has inline `#[cfg(test)]` tests
|
||||
- [ ] No new dependencies unless absolutely necessary (we optimize for binary size)
|
||||
- [ ] README updated if adding user-facing features
|
||||
|
|
|
|||
|
|
@ -102,8 +102,7 @@ Use this when you want CI-style validation without relying on GitHub Actions and
|
|||
|
||||
This runs inside a container:
|
||||
|
||||
- `cargo fmt --all -- --check`
|
||||
- `cargo clippy --locked --all-targets -- -D clippy::correctness`
|
||||
- `./scripts/ci/rust_quality_gate.sh`
|
||||
- `cargo test --locked --verbose`
|
||||
- `cargo build --release --locked --verbose`
|
||||
- `cargo deny check licenses sources`
|
||||
|
|
@ -126,6 +125,10 @@ To run an opt-in strict lint audit locally:
|
|||
./dev/ci.sh audit
|
||||
./dev/ci.sh security
|
||||
./dev/ci.sh docker-smoke
|
||||
# Optional host-side docs gate (changed-line markdown lint)
|
||||
./scripts/ci/docs_quality_gate.sh
|
||||
# Optional host-side docs links gate (changed-line added links)
|
||||
./scripts/ci/docs_links_gate.sh
|
||||
```
|
||||
|
||||
Note: local `deny` focuses on license/source policy; advisory scanning is handled by `audit`.
|
||||
|
|
@ -154,4 +157,4 @@ Note: local `deny` focuses on license/source policy; advisory scanning is handle
|
|||
|
||||
- Both `Dockerfile` and `dev/ci/Dockerfile` use BuildKit cache mounts for Cargo registry/git data.
|
||||
- Local CI reuses named Docker volumes for Cargo registry/git and target outputs.
|
||||
- The CI image keeps Rust toolchain defaults from `rust:1.92-slim` (no custom `CARGO_HOME`/`RUSTUP_HOME` overrides), preventing repeated toolchain bootstrapping on each run.
|
||||
- The CI image keeps Rust toolchain defaults from `rust:1.92-slim` and installs pinned toolchain `1.92.0` (no custom `CARGO_HOME`/`RUSTUP_HOME` overrides), preventing repeated toolchain bootstrapping on each run.
|
||||
|
|
|
|||
|
|
@ -54,11 +54,11 @@ case "$1" in
|
|||
;;
|
||||
|
||||
lint)
|
||||
run_in_ci "cargo fmt --all -- --check && cargo clippy --locked --all-targets -- -D clippy::correctness"
|
||||
run_in_ci "./scripts/ci/rust_quality_gate.sh"
|
||||
;;
|
||||
|
||||
lint-strict)
|
||||
run_in_ci "cargo fmt --all -- --check && cargo clippy --locked --all-targets -- -D warnings"
|
||||
run_in_ci "./scripts/ci/rust_quality_gate.sh --strict"
|
||||
;;
|
||||
|
||||
test)
|
||||
|
|
@ -88,7 +88,7 @@ case "$1" in
|
|||
;;
|
||||
|
||||
all)
|
||||
run_in_ci "cargo fmt --all -- --check && cargo clippy --locked --all-targets -- -D clippy::correctness"
|
||||
run_in_ci "./scripts/ci/rust_quality_gate.sh"
|
||||
run_in_ci "cargo test --locked --verbose"
|
||||
run_in_ci "cargo build --release --locked --verbose"
|
||||
run_in_ci "cargo deny check licenses sources"
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
|||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN rustup toolchain install 1.92 --profile minimal --component rustfmt --component clippy
|
||||
RUN rustup toolchain install 1.92.0 --profile minimal --component rustfmt --component clippy
|
||||
|
||||
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||
--mount=type=cache,target=/usr/local/cargo/git \
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ Merge-blocking checks should stay small and deterministic. Optional checks are u
|
|||
### Merge-Blocking
|
||||
|
||||
- `.github/workflows/ci.yml` (`CI`)
|
||||
- Purpose: Rust validation (`cargo fmt --all -- --check`, `cargo clippy --locked --all-targets -- -D clippy::correctness`, `test`, release build smoke) + docs quality checks when docs change
|
||||
- Purpose: Rust validation (`cargo fmt --all -- --check`, `cargo clippy --locked --all-targets -- -D clippy::correctness`, `test`, release build smoke) + docs quality checks when docs change (`markdownlint` blocks only issues on changed lines; link check scans only links added on changed lines)
|
||||
- Merge gate: `CI Required Gate`
|
||||
- `.github/workflows/workflow-sanity.yml` (`Workflow Sanity`)
|
||||
- Purpose: lint GitHub workflow files (`actionlint`, tab checks)
|
||||
|
|
@ -75,12 +75,14 @@ Merge-blocking checks should stay small and deterministic. Optional checks are u
|
|||
## Maintenance Rules
|
||||
|
||||
- Keep merge-blocking checks deterministic and reproducible (`--locked` where applicable).
|
||||
- Keep merge-blocking clippy policy aligned across `.github/workflows/ci.yml`, `dev/ci.sh`, and `.githooks/pre-push` (`cargo clippy --all-targets -- -D clippy::correctness`).
|
||||
- Run strict lint audits regularly via `cargo clippy --all-targets -- -D warnings` (for example through `./dev/ci.sh lint-strict`) and track cleanup in focused PRs.
|
||||
- Keep merge-blocking rust quality policy aligned across `.github/workflows/ci.yml`, `dev/ci.sh`, and `.githooks/pre-push` (`./scripts/ci/rust_quality_gate.sh`).
|
||||
- Run strict lint audits regularly via `./scripts/ci/rust_quality_gate.sh --strict` (for example through `./dev/ci.sh lint-strict`) and track cleanup in focused PRs.
|
||||
- Keep docs markdown gating incremental via `./scripts/ci/docs_quality_gate.sh` (block changed-line issues, report baseline issues separately).
|
||||
- Keep docs link gating incremental via `./scripts/ci/collect_changed_links.py` + lychee (check only links added on changed lines).
|
||||
- Prefer explicit workflow permissions (least privilege).
|
||||
- Keep Actions source policy restricted to approved allowlist patterns (see `docs/actions-source-policy.md`).
|
||||
- Use path filters for expensive workflows when practical.
|
||||
- Keep docs quality checks low-noise (`markdownlint` + offline link checks).
|
||||
- Keep docs quality checks low-noise (incremental markdown + incremental added-link checks).
|
||||
- Keep dependency update volume controlled (grouping + PR limits).
|
||||
- Avoid mixing onboarding/community automation with merge-gating logic.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
[toolchain]
|
||||
channel = "1.92"
|
||||
channel = "1.92.0"
|
||||
|
|
|
|||
178
scripts/ci/collect_changed_links.py
Executable file
178
scripts/ci/collect_changed_links.py
Executable file
|
|
@ -0,0 +1,178 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
DOC_PATH_RE = re.compile(r"\.mdx?$")
|
||||
URL_RE = re.compile(r"https?://[^\s<>'\"]+")
|
||||
INLINE_LINK_RE = re.compile(r"!?\[[^\]]*\]\(([^)]+)\)")
|
||||
REF_LINK_RE = re.compile(r"^\s*\[[^\]]+\]:\s*(\S+)")
|
||||
TRAILING_PUNCTUATION = ").,;:!?]}'\""
|
||||
|
||||
|
||||
def run_git(args: list[str]) -> subprocess.CompletedProcess[str]:
    """Run a git subcommand, capturing text output and never raising on failure."""
    command = ["git"]
    command.extend(args)
    return subprocess.run(command, check=False, capture_output=True, text=True)
|
||||
|
||||
|
||||
def commit_exists(rev: str) -> bool:
    """Return True when *rev* resolves to an existing commit object."""
    if rev:
        # `git cat-file -e <rev>^{commit}` exits 0 only for a real commit.
        probe = run_git(["cat-file", "-e", f"{rev}^{{commit}}"])
        return probe.returncode == 0
    return False
|
||||
|
||||
|
||||
def normalize_docs_files(raw: str) -> list[str]:
    """Split a newline-separated file list, dropping blank and padded entries."""
    return [stripped for line in raw.splitlines() if (stripped := line.strip())]
|
||||
|
||||
|
||||
def infer_base_sha(provided: str) -> str:
    """Resolve a usable base commit SHA, or "" when none can be determined.

    Prefers the caller-provided SHA; otherwise falls back to the merge-base
    of origin/main and HEAD when that ref is available locally.
    """
    if commit_exists(provided):
        return provided
    # Without origin/main there is nothing sensible to diff against.
    has_main = run_git(["rev-parse", "--verify", "origin/main"]).returncode == 0
    if not has_main:
        return ""
    merge_base = run_git(["merge-base", "origin/main", "HEAD"]).stdout.strip()
    if commit_exists(merge_base):
        return merge_base
    return ""
|
||||
|
||||
|
||||
def infer_docs_files(base_sha: str, provided: list[str]) -> list[str]:
    """Return docs files changed since *base_sha*, honoring an explicit list.

    An explicit *provided* list wins outright; otherwise the diff against
    *base_sha* is filtered down to markdown files plus a couple of special
    doc-like paths.
    """
    if provided:
        return provided
    if not base_sha:
        return []
    diff = run_git(["diff", "--name-only", base_sha, "HEAD"])
    # Non-markdown paths that are still treated as documentation.
    extra_docs = {"LICENSE", ".github/pull_request_template.md"}
    selected: list[str] = []
    for raw in diff.stdout.splitlines():
        candidate = raw.strip()
        if candidate and (DOC_PATH_RE.search(candidate) or candidate in extra_docs):
            selected.append(candidate)
    return selected
|
||||
|
||||
|
||||
def normalize_link_target(raw_target: str, source_path: str) -> str | None:
    """Normalize a markdown link target to a URL or repo-relative path.

    Returns None for targets that need no link checking: empty targets,
    in-page fragments, and mailto/tel/javascript schemes.
    """
    candidate = raw_target.strip()
    # Strip autolink-style angle brackets: <https://example.com>
    if candidate.startswith("<") and candidate.endswith(">"):
        candidate = candidate[1:-1].strip()
    if not candidate:
        return None

    # Drop an inline link title: (path "Title") -> keep only the path token.
    if " " in candidate:
        candidate = candidate.split()[0].strip()
    if not candidate or candidate.startswith("#"):
        return None

    if candidate.lower().startswith(("mailto:", "tel:", "javascript:")):
        return None

    # Absolute URLs are returned as-is, minus any trailing punctuation.
    if candidate.startswith(("http://", "https://")):
        return candidate.rstrip(TRAILING_PUNCTUATION)

    # Local path: drop fragment/query, then resolve relative to the source file.
    bare_path = candidate.split("#", 1)[0].split("?", 1)[0]
    if not bare_path:
        return None
    if bare_path.startswith("/"):
        # Root-relative paths are interpreted as repo-root-relative.
        resolved = bare_path.lstrip("/")
    else:
        base_dir = os.path.dirname(source_path) or "."
        resolved = os.path.normpath(os.path.join(base_dir, bare_path))
    if resolved and resolved != ".":
        return resolved
    return None
|
||||
|
||||
|
||||
def extract_links(text: str, source_path: str) -> list[str]:
    """Collect candidate link targets from one line of markdown text."""
    found: list[str] = []

    # Bare URLs anywhere on the line (trailing punctuation stripped).
    for raw_url in URL_RE.findall(text):
        cleaned = raw_url.rstrip(TRAILING_PUNCTUATION)
        if cleaned:
            found.append(cleaned)

    # Inline links/images: [text](target) or ![alt](target).
    for raw_target in INLINE_LINK_RE.findall(text):
        target = normalize_link_target(raw_target, source_path)
        if target:
            found.append(target)

    # Reference-style definitions: [label]: target
    reference = REF_LINK_RE.match(text)
    if reference:
        target = normalize_link_target(reference.group(1), source_path)
        if target:
            found.append(target)

    return found
|
||||
|
||||
|
||||
def added_lines_for_file(base_sha: str, path: str) -> list[str]:
    """Return the lines added to *path* since *base_sha*.

    Without a usable base commit, every current line of the file is treated
    as "added"; a missing file yields an empty list.
    """
    if not base_sha:
        target = Path(path)
        if target.is_file():
            return target.read_text(encoding="utf-8", errors="ignore").splitlines()
        return []

    diff = run_git(["diff", "--unified=0", base_sha, "HEAD", "--", path])
    # Keep added lines ("+...") but skip the "+++ b/..." header line.
    return [
        line[1:]
        for line in diff.stdout.splitlines()
        if line.startswith("+") and not line.startswith("+++")
    ]
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: write the unique links added in changed docs lines.

    Resolves the base commit and docs file list (from flags or git), then
    scans only the added lines of each existing docs file and writes the
    de-duplicated link targets to --output. Always returns 0; the output
    file is emptied when nothing is collected.
    """
    parser = argparse.ArgumentParser(description="Collect HTTP(S) links added in changed docs lines")
    parser.add_argument("--base", default="", help="Base commit SHA")
    parser.add_argument(
        "--docs-files",
        default="",
        help="Newline-separated docs files list",
    )
    parser.add_argument("--output", required=True, help="Output file for unique URLs")
    args = parser.parse_args()

    base_sha = infer_base_sha(args.base)
    docs_files = infer_docs_files(base_sha, normalize_docs_files(args.docs_files))

    # Deleted/renamed files may still appear in the diff list; skip them.
    existing_files = [path for path in docs_files if Path(path).is_file()]
    if not existing_files:
        # Truncate the output so downstream consumers see an empty list.
        Path(args.output).write_text("", encoding="utf-8")
        print("No docs files available for link collection.")
        return 0

    # Preserve first-seen order while de-duplicating across all files.
    unique_urls: list[str] = []
    seen: set[str] = set()
    for path in existing_files:
        for line in added_lines_for_file(base_sha, path):
            for link in extract_links(line, path):
                if link not in seen:
                    seen.add(link)
                    unique_urls.append(link)

    # Trailing newline only when there is content, so `wc -l` counts links exactly.
    Path(args.output).write_text("\n".join(unique_urls) + ("\n" if unique_urls else ""), encoding="utf-8")
    print(f"Collected {len(unique_urls)} added link(s) from {len(existing_files)} docs file(s).")
    return 0


if __name__ == "__main__":
    sys.exit(main())
|
||||
28
scripts/ci/docs_links_gate.sh
Executable file
28
scripts/ci/docs_links_gate.sh
Executable file
|
|
@ -0,0 +1,28 @@
|
|||
#!/usr/bin/env bash
# Incremental docs link gate: check only the links added on changed doc lines.
set -euo pipefail

base_sha="${BASE_SHA:-}"
docs_files_raw="${DOCS_FILES:-}"

links_file="$(mktemp)"
trap 'rm -f "$links_file"' EXIT

# Gather the unique URLs/paths introduced by the current change set.
python3 ./scripts/ci/collect_changed_links.py \
  --base "$base_sha" \
  --docs-files "$docs_files_raw" \
  --output "$links_file"

# Nothing collected: the gate passes without needing lychee at all.
if [ ! -s "$links_file" ]; then
  echo "No added links detected in changed docs lines."
  exit 0
fi

if ! command -v lychee >/dev/null 2>&1; then
  echo "lychee is required to run docs link gate locally."
  echo "Install via: cargo install lychee"
  exit 1
fi

echo "Checking added links with lychee (offline mode)..."
lychee --offline --no-progress --format detailed "$links_file"
|
||||
181
scripts/ci/docs_quality_gate.sh
Executable file
181
scripts/ci/docs_quality_gate.sh
Executable file
|
|
@ -0,0 +1,181 @@
|
|||
#!/usr/bin/env bash
# Incremental docs quality gate: run markdownlint over changed docs files but
# treat only issues on changed lines as blocking; pre-existing issues on
# untouched lines are reported as non-blocking baseline noise.

set -euo pipefail

BASE_SHA="${BASE_SHA:-}"
DOCS_FILES_RAW="${DOCS_FILES:-}"

# Infer a base commit from origin/main when the caller did not supply one.
if [ -z "$BASE_SHA" ] && git rev-parse --verify origin/main >/dev/null 2>&1; then
  BASE_SHA="$(git merge-base origin/main HEAD)"
fi

# Infer the docs file list from the diff when not supplied: markdown files
# plus LICENSE and the PR template are treated as documentation.
if [ -z "$DOCS_FILES_RAW" ] && [ -n "$BASE_SHA" ] && git cat-file -e "$BASE_SHA^{commit}" 2>/dev/null; then
  DOCS_FILES_RAW="$(git diff --name-only "$BASE_SHA" HEAD | awk '
    /\.md$/ || /\.mdx$/ || $0 == "LICENSE" || $0 == ".github/pull_request_template.md" {
      print
    }
  ')"
fi

if [ -z "$DOCS_FILES_RAW" ]; then
  echo "No docs files detected; skipping docs quality gate."
  exit 0
fi

# Without a valid base we cannot compute changed lines, so the whole file
# lint result becomes authoritative (see the $BASE_SHA check further down).
if [ -z "$BASE_SHA" ] || ! git cat-file -e "$BASE_SHA^{commit}" 2>/dev/null; then
  echo "BASE_SHA is missing or invalid; falling back to full-file markdown lint."
  BASE_SHA=""
fi

# Normalize the newline-separated list into an array, dropping blanks.
ALL_FILES=()
while IFS= read -r file; do
  if [ -n "$file" ]; then
    ALL_FILES+=("$file")
  fi
done < <(printf '%s\n' "$DOCS_FILES_RAW")

if [ "${#ALL_FILES[@]}" -eq 0 ]; then
  echo "No docs files detected after normalization; skipping docs quality gate."
  exit 0
fi

# Deleted/renamed files can still appear in the diff list; keep only real files.
EXISTING_FILES=()
for file in "${ALL_FILES[@]}"; do
  if [ -f "$file" ]; then
    EXISTING_FILES+=("$file")
  fi
done

if [ "${#EXISTING_FILES[@]}" -eq 0 ]; then
  echo "No existing docs files to lint; skipping docs quality gate."
  exit 0
fi

# Prefer npx with a pinned version for reproducibility; fall back to a local
# markdownlint-cli2 binary if npx is unavailable.
if command -v npx >/dev/null 2>&1; then
  MD_CMD=(npx --yes markdownlint-cli2@0.20.0)
elif command -v markdownlint-cli2 >/dev/null 2>&1; then
  MD_CMD=(markdownlint-cli2)
else
  echo "markdownlint-cli2 is required (via npx or local binary)."
  exit 1
fi

echo "Linting docs files: ${EXISTING_FILES[*]}"

# Capture the lint output so it can be re-classified; the exit code is
# preserved across the set +e/-e window.
LINT_OUTPUT_FILE="$(mktemp)"
set +e
"${MD_CMD[@]}" "${EXISTING_FILES[@]}" >"$LINT_OUTPUT_FILE" 2>&1
LINT_EXIT=$?
set -e

# Clean lint run: nothing to classify.
if [ "$LINT_EXIT" -eq 0 ]; then
  cat "$LINT_OUTPUT_FILE"
  rm -f "$LINT_OUTPUT_FILE"
  exit 0
fi

# No base commit: every lint issue is blocking (full-file mode).
if [ -z "$BASE_SHA" ]; then
  cat "$LINT_OUTPUT_FILE"
  rm -f "$LINT_OUTPUT_FILE"
  exit "$LINT_EXIT"
fi

# Build a JSON map of {file: [[start, end], ...]} for lines added since base,
# parsed from zero-context diff hunk headers.
CHANGED_LINES_JSON_FILE="$(mktemp)"
python3 - "$BASE_SHA" "${EXISTING_FILES[@]}" >"$CHANGED_LINES_JSON_FILE" <<'PY'
import json
import re
import subprocess
import sys

base = sys.argv[1]
files = sys.argv[2:]

changed = {}
# "@@ -a,b +start,count @@" — the new-file side gives the changed-line range.
hunk = re.compile(r"^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@")

for path in files:
    proc = subprocess.run(
        ["git", "diff", "--unified=0", base, "HEAD", "--", path],
        check=False,
        capture_output=True,
        text=True,
    )
    ranges = []
    for line in proc.stdout.splitlines():
        m = hunk.match(line)
        if not m:
            continue
        start = int(m.group(1))
        # A missing count means a single-line hunk.
        count = int(m.group(2) or "1")
        if count > 0:
            ranges.append([start, start + count - 1])
    changed[path] = ranges

print(json.dumps(changed))
PY

# Re-classify each lint finding as blocking (on a changed line) or baseline.
FILTERED_OUTPUT_FILE="$(mktemp)"
set +e
python3 - "$LINT_OUTPUT_FILE" "$CHANGED_LINES_JSON_FILE" >"$FILTERED_OUTPUT_FILE" <<'PY'
import json
import re
import sys

lint_file = sys.argv[1]
changed_file = sys.argv[2]

with open(changed_file, "r", encoding="utf-8") as f:
    changed = json.load(f)

# NOTE(review): assumes markdownlint-cli2 findings look like
# "path:line error MDnnn/name message" — confirm against the pinned
# markdownlint-cli2@0.20.0 output format; unmatched lines pass through.
line_re = re.compile(r"^(.+?):(\d+)\s+error\s+(MD\d+(?:/[^\s]+)?)\s+(.*)$")

blocking = []
baseline = []
other_lines = []

with open(lint_file, "r", encoding="utf-8") as f:
    for raw_line in f:
        line = raw_line.rstrip("\n")
        m = line_re.match(line)
        if not m:
            other_lines.append(line)
            continue

        path, line_no_s, rule, msg = m.groups()
        line_no = int(line_no_s)
        ranges = changed.get(path, [])

        is_changed_line = any(start <= line_no <= end for start, end in ranges)
        entry = f"{path}:{line_no} {rule} {msg}"
        if is_changed_line:
            blocking.append(entry)
        else:
            baseline.append(entry)

if baseline:
    print("Existing markdown issues outside changed lines (non-blocking):")
    for entry in baseline:
        print(f" - {entry}")

if blocking:
    print("Markdown issues introduced on changed lines (blocking):")
    for entry in blocking:
        print(f" - {entry}")
    print(f"Blocking markdown issues: {len(blocking)}")
    sys.exit(1)

if baseline:
    print("No blocking markdown issues on changed lines.")
    sys.exit(0)

# Lint failed but produced no parseable findings: surface the raw output.
for line in other_lines:
    print(line)
print("No blocking markdown issues on changed lines.")
PY
SCRIPT_EXIT=$?
set -e

cat "$FILTERED_OUTPUT_FILE"

rm -f "$LINT_OUTPUT_FILE" "$CHANGED_LINES_JSON_FILE" "$FILTERED_OUTPUT_FILE"
exit "$SCRIPT_EXIT"
|
||||
19
scripts/ci/rust_quality_gate.sh
Executable file
19
scripts/ci/rust_quality_gate.sh
Executable file
|
|
@ -0,0 +1,19 @@
|
|||
#!/usr/bin/env bash
# Unified Rust quality gate: rustfmt check plus clippy at either the
# merge-gate baseline (correctness lints) or, with --strict, all warnings.
set -euo pipefail

strict=false
if [ "${1:-}" = "--strict" ]; then
  strict=true
fi

echo "==> rust quality: cargo fmt --all -- --check"
cargo fmt --all -- --check

if [ "$strict" = true ]; then
  echo "==> rust quality: cargo clippy --locked --all-targets -- -D warnings"
  cargo clippy --locked --all-targets -- -D warnings
else
  echo "==> rust quality: cargo clippy --locked --all-targets -- -D clippy::correctness"
  cargo clippy --locked --all-targets -- -D clippy::correctness
fi
|
||||
Loading…
Add table
Add a link
Reference in a new issue