Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/dependabot-auto-merge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,10 @@ jobs:
name: Enable auto-merge for Dependabot PRs
if: github.event.pull_request.user.login == 'dependabot[bot]' && !github.event.pull_request.draft && github.event.pull_request.head.repo.full_name == github.repository
permissions:
checks: read
contents: write
pull-requests: write
statuses: read
runs-on: ubuntu-latest
steps:
- name: Log workflow source context
Expand Down
2 changes: 1 addition & 1 deletion .llm/context.md
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ Pre-commit validates registration only, NOT that proofs pass. Run affected proof

Also: `ci-rust.yml` (Miri), `ci-security.yml` (cargo-geiger, cargo-deny).

Dependabot auto-merge policy: this repository is squash-only. Use `scripts/ci/enable-dependabot-automerge.sh` (which enforces `--squash` and checks policy drift) instead of inline merge commands in workflows.
Dependabot auto-merge policy: this repository is squash-only. Use `scripts/ci/enable-dependabot-automerge.sh` (which enforces `--squash`, waits for required checks to pass, and checks policy drift) instead of inline merge commands in workflows.

**CI fails on:** unformatted code, clippy warnings, broken doc links, markdown lint errors, workflow syntax errors, unregistered Kani proofs.

Expand Down
12 changes: 10 additions & 2 deletions scripts/ci/docker-network-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,8 @@ run_test() {
# Wait for containers to exit (poll their status)
local elapsed=0
local poll_interval=2
local remaining
local sleep_for
while [ $elapsed -lt $wait_timeout ]; do
# Check if both containers have exited
local peer1_running peer2_running
Expand All @@ -151,8 +153,14 @@ run_test() {
break
fi

sleep $poll_interval
elapsed=$((elapsed + poll_interval))
remaining=$((wait_timeout - elapsed))
sleep_for=$poll_interval
if [ "$sleep_for" -gt "$remaining" ]; then
sleep_for=$remaining
fi

sleep "$sleep_for"
elapsed=$((elapsed + sleep_for))
done

# Collect logs from both containers
Expand Down
84 changes: 84 additions & 0 deletions scripts/ci/enable-dependabot-automerge.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,25 @@ set -euo pipefail
# Required inputs from the triggering workflow event.
: "${PR_HEAD_SHA:?PR_HEAD_SHA is required}"
: "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}"

# Tunables for the required-checks wait loop (seconds); overridable via env.
REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS="${REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS:-120}"
REQUIRED_CHECKS_POLL_INTERVAL_SECONDS="${REQUIRED_CHECKS_POLL_INTERVAL_SECONDS:-10}"
REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS="${REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS:-10}"

# Validate overrides up front: the appear-timeout may be zero (single poll),
# but both intervals must be at least one second.
if [[ ! "$REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS" =~ ^[0-9]+$ ]]; then
  echo "REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS must be a non-negative integer." >&2
  exit 1
fi

if [[ ! "$REQUIRED_CHECKS_POLL_INTERVAL_SECONDS" =~ ^[1-9][0-9]*$ ]]; then
  echo "REQUIRED_CHECKS_POLL_INTERVAL_SECONDS must be a positive integer." >&2
  exit 1
fi

if [[ ! "$REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS" =~ ^[1-9][0-9]*$ ]]; then
  echo "REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS must be a positive integer." >&2
  exit 1
fi

get_pr_field() {
local jq_expr="$1"
local output
Expand Down Expand Up @@ -35,6 +54,54 @@ attempt_automerge() {
return 1
}

required_checks_count() {
  # Print the number of checks GitHub currently marks as required for the PR.
  # Exit status is propagated from `gh` so callers can detect API failures.
  local count
  count="$(gh pr checks "$PR_URL" --required --json name --jq 'length')" || return
  printf '%s\n' "$count"
}

wait_for_required_checks() {
  # Poll until GitHub reports at least one required check for the PR, then
  # watch those checks through to completion.
  # Globals (read): PR_URL, REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS,
  #   REQUIRED_CHECKS_POLL_INTERVAL_SECONDS, REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS
  # Returns: 0 when all required checks pass,
  #          1 when checks fail or none appear within the timeout,
  #          2 when the PR head moved (stale event) at any point.
  local elapsed=0
  local remaining
  local sleep_for
  local required_count

  # `<=` (not `<`) so a timeout of 0 still performs exactly one poll.
  while ((elapsed <= REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS)); do
    if is_stale_event; then
      echo "PR head moved while waiting for required checks; skipping stale auto-merge attempt."
      return 2
    fi

    required_count="$(required_checks_count)"
    if [[ "$required_count" =~ ^[0-9]+$ ]] && ((required_count > 0)); then
      # Re-check staleness immediately before the (potentially long) watch phase.
      if is_stale_event; then
        echo "PR head moved after required checks appeared; skipping stale auto-merge attempt."
        return 2
      fi
      echo "Waiting for $required_count required checks to pass before enabling auto-merge."
      if ! gh pr checks "$PR_URL" --required --watch --fail-fast --interval "$REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS"; then
        echo "Required checks did not pass; refusing to enable auto-merge." >&2
        return 1
      fi
      return 0
    fi

    remaining=$((REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS - elapsed))
    if ((remaining <= 0)); then
      break
    fi

    # Never sleep past the remaining timeout budget.
    sleep_for="$REQUIRED_CHECKS_POLL_INTERVAL_SECONDS"
    if ((sleep_for > remaining)); then
      sleep_for="$remaining"
    fi

    sleep "$sleep_for"
    elapsed=$((elapsed + sleep_for))
  done

  echo "No required checks detected for PR within timeout; refusing to enable auto-merge." >&2
  return 1
}

if [[ "$(get_pr_field '.state')" != "OPEN" ]]; then
echo "PR is not open; skipping auto-merge enable."
exit 0
Expand Down Expand Up @@ -69,6 +136,23 @@ if [[ "$allow_rebase_merge" == "true" || "$allow_merge_commit" == "true" ]]; the
exit 1
fi

# Gate auto-merge on required checks. wait_for_required_checks returns:
#   0 - all required checks passed
#   1 - checks failed or never appeared (hard error)
#   2 - PR head moved (stale event); not an error, just skip this run
if wait_for_required_checks; then
  wait_status=0
else
  wait_status=$?
fi
if [[ "$wait_status" -eq 2 ]]; then
  exit 0
fi
if [[ "$wait_status" -ne 0 ]]; then
  exit 1
fi

# Final stale guard: the head may have moved while the check watch was running.
if is_stale_event; then
  echo "PR head moved after required checks completed; skipping stale auto-merge attempt."
  exit 0
fi

if attempt_automerge; then
echo "Auto-merge enabled with squash strategy."
exit 0
Expand Down
209 changes: 204 additions & 5 deletions scripts/tests/test_enable_dependabot_automerge.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,33 @@ def _write_stub_gh(path: Path) -> None:
shift
shift || true

next_sequence_value() {
local sequence="$1"
local default_value="$2"
local counter_key="$3"
local state_dir="${GH_STATE_DIR:?GH_STATE_DIR is required}"
local index_file="$state_dir/$counter_key.idx"
local index=0

if [[ -z "$sequence" ]]; then
printf '%s\\n' "$default_value"
return
fi

if [[ -f "$index_file" ]]; then
index="$(cat "$index_file")"
fi

IFS=',' read -r -a values <<< "$sequence"
if ((index < ${#values[@]})); then
printf '%s\\n' "${values[$index]}"
else
printf '%s\\n' "${values[$((${#values[@]} - 1))]}"
fi

printf '%s\\n' $((index + 1)) > "$index_file"
}

if [[ "$cmd" == "pr" && "$subcmd" == "view" ]]; then
jq_expr=""
while [[ $# -gt 0 ]]; do
Expand All @@ -37,7 +64,7 @@ def _write_stub_gh(path: Path) -> None:
".state") printf '%s\\n' "${GH_PR_STATE:-OPEN}" ;;
".isDraft") printf '%s\\n' "${GH_PR_DRAFT:-false}" ;;
".autoMergeRequest != null") printf '%s\\n' "${GH_PR_AUTO_MERGE:-false}" ;;
".headRefOid") printf '%s\\n' "${GH_PR_HEAD_OID:-head-sha}" ;;
".headRefOid") next_sequence_value "${GH_PR_HEAD_OID_SEQUENCE:-}" "${GH_PR_HEAD_OID:-head-sha}" "head_ref_oid" ;;
*) exit 1 ;;
esac
exit 0
Expand Down Expand Up @@ -76,6 +103,18 @@ def _write_stub_gh(path: Path) -> None:
exit 1
fi

if [[ "$cmd" == "pr" && "$subcmd" == "checks" ]]; then
printf '%s\\n' "pr checks $*" >> "${GH_LOG_PATH:?GH_LOG_PATH is required}"
if [[ "$*" == *"--json name --jq length"* ]]; then
next_sequence_value "${GH_REQUIRED_CHECKS_COUNT_SEQUENCE:-}" "${GH_REQUIRED_CHECKS_COUNT:-1}" "required_checks_count"
exit 0
fi
if [[ "$*" == *"--watch"* ]]; then
exit "${GH_CHECKS_WATCH_EXIT_CODE:-0}"
fi
exit 0
fi

exit 1
""",
encoding="utf-8",
Expand All @@ -96,6 +135,10 @@ def _run_script(tmp_path: Path, extra_env: dict[str, str]) -> subprocess.Complet
"GITHUB_REPOSITORY": "wallstop/fortress-rollback",
"GH_TOKEN": "fake-token",
"GH_LOG_PATH": str(log_path),
"GH_STATE_DIR": str(tmp_path),
"REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS": "0",
"REQUIRED_CHECKS_POLL_INTERVAL_SECONDS": "1",
"REQUIRED_CHECKS_WATCH_INTERVAL_SECONDS": "1",
}
)
env.update(extra_env)
Expand Down Expand Up @@ -129,10 +172,12 @@ def test_uses_squash_strategy_only(tmp_path: Path) -> None:
assert result.returncode == 0

log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
assert len(log_lines) == 1
assert "--squash" in log_lines[0]
assert "--rebase" not in log_lines[0]
assert "--merge" not in log_lines[0]
assert len(log_lines) == 3
assert "--json name --jq length" in log_lines[0]
assert "--watch" in log_lines[1]
assert "--squash" in log_lines[2]
assert "--rebase" not in log_lines[2]
assert "--merge" not in log_lines[2]


def test_skips_stale_event_without_merging(tmp_path: Path) -> None:
Expand All @@ -156,3 +201,157 @@ def test_fails_on_merge_policy_drift(tmp_path: Path) -> None:
assert "squash-only settings" in result.stderr
log_path = tmp_path / "gh.log"
assert not log_path.exists()


def test_fails_when_required_checks_are_missing(tmp_path: Path) -> None:
    """The script must refuse to enable auto-merge when no required checks appear."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT": "0",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
    }
    result = _run_script(tmp_path, overrides)

    assert result.returncode == 1
    assert "No required checks detected for PR within timeout" in result.stderr

    # Exactly one count poll should be logged; no watch, no merge attempt.
    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 1
    only_line = log_lines[0]
    assert "--json name --jq length" in only_line
    assert "--watch" not in only_line
    assert "pr merge" not in only_line


def test_fails_when_required_checks_fail(tmp_path: Path) -> None:
    """A failing required-checks watch must abort before any merge attempt."""
    overrides = {
        "GH_CHECKS_WATCH_EXIT_CODE": "1",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
    }
    result = _run_script(tmp_path, overrides)

    assert result.returncode == 1
    assert "Required checks did not pass" in result.stderr

    # One count poll plus one watch; no merge command anywhere in the log.
    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 2
    assert "--json name --jq length" in log_lines[0]
    assert "--watch" in log_lines[1]
    assert "pr merge" not in "\n".join(log_lines)


def test_waits_for_required_checks_to_appear_then_merges(tmp_path: Path) -> None:
    """Polling continues until checks register; then watch and squash-merge run."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT_SEQUENCE": "0,0,1",
        "GH_MERGE_SUCCESS_FLAG": "--squash",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
        "REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS": "3",
        "REQUIRED_CHECKS_POLL_INTERVAL_SECONDS": "1",
    }
    result = _run_script(tmp_path, overrides)
    assert result.returncode == 0

    # Three count polls (0, 0, 1), then the watch, then the squash merge.
    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 5
    for poll_line in log_lines[:3]:
        assert "--json name --jq length" in poll_line
    assert "--watch" in log_lines[3]
    assert "--squash" in log_lines[4]


def test_skips_when_head_becomes_stale_while_waiting(tmp_path: Path) -> None:
    """A head-SHA change during the appear-poll loop skips the merge cleanly."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT_SEQUENCE": "0,0,1",
        "GH_PR_HEAD_OID_SEQUENCE": "head-sha,head-sha,new-head-sha",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
        "REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS": "3",
        "REQUIRED_CHECKS_POLL_INTERVAL_SECONDS": "1",
    }
    result = _run_script(tmp_path, overrides)

    assert result.returncode == 0
    assert "PR head moved while waiting for required checks" in result.stdout
    # The stale point matters: the "completed" message must NOT be emitted here.
    assert "after required checks completed" not in result.stdout

    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 1
    only_line = log_lines[0]
    assert "--json name --jq length" in only_line
    assert "--watch" not in only_line
    assert "pr merge" not in only_line


def test_skips_when_head_becomes_stale_after_checks_appear(tmp_path: Path) -> None:
    """Staleness detected right after checks register must skip the watch phase."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT_SEQUENCE": "1",
        "GH_PR_HEAD_OID_SEQUENCE": "head-sha,head-sha,new-head-sha",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
    }
    result = _run_script(tmp_path, overrides)

    assert result.returncode == 0
    assert "PR head moved after required checks appeared" in result.stdout
    # The stale point matters: the "completed" message must NOT be emitted here.
    assert "after required checks completed" not in result.stdout

    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 1
    only_line = log_lines[0]
    assert "--json name --jq length" in only_line
    assert "--watch" not in only_line
    assert "pr merge" not in only_line


def test_skips_when_head_becomes_stale_after_checks_complete(tmp_path: Path) -> None:
    """Staleness after the watch finishes skips the merge but keeps the watch log."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT_SEQUENCE": "1",
        "GH_PR_HEAD_OID_SEQUENCE": "head-sha,head-sha,head-sha,new-head-sha",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
    }
    result = _run_script(tmp_path, overrides)

    assert result.returncode == 0
    assert "PR head moved after required checks completed" in result.stdout

    log_text = (tmp_path / "gh.log").read_text(encoding="utf-8")
    log_lines = log_text.splitlines()
    assert len(log_lines) == 2
    assert "--json name --jq length" in log_lines[0]
    assert "--watch" in log_lines[1]
    assert "pr merge" not in log_text


def test_caps_poll_sleep_to_remaining_timeout(tmp_path: Path) -> None:
    """With poll interval > timeout, the sleep is clamped so one retry still runs."""
    overrides = {
        "GH_REQUIRED_CHECKS_COUNT_SEQUENCE": "0,1",
        "GH_MERGE_SUCCESS_FLAG": "--squash",
        "GH_ALLOW_SQUASH": "true",
        "GH_ALLOW_REBASE": "false",
        "GH_ALLOW_MERGE": "false",
        "REQUIRED_CHECKS_APPEAR_TIMEOUT_SECONDS": "1",
        "REQUIRED_CHECKS_POLL_INTERVAL_SECONDS": "10",
    }
    result = _run_script(tmp_path, overrides)
    assert result.returncode == 0

    # Two count polls (0 then 1), the watch, and finally the squash merge.
    log_lines = (tmp_path / "gh.log").read_text(encoding="utf-8").splitlines()
    assert len(log_lines) == 4
    for poll_line in log_lines[:2]:
        assert "--json name --jq length" in poll_line
    assert "--watch" in log_lines[2]
    assert "--squash" in log_lines[3]
Loading