diff --git a/.github/workflows/agentics-maintenance.yml b/.github/workflows/agentics-maintenance.yml
index 13a0d8a884..4fcf9606ec 100644
--- a/.github/workflows/agentics-maintenance.yml
+++ b/.github/workflows/agentics-maintenance.yml
@@ -53,6 +53,7 @@ on:
- 'activity_report'
- 'close_agentic_workflows_issues'
- 'clean_cache_memories'
+ - 'update_pull_request_branches'
- 'validate'
run_url:
description: 'Run URL or run ID to replay safe outputs from (e.g. https://github.com/owner/repo/actions/runs/12345 or 12345). Required when operation is safe_outputs.'
@@ -62,7 +63,7 @@ on:
workflow_call:
inputs:
operation:
- description: 'Optional maintenance operation to run (disable, enable, update, upgrade, safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, validate)'
+ description: 'Optional maintenance operation to run (disable, enable, update, upgrade, safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, update_pull_request_branches, validate)'
required: false
type: string
default: ''
@@ -157,7 +158,7 @@ jobs:
await main();
run_operation:
- if: ${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation != '' && inputs.operation != 'safe_outputs' && inputs.operation != 'create_labels' && inputs.operation != 'activity_report' && inputs.operation != 'close_agentic_workflows_issues' && inputs.operation != 'clean_cache_memories' && inputs.operation != 'validate' && (!(github.event.repository.fork)) }}
+ if: ${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation != '' && inputs.operation != 'safe_outputs' && inputs.operation != 'create_labels' && inputs.operation != 'activity_report' && inputs.operation != 'close_agentic_workflows_issues' && inputs.operation != 'clean_cache_memories' && inputs.operation != 'update_pull_request_branches' && inputs.operation != 'validate' && (!(github.event.repository.fork)) }}
runs-on: ubuntu-slim
permissions:
actions: write
@@ -213,6 +214,46 @@ jobs:
id: record
run: echo "operation=${{ inputs.operation }}" >> "$GITHUB_OUTPUT"
+ update_pull_request_branches:
+ if: ${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'update_pull_request_branches' && (!(github.event.repository.fork)) }}
+ runs-on: ubuntu-slim
+ permissions:
+ pull-requests: write
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+
+ - name: Check admin/maintainer permissions
+ uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/check_team_member.cjs');
+ await main();
+
+ - name: Update pull request branches
+ uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/update_pull_request_branches.cjs');
+ await main();
+
apply_safe_outputs:
if: ${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'safe_outputs' && (!(github.event.repository.fork)) }}
runs-on: ubuntu-slim
diff --git a/actions/setup/js/run_operation_update_upgrade.cjs b/actions/setup/js/run_operation_update_upgrade.cjs
index c0dc3768d3..1da45d2774 100644
--- a/actions/setup/js/run_operation_update_upgrade.cjs
+++ b/actions/setup/js/run_operation_update_upgrade.cjs
@@ -45,7 +45,8 @@ function formatTimestamp(date) {
}
/**
- * Run 'gh aw update', 'gh aw upgrade', 'gh aw disable', or 'gh aw enable',
+ * Run maintenance operations handled by run_operation:
+ * - 'gh aw update', 'gh aw upgrade', 'gh aw disable', 'gh aw enable'
* creating a pull request when needed for update/upgrade operations.
*
* For update/upgrade: runs with --no-compile so lock files are not modified.
diff --git a/actions/setup/js/update_pull_request_branches.cjs b/actions/setup/js/update_pull_request_branches.cjs
new file mode 100644
index 0000000000..ccea1d609c
--- /dev/null
+++ b/actions/setup/js/update_pull_request_branches.cjs
@@ -0,0 +1,328 @@
+// @ts-check
+///
+
+const { getErrorMessage } = require("./error_helpers.cjs");
+const { withRetry, isTransientError, sleep } = require("./error_recovery.cjs");
+const { fetchAndLogRateLimit } = require("./github_rate_limit_logger.cjs");
+
+const ACTIVE_SESSION_STATES = new Set(["open", "active", "in_progress", "queued"]);
+const LIST_PULL_REQUESTS_PER_PAGE = 100;
+const SESSION_LIST_LIMIT = 1000;
+const SESSION_PAGE_SIZE = 100;
+const UPDATE_DELAY_MS = 1000;
+
+/**
+ * @param {unknown} value
+ * @returns {number | null}
+ */
+function parsePullRequestNumber(value) {
+ if (typeof value === "number" && Number.isInteger(value) && value > 0) return value;
+ if (typeof value !== "string") return null;
+ const trimmed = value.trim();
+ if (!trimmed) return null;
+ const parsed = Number.parseInt(trimmed, 10);
+ return Number.isInteger(parsed) && parsed > 0 ? parsed : null;
+}
+
+/**
+ * @param {unknown} value
+ * @returns {boolean}
+ */
+function isActiveSessionState(value) {
+ return typeof value === "string" && ACTIVE_SESSION_STATES.has(value.trim().toLowerCase());
+}
+
+/**
+ * @returns {Promise<Set<number>>}
+ */
+async function listPullRequestsWithActiveSessions() {
+ core.info("Listing agent sessions to identify PRs with active sessions");
+ const copilotApiURL = await getCopilotAPIURL();
+ core.info(`Resolved Copilot API endpoint for sessions: ${copilotApiURL}`);
+ core.info(`Fetching up to ${SESSION_LIST_LIMIT} sessions (page_size=${SESSION_PAGE_SIZE})`);
+
+ /** @type {Array<{resource_id?: number | string, state?: string, resource_type?: string}>} */
+ const sessions = [];
+ for (let pageNumber = 1; sessions.length < SESSION_LIST_LIMIT; pageNumber++) {
+ const pageSessions = await listAgentSessionsPage(copilotApiURL, pageNumber, SESSION_PAGE_SIZE);
+ core.info(`Fetched ${pageSessions.length} session(s) from page ${pageNumber}`);
+ if (pageSessions.length === 0) break;
+ sessions.push(...pageSessions);
+ if (pageSessions.length < SESSION_PAGE_SIZE) break;
+ }
+ if (sessions.length >= SESSION_LIST_LIMIT) {
+ core.warning(`Session list reached limit (${SESSION_LIST_LIMIT}); newer sessions may have been truncated`);
+ }
+ core.info(`Fetched ${sessions.length} total session record(s) for filtering`);
+
+ const prNumbers = new Set();
+ for (const session of sessions) {
+ if (session?.resource_type !== "pull") continue;
+ if (!isActiveSessionState(session?.state)) continue;
+ const prNumber = parsePullRequestNumber(session?.resource_id);
+ if (prNumber !== null) prNumbers.add(prNumber);
+ }
+
+ core.info(`Found ${prNumbers.size} pull request(s) with active agent sessions`);
+ return prNumbers;
+}
+
+/**
+ * @returns {Promise<string>}
+ */
+async function getCopilotAPIURL() {
+ core.info("Resolving Copilot API endpoint from GraphQL viewer.copilotEndpoints.api");
+ const response = await github.graphql(`
+ query CopilotEndpointsForSessionListing {
+ viewer {
+ copilotEndpoints {
+ api
+ }
+ }
+ }
+ `);
+ const apiURL = response?.viewer?.copilotEndpoints?.api;
+ if (typeof apiURL !== "string" || !apiURL.trim()) {
+ throw new Error("Unable to resolve Copilot API URL for session listing");
+ }
+ const normalizedAPIURL = apiURL.replace(/\/+$/, "");
+ core.info(`Copilot API endpoint resolved: ${normalizedAPIURL}`);
+ return normalizedAPIURL;
+}
+
+/**
+ * @param {string} copilotApiURL
+ * @param {number} pageNumber
+ * @param {number} pageSize
+ * @returns {Promise<Array<{resource_id?: number | string, state?: string, resource_type?: string}>>}
+ */
+async function listAgentSessionsPage(copilotApiURL, pageNumber, pageSize) {
+ const token = process.env.GH_TOKEN || process.env.GITHUB_TOKEN;
+ if (!token) throw new Error("Missing GH_TOKEN/GITHUB_TOKEN for Copilot session listing");
+
+ const sessionsURL = new URL(`${copilotApiURL}/agents/sessions`);
+ sessionsURL.searchParams.set("page_size", String(pageSize));
+ sessionsURL.searchParams.set("page_number", String(pageNumber));
+ sessionsURL.searchParams.set("sort", "last_updated_at,desc");
+ core.debug(`Requesting Copilot sessions page ${pageNumber}: ${sessionsURL.origin}${sessionsURL.pathname} (page_size=${pageSize})`);
+
+ const response = await fetch(sessionsURL.toString(), {
+ method: "GET",
+ headers: {
+ Accept: "application/json",
+ Authorization: `Bearer ${token}`,
+ "User-Agent": "gh-aw-update-pull-request-branches",
+ },
+ });
+
+ if (!response.ok) {
+ const truncatedBody = await readResponsePreview(response, 500);
+ core.error(`Failed to list agent sessions page ${pageNumber}: HTTP ${response.status} ${response.statusText}`);
+ if (truncatedBody) {
+ core.error(`Copilot sessions error response (truncated): ${truncatedBody}`);
+ }
+ throw new Error(`Failed to list agent sessions: HTTP ${response.status}`);
+ }
+
+ const rawBody = await response.json();
+ /** @type {any} */
+ const body = rawBody;
+ return Array.isArray(body?.sessions) ? body.sessions : [];
+}
+
+/**
+ * @param {Response} response
+ * @param {number} maxChars
+ * @returns {Promise<string>}
+ */
+async function readResponsePreview(response, maxChars) {
+ if (!response.body) return "";
+ let reader;
+ try {
+ reader = response.body.getReader();
+ } catch (error) {
+ core.debug(`Failed to open error response preview stream (non-critical): ${getErrorMessage(error)}`);
+ return "";
+ }
+ const decoder = new TextDecoder();
+ let result = "";
+
+ try {
+ while (result.length < maxChars) {
+ const { done, value } = await reader.read();
+ if (done || !value) break;
+ result += decoder.decode(value, { stream: true });
+ }
+ } catch (error) {
+ core.debug(`Failed to read error response preview for debugging (non-critical): ${getErrorMessage(error)}`);
+ return "";
+ } finally {
+ reader.releaseLock();
+ }
+
+ return result.slice(0, maxChars);
+}
+
+/**
+ * @param {number[]} pullNumbers
+ * @returns {Promise<number[]>}
+ */
+async function filterPullRequestsWithoutActiveSessions(pullNumbers) {
+ const pullRequestsWithSessions = await listPullRequestsWithActiveSessions();
+ const eligiblePullRequests = pullNumbers.filter(number => !pullRequestsWithSessions.has(number));
+ core.info(`Found ${eligiblePullRequests.length} eligible pull request(s) without active sessions`);
+ return eligiblePullRequests;
+}
+
+/**
+ * @param {string} owner
+ * @param {string} repo
+ * @returns {Promise<number[]>}
+ */
+async function listOpenPullRequests(owner, repo) {
+ const pulls = await github.paginate(github.rest.pulls.list, {
+ owner,
+ repo,
+ state: "open",
+ per_page: LIST_PULL_REQUESTS_PER_PAGE,
+ });
+
+ return pulls.map(pr => pr.number).filter(number => Number.isInteger(number));
+}
+
+/**
+ * @param {string} owner
+ * @param {string} repo
+ * @param {number[]} pullNumbers
+ * @returns {Promise<number[]>}
+ */
+async function filterMergeablePullRequests(owner, repo, pullNumbers) {
+ const mergeable = [];
+
+ for (const pullNumber of pullNumbers) {
+ const { data: pull } = await withRetry(
+ () =>
+ github.rest.pulls.get({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ }),
+ {
+ maxRetries: 2,
+ initialDelayMs: 500,
+ maxDelayMs: 2000,
+ jitterMs: 0,
+ shouldRetry: isTransientError,
+ },
+ `fetch pull request #${pullNumber}`
+ );
+
+ const isMergeable = pull?.state === "open" && pull?.mergeable === true && pull?.draft !== true;
+ if (isMergeable) {
+ mergeable.push(pullNumber);
+ continue;
+ }
+
+ core.info(`Skipping PR #${pullNumber}: mergeable=${String(pull?.mergeable)}, state=${pull?.state || "unknown"}, draft=${String(Boolean(pull?.draft))}`);
+ }
+
+ return mergeable;
+}
+
+/**
+ * @param {unknown} error
+ * @returns {boolean}
+ */
+function isNonFatalUpdateBranchError(error) {
+ if (typeof error === "object" && error !== null && "status" in error && error.status === 422) {
+ return true;
+ }
+
+ const message = getErrorMessage(error).toLowerCase();
+ return message.includes("update branch failed") || message.includes("head branch is not behind");
+}
+
+/**
+ * @param {string} owner
+ * @param {string} repo
+ * @param {number} pullNumber
+ * @returns {Promise<void>}
+ */
+async function updatePullRequestBranch(owner, repo, pullNumber) {
+ await withRetry(
+ () =>
+ github.rest.pulls.updateBranch({
+ owner,
+ repo,
+ pull_number: pullNumber,
+ }),
+ {
+ maxRetries: 2,
+ initialDelayMs: 1000,
+ maxDelayMs: 10000,
+ shouldRetry: isTransientError,
+ },
+ `update branch for pull request #${pullNumber}`
+ );
+}
+
+/**
+ * Update all mergeable PR branches that do not have active agent sessions.
+ * @returns {Promise<void>}
+ */
+async function main() {
+ const owner = context.repo.owner;
+ const repo = context.repo.repo;
+
+ core.info(`Updating pull request branches in ${owner}/${repo}`);
+ await fetchAndLogRateLimit(github, "update_pull_request_branches_start");
+
+ const openPullRequests = await listOpenPullRequests(owner, repo);
+ core.info(`Found ${openPullRequests.length} open pull request(s)`);
+ if (openPullRequests.length === 0) return;
+
+ const mergeablePullRequests = await filterMergeablePullRequests(owner, repo, openPullRequests);
+ core.info(`Found ${mergeablePullRequests.length} mergeable pull request(s)`);
+ if (mergeablePullRequests.length === 0) return;
+
+ const eligiblePullRequests = await filterPullRequestsWithoutActiveSessions(mergeablePullRequests);
+ if (eligiblePullRequests.length === 0) return;
+
+ let updatedCount = 0;
+ let skippedCount = 0;
+ let failedCount = 0;
+
+ for (let i = 0; i < eligiblePullRequests.length; i++) {
+ const pullNumber = eligiblePullRequests[i];
+ try {
+ core.info(`Updating branch for PR #${pullNumber}`);
+ await updatePullRequestBranch(owner, repo, pullNumber);
+ updatedCount++;
+ } catch (error) {
+ if (isNonFatalUpdateBranchError(error)) {
+ skippedCount++;
+ core.warning(`Skipping PR #${pullNumber}: ${getErrorMessage(error)}`);
+ } else {
+ failedCount++;
+ core.error(`Failed to update branch for PR #${pullNumber}: ${getErrorMessage(error)}`);
+ }
+ }
+
+ if (i < eligiblePullRequests.length - 1) {
+ await sleep(UPDATE_DELAY_MS);
+ }
+ }
+
+ await fetchAndLogRateLimit(github, "update_pull_request_branches_end");
+ core.notice(`update_pull_request_branches completed: updated=${updatedCount}, skipped=${skippedCount}, failed=${failedCount}`);
+}
+
+module.exports = {
+ main,
+ parsePullRequestNumber,
+ isActiveSessionState,
+ listPullRequestsWithActiveSessions,
+ filterPullRequestsWithoutActiveSessions,
+ filterMergeablePullRequests,
+ isNonFatalUpdateBranchError,
+};
diff --git a/actions/setup/js/update_pull_request_branches.test.cjs b/actions/setup/js/update_pull_request_branches.test.cjs
new file mode 100644
index 0000000000..e2ef8e2a7f
--- /dev/null
+++ b/actions/setup/js/update_pull_request_branches.test.cjs
@@ -0,0 +1,142 @@
+// @ts-check
+import { describe, it, expect, beforeEach, vi } from "vitest";
+
+vi.mock("./github_rate_limit_logger.cjs", () => ({
+ fetchAndLogRateLimit: vi.fn().mockResolvedValue(undefined),
+}));
+
+const moduleUnderTest = await import("./update_pull_request_branches.cjs");
+
+describe("update_pull_request_branches", () => {
+ /** @type {any} */
+ let mockCore;
+ /** @type {any} */
+ let mockGithub;
+ /** @type {any} */
+ let mockContext;
+ /** @type {any} */
+ let fetchMock;
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+
+ mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ debug: vi.fn(),
+ notice: vi.fn(),
+ };
+ mockGithub = {
+ paginate: vi.fn(),
+ graphql: vi.fn(),
+ rest: {
+ pulls: {
+ list: vi.fn(),
+ get: vi.fn(),
+ updateBranch: vi.fn(),
+ },
+ },
+ };
+ mockContext = {
+ repo: {
+ owner: "owner",
+ repo: "repo",
+ },
+ };
+
+ global.core = mockCore;
+ global.github = mockGithub;
+ global.context = mockContext;
+ fetchMock = vi.fn();
+ global.fetch = fetchMock;
+ process.env.GH_TOKEN = "test-token";
+ });
+
+ it("updates only mergeable pull requests without active sessions", async () => {
+ mockGithub.paginate.mockResolvedValue([{ number: 1 }, { number: 2 }, { number: 3 }]);
+ mockGithub.rest.pulls.get.mockImplementation(async ({ pull_number }) => {
+ if (pull_number === 1) return { data: { state: "open", mergeable: true, draft: false } };
+ if (pull_number === 2) return { data: { state: "open", mergeable: false, draft: false } };
+ return { data: { state: "open", mergeable: true, draft: false } };
+ });
+ mockGithub.graphql.mockResolvedValue({ viewer: { copilotEndpoints: { api: "https://api.copilot.test" } } });
+ fetchMock.mockResolvedValue({
+ ok: true,
+ json: async () => ({
+ sessions: [
+ { resource_id: 3, state: "open", resource_type: "pull" },
+ { resource_id: 10, state: "closed", resource_type: "pull" },
+ ],
+ }),
+ });
+ mockGithub.rest.pulls.updateBranch.mockResolvedValue({ data: {} });
+
+ await moduleUnderTest.main();
+
+ expect(mockGithub.rest.pulls.updateBranch).toHaveBeenCalledTimes(1);
+ expect(mockGithub.rest.pulls.updateBranch).toHaveBeenCalledWith({
+ owner: "owner",
+ repo: "repo",
+ pull_number: 1,
+ });
+ });
+
+ it("continues on non-fatal updateBranch failures", async () => {
+ mockGithub.paginate.mockResolvedValue([{ number: 7 }]);
+ mockGithub.rest.pulls.get.mockResolvedValue({ data: { state: "open", mergeable: true, draft: false } });
+ mockGithub.graphql.mockResolvedValue({ viewer: { copilotEndpoints: { api: "https://api.copilot.test" } } });
+ fetchMock.mockResolvedValue({
+ ok: true,
+ json: async () => ({ sessions: [] }),
+ });
+ const err = new Error("Update branch failed");
+ // @ts-ignore
+ err.status = 422;
+ mockGithub.rest.pulls.updateBranch.mockRejectedValue(err);
+
+ await expect(moduleUnderTest.main()).resolves.not.toThrow();
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Skipping PR #7"));
+ });
+
+ it("parses pull request numbers and active states correctly", () => {
+ expect(moduleUnderTest.parsePullRequestNumber(12)).toBe(12);
+ expect(moduleUnderTest.parsePullRequestNumber("34")).toBe(34);
+ expect(moduleUnderTest.parsePullRequestNumber("0")).toBeNull();
+ expect(moduleUnderTest.parsePullRequestNumber("not-a-number")).toBeNull();
+
+ expect(moduleUnderTest.isActiveSessionState("OPEN")).toBe(true);
+ expect(moduleUnderTest.isActiveSessionState("in_progress")).toBe(true);
+ expect(moduleUnderTest.isActiveSessionState("closed")).toBe(false);
+ });
+
+ it("filters candidate pull requests to only those without active sessions", async () => {
+ mockGithub.graphql.mockResolvedValue({ viewer: { copilotEndpoints: { api: "https://api.copilot.test" } } });
+ fetchMock.mockResolvedValue({
+ ok: true,
+ json: async () => ({
+ sessions: [
+ { resource_id: 2, state: "OPEN", resource_type: "pull" },
+ { resource_id: 9, state: "queued", resource_type: "pull" },
+ ],
+ }),
+ });
+
+ const result = await moduleUnderTest.filterPullRequestsWithoutActiveSessions([1, 2, 3]);
+
+ expect(result).toEqual([1, 3]);
+ });
+
+ it("ignores draft pull requests when filtering mergeable pull requests", async () => {
+ mockGithub.rest.pulls.get.mockImplementation(async ({ pull_number }) => {
+ if (pull_number === 1) return { data: { state: "open", mergeable: true, draft: true } };
+ if (pull_number === 2) return { data: { state: "open", mergeable: true, draft: false } };
+ return { data: { state: "open", mergeable: false, draft: false } };
+ });
+
+ const result = await moduleUnderTest.filterMergeablePullRequests("owner", "repo", [1, 2, 3]);
+
+ expect(result).toEqual([2]);
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("Skipping PR #1"));
+ });
+});
diff --git a/pkg/workflow/maintenance_workflow_test.go b/pkg/workflow/maintenance_workflow_test.go
index 852f3fd78d..b540c06e03 100644
--- a/pkg/workflow/maintenance_workflow_test.go
+++ b/pkg/workflow/maintenance_workflow_test.go
@@ -282,9 +282,10 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
yaml := string(content)
operationSkipCondition := `github.event_name != 'workflow_dispatch' && github.event_name != 'workflow_call' || inputs.operation == ''`
- operationRunCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation != '' && inputs.operation != 'safe_outputs' && inputs.operation != 'create_labels' && inputs.operation != 'activity_report' && inputs.operation != 'close_agentic_workflows_issues' && inputs.operation != 'clean_cache_memories' && inputs.operation != 'validate'`
+ operationRunCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation != '' && inputs.operation != 'safe_outputs' && inputs.operation != 'create_labels' && inputs.operation != 'activity_report' && inputs.operation != 'close_agentic_workflows_issues' && inputs.operation != 'clean_cache_memories' && inputs.operation != 'update_pull_request_branches' && inputs.operation != 'validate'`
applySafeOutputsCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'safe_outputs'`
createLabelsCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'create_labels'`
+ updatePullRequestBranchesCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'update_pull_request_branches'`
activityReportCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'activity_report'`
closeAgenticWorkflowIssuesCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'close_agentic_workflows_issues'`
cleanCacheMemoriesCondition := `github.event_name != 'workflow_dispatch' && github.event_name != 'workflow_call' || inputs.operation == '' || inputs.operation == 'clean_cache_memories'`
@@ -356,6 +357,20 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
}
}
+ // update_pull_request_branches job should be triggered when operation == 'update_pull_request_branches'
+ updatePullRequestBranchesIdx := strings.Index(yaml, "\n update_pull_request_branches:")
+ if updatePullRequestBranchesIdx == -1 {
+ t.Errorf("Job update_pull_request_branches not found in generated workflow")
+ } else {
+ updatePullRequestBranchesSection := yaml[updatePullRequestBranchesIdx : updatePullRequestBranchesIdx+runOpSectionSearchRange]
+ if !strings.Contains(updatePullRequestBranchesSection, updatePullRequestBranchesCondition) {
+ t.Errorf("Job update_pull_request_branches should have the activation condition %q in:\n%s", updatePullRequestBranchesCondition, updatePullRequestBranchesSection)
+ }
+ if !strings.Contains(updatePullRequestBranchesSection, "pull-requests: write") {
+ t.Errorf("Job update_pull_request_branches should include pull-requests: write permission in:\n%s", updatePullRequestBranchesSection)
+ }
+ }
+
// validate_workflows job should be triggered when operation == 'validate'
validateCondition := `(github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call') && inputs.operation == 'validate'`
validateIdx := strings.Index(yaml, "\n validate_workflows:")
@@ -454,6 +469,11 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
t.Error("workflow_dispatch operation choices should include 'clean_cache_memories'")
}
+ // Verify update_pull_request_branches is an option in the operation choices
+ if !strings.Contains(yaml, "- 'update_pull_request_branches'") {
+ t.Error("workflow_dispatch operation choices should include 'update_pull_request_branches'")
+ }
+
// Verify validate is an option in the operation choices
if !strings.Contains(yaml, "- 'validate'") {
t.Error("workflow_dispatch operation choices should include 'validate'")
diff --git a/pkg/workflow/maintenance_workflow_yaml.go b/pkg/workflow/maintenance_workflow_yaml.go
index 3dd0362056..05319c68e4 100644
--- a/pkg/workflow/maintenance_workflow_yaml.go
+++ b/pkg/workflow/maintenance_workflow_yaml.go
@@ -62,6 +62,7 @@ on:
- 'activity_report'
- 'close_agentic_workflows_issues'
- 'clean_cache_memories'
+ - 'update_pull_request_branches'
- 'validate'
run_url:
description: 'Run URL or run ID to replay safe outputs from (e.g. https://github.com/owner/repo/actions/runs/12345 or 12345). Required when operation is safe_outputs.'
@@ -71,7 +72,7 @@ on:
workflow_call:
inputs:
operation:
- description: 'Optional maintenance operation to run (disable, enable, update, upgrade, safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, validate)'
+ description: 'Optional maintenance operation to run (disable, enable, update, upgrade, safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, update_pull_request_branches, validate)'
required: false
type: string
default: ''
@@ -196,8 +197,8 @@ jobs:
`)
// Add unified run_operation job for all dispatch operations except those with dedicated jobs
- // (safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, validate)
- runOperationCondition := buildRunOperationCondition("safe_outputs", "create_labels", "activity_report", "close_agentic_workflows_issues", "clean_cache_memories", "validate")
+ // (safe_outputs, create_labels, activity_report, close_agentic_workflows_issues, clean_cache_memories, update_pull_request_branches, validate)
+ runOperationCondition := buildRunOperationCondition("safe_outputs", "create_labels", "activity_report", "close_agentic_workflows_issues", "clean_cache_memories", "update_pull_request_branches", "validate")
yaml.WriteString(`
run_operation:
if: ${{ ` + RenderCondition(runOperationCondition) + ` }}
@@ -251,6 +252,54 @@ jobs:
run: echo "operation=${{ inputs.operation }}" >> "$GITHUB_OUTPUT"
`)
+ // Add update_pull_request_branches job for workflow_dispatch with operation == 'update_pull_request_branches'
+ yaml.WriteString(`
+ update_pull_request_branches:
+ if: ${{ ` + RenderCondition(buildDispatchOperationCondition("update_pull_request_branches")) + ` }}
+ runs-on: ` + runsOnValue + `
+ permissions:
+ pull-requests: write
+ steps:
+`)
+
+ // Add checkout step only in dev/script mode (for local action paths)
+ if actionMode == ActionModeDev || actionMode == ActionModeScript {
+ yaml.WriteString(" - name: Checkout actions folder\n")
+ yaml.WriteString(" uses: " + getActionPin("actions/checkout") + "\n")
+ yaml.WriteString(" with:\n")
+ yaml.WriteString(" sparse-checkout: |\n")
+ yaml.WriteString(" actions\n")
+ yaml.WriteString(" persist-credentials: false\n\n")
+ }
+
+ yaml.WriteString(` - name: Setup Scripts
+ uses: ` + setupActionRef + `
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+
+ - name: Check admin/maintainer permissions
+ uses: ` + getCachedActionPinFromResolver("actions/github-script", resolver) + `
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/check_team_member.cjs');
+ await main();
+
+ - name: Update pull request branches
+ uses: ` + getCachedActionPinFromResolver("actions/github-script", resolver) + `
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/update_pull_request_branches.cjs');
+ await main();
+`)
+
// Add apply_safe_outputs job for workflow_dispatch with operation == 'safe_outputs'
yaml.WriteString(`
apply_safe_outputs: