From 7f8551308c7b6542b4427232d0457ba93b1559b3 Mon Sep 17 00:00:00 2001
From: docs-bot <77750099+docs-bot@users.noreply.github.com>
Date: Mon, 1 Dec 2025 06:39:06 -0800
Subject: [PATCH 01/17] GraphQL schema update (#58611)
Co-authored-by: heiskr <1221423+heiskr@users.noreply.github.com>
---
src/graphql/data/fpt/changelog.json | 22 +++
src/graphql/data/fpt/schema.docs.graphql | 151 +++++++++++++++--
src/graphql/data/fpt/schema.json | 187 +++++++++++++++++++++-
src/graphql/data/ghec/schema.docs.graphql | 151 +++++++++++++++--
src/graphql/data/ghec/schema.json | 187 +++++++++++++++++++++-
5 files changed, 666 insertions(+), 32 deletions(-)
diff --git a/src/graphql/data/fpt/changelog.json b/src/graphql/data/fpt/changelog.json
index 0ae892d0365e..5735597dbfef 100644
--- a/src/graphql/data/fpt/changelog.json
+++ b/src/graphql/data/fpt/changelog.json
@@ -1,4 +1,26 @@
[
+ {
+ "schemaChanges": [
+ {
+ "title": "The GraphQL schema includes these changes:",
+ "changes": [
+ "
Type SuggestedReviewerActor was added
",
+ "Type SuggestedReviewerActorConnection was added
",
+ "Type SuggestedReviewerActorEdge was added
",
+ "Enum value 'ISSUE_FIELD_ADDED_EVENTwas added to enumIssueTimelineItemsItemType'
",
+ "Enum value 'ISSUE_FIELD_CHANGED_EVENTwas added to enumIssueTimelineItemsItemType'
",
+ "Enum value 'ISSUE_FIELD_REMOVED_EVENTwas added to enumIssueTimelineItemsItemType'
",
+ "Field suggestedReviewerActors was added to object type PullRequest
",
+ "Enum value 'ISSUE_FIELD_ADDED_EVENTwas added to enumPullRequestTimelineItemsItemType'
",
+ "Enum value 'ISSUE_FIELD_CHANGED_EVENTwas added to enumPullRequestTimelineItemsItemType'
",
+ "Enum value 'ISSUE_FIELD_REMOVED_EVENTwas added to enumPullRequestTimelineItemsItemType'
"
+ ]
+ }
+ ],
+ "previewChanges": [],
+ "upcomingChanges": [],
+ "date": "2025-11-30"
+ },
{
"schemaChanges": [
{
diff --git a/src/graphql/data/fpt/schema.docs.graphql b/src/graphql/data/fpt/schema.docs.graphql
index 9a9a6d015156..a052abfcbb8a 100644
--- a/src/graphql/data/fpt/schema.docs.graphql
+++ b/src/graphql/data/fpt/schema.docs.graphql
@@ -7335,7 +7335,9 @@ type ConvertedToDiscussionEvent implements Node {
}
"""
-Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+Request Copilot code review for new pull requests automatically if the author
+has access to Copilot code review and their premium requests quota has not
+reached the limit.
"""
type CopilotCodeReviewParameters {
"""
@@ -7350,7 +7352,9 @@ type CopilotCodeReviewParameters {
}
"""
-Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+Request Copilot code review for new pull requests automatically if the author
+has access to Copilot code review and their premium requests quota has not
+reached the limit.
"""
input CopilotCodeReviewParametersInput {
"""
@@ -20648,6 +20652,21 @@ enum IssueTimelineItemsItemType {
"""
ISSUE_COMMENT
+ """
+ Represents a 'issue_field_added' event on a given issue.
+ """
+ ISSUE_FIELD_ADDED_EVENT
+
+ """
+ Represents a 'issue_field_changed' event on a given issue.
+ """
+ ISSUE_FIELD_CHANGED_EVENT
+
+ """
+ Represents a 'issue_field_removed' event on a given issue.
+ """
+ ISSUE_FIELD_REMOVED_EVENT
+
"""
Represents a 'issue_type_added' event on a given issue.
"""
@@ -41291,6 +41310,31 @@ type PullRequest implements Assignable & Closable & Comment & Labelable & Lockab
query: String
): AssigneeConnection!
+ """
+ Reviewer actor suggestions based on commit history, past review comments, and integrations.
+ """
+ suggestedReviewerActors(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+ ): SuggestedReviewerActorConnection!
+
"""
A list of reviewer suggestions based on commit history and past review comments.
"""
@@ -41905,7 +41949,9 @@ type PullRequestParameters {
allowedMergeMethods: [PullRequestAllowedMergeMethods!]
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
automaticCopilotCodeReviewEnabled: Boolean!
@@ -41953,7 +41999,9 @@ input PullRequestParametersInput {
allowedMergeMethods: [PullRequestAllowedMergeMethods!]
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
automaticCopilotCodeReviewEnabled: Boolean
@@ -43519,6 +43567,21 @@ enum PullRequestTimelineItemsItemType {
"""
ISSUE_COMMENT
+ """
+ Represents a 'issue_field_added' event on a given issue.
+ """
+ ISSUE_FIELD_ADDED_EVENT
+
+ """
+ Represents a 'issue_field_changed' event on a given issue.
+ """
+ ISSUE_FIELD_CHANGED_EVENT
+
+ """
+ Represents a 'issue_field_removed' event on a given issue.
+ """
+ ISSUE_FIELD_REMOVED_EVENT
+
"""
Represents a 'issue_type_added' event on a given issue.
"""
@@ -45719,9 +45782,13 @@ input RemoveAssigneesFromAssignableInput {
assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
"""
- The id of users to remove as assignees.
+ The ids of actors to remove as assignees.
"""
- assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+ assigneeIds: [ID!]!
+ @possibleTypes(
+ concreteTypes: ["Bot", "EnterpriseUserAccount", "Mannequin", "Organization", "User"]
+ abstractType: "Actor"
+ )
"""
A unique identifier for the client performing the mutation.
@@ -53261,7 +53328,9 @@ enum RepositoryRuleType {
COMMIT_MESSAGE_PATTERN
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
COPILOT_CODE_REVIEW
@@ -60390,6 +60459,66 @@ type SuggestedReviewer {
reviewer: User!
}
+"""
+A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
+"""
+type SuggestedReviewerActor {
+ """
+ Is this suggestion based on past commits?
+ """
+ isAuthor: Boolean!
+
+ """
+ Is this suggestion based on past review comments?
+ """
+ isCommenter: Boolean!
+
+ """
+ Identifies the actor suggested to review the pull request.
+ """
+ reviewer: Actor!
+}
+
+"""
+A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
+"""
+type SuggestedReviewerActorConnection {
+ """
+ A list of edges.
+ """
+ edges: [SuggestedReviewerActorEdge]
+
+ """
+ A list of nodes.
+ """
+ nodes: [SuggestedReviewerActor]
+
+ """
+ Information to aid in pagination.
+ """
+ pageInfo: PageInfo!
+
+ """
+ Identifies the total count of items in the connection.
+ """
+ totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SuggestedReviewerActorEdge {
+ """
+ A cursor for use in pagination.
+ """
+ cursor: String!
+
+ """
+ The item at the end of the edge.
+ """
+ node: SuggestedReviewerActor
+}
+
"""
Represents a Git tag.
"""
@@ -65626,9 +65755,13 @@ Autogenerated input type of UpdateIssue
"""
input UpdateIssueInput {
"""
- An array of Node IDs of users for this issue.
+ An array of Node IDs of users or bots for this issue.
"""
- assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+ assigneeIds: [ID!]
+ @possibleTypes(
+ concreteTypes: ["Bot", "EnterpriseUserAccount", "Mannequin", "Organization", "User"]
+ abstractType: "Actor"
+ )
"""
The body for the issue description.
diff --git a/src/graphql/data/fpt/schema.json b/src/graphql/data/fpt/schema.json
index 534ae6d97d30..4766206c9fcc 100644
--- a/src/graphql/data/fpt/schema.json
+++ b/src/graphql/data/fpt/schema.json
@@ -17559,7 +17559,7 @@
"kind": "objects",
"id": "copilotcodereviewparameters",
"href": "/graphql/reference/objects#copilotcodereviewparameters",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"fields": [
{
"name": "reviewDraftPullRequests",
@@ -55187,6 +55187,56 @@
}
]
},
+ {
+ "name": "suggestedReviewerActors",
+ "description": "Reviewer actor suggestions based on commit history, past review comments, and integrations.
",
+ "type": "SuggestedReviewerActorConnection!",
+ "id": "suggestedrevieweractorconnection",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractorconnection",
+ "arguments": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.
",
+ "type": {
+ "name": "String",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ }
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.
",
+ "type": {
+ "name": "String",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ }
+ },
+ {
+ "name": "first",
+ "description": "Returns the first n elements from the list.
",
+ "type": {
+ "name": "Int",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ },
+ {
+ "name": "last",
+ "description": "Returns the last n elements from the list.
",
+ "type": {
+ "name": "Int",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ }
+ ]
+ },
{
"name": "suggestedReviewers",
"description": "A list of reviewer suggestions based on commit history and past review comments.
",
@@ -56134,7 +56184,7 @@
},
{
"name": "automaticCopilotCodeReviewEnabled",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"type": "Boolean!",
"id": "boolean",
"kind": "scalars",
@@ -75054,6 +75104,105 @@
}
]
},
+ {
+ "name": "SuggestedReviewerActor",
+ "kind": "objects",
+ "id": "suggestedrevieweractor",
+ "href": "/graphql/reference/objects#suggestedrevieweractor",
+ "description": "A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
",
+ "fields": [
+ {
+ "name": "isAuthor",
+ "description": "Is this suggestion based on past commits?.
",
+ "type": "Boolean!",
+ "id": "boolean",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#boolean"
+ },
+ {
+ "name": "isCommenter",
+ "description": "Is this suggestion based on past review comments?.
",
+ "type": "Boolean!",
+ "id": "boolean",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#boolean"
+ },
+ {
+ "name": "reviewer",
+ "description": "Identifies the actor suggested to review the pull request.
",
+ "type": "Actor!",
+ "id": "actor",
+ "kind": "interfaces",
+ "href": "/graphql/reference/interfaces#actor"
+ }
+ ]
+ },
+ {
+ "name": "SuggestedReviewerActorConnection",
+ "kind": "objects",
+ "id": "suggestedrevieweractorconnection",
+ "href": "/graphql/reference/objects#suggestedrevieweractorconnection",
+ "description": "A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.
",
+ "type": "[SuggestedReviewerActorEdge]",
+ "id": "suggestedrevieweractoredge",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractoredge"
+ },
+ {
+ "name": "nodes",
+ "description": "A list of nodes.
",
+ "type": "[SuggestedReviewerActor]",
+ "id": "suggestedrevieweractor",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractor"
+ },
+ {
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.
",
+ "type": "PageInfo!",
+ "id": "pageinfo",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#pageinfo"
+ },
+ {
+ "name": "totalCount",
+ "description": "Identifies the total count of items in the connection.
",
+ "type": "Int!",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ ]
+ },
+ {
+ "name": "SuggestedReviewerActorEdge",
+ "kind": "objects",
+ "id": "suggestedrevieweractoredge",
+ "href": "/graphql/reference/objects#suggestedrevieweractoredge",
+ "description": "An edge in a connection.
",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.
",
+ "type": "String!",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ },
+ {
+ "name": "node",
+ "description": "The item at the end of the edge.
",
+ "type": "SuggestedReviewerActor",
+ "id": "suggestedrevieweractor",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractor"
+ }
+ ]
+ },
{
"name": "Tag",
"kind": "objects",
@@ -90453,6 +90602,18 @@
"name": "ISSUE_COMMENT",
"description": "Represents a comment on an Issue.
"
},
+      {
+        "name": "ISSUE_FIELD_ADDED_EVENT",
+        "description": "Represents a 'issue_field_added' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_CHANGED_EVENT",
+        "description": "Represents a 'issue_field_changed' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_REMOVED_EVENT",
+        "description": "Represents a 'issue_field_removed' event on a given issue."
+      },
{
"name": "ISSUE_TYPE_ADDED_EVENT",
"description": "Represents aissue_type_addedevent on a given issue.
"
@@ -92636,6 +92797,18 @@
"name": "ISSUE_COMMENT",
"description": "Represents a comment on an Issue.
"
},
+      {
+        "name": "ISSUE_FIELD_ADDED_EVENT",
+        "description": "Represents a 'issue_field_added' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_CHANGED_EVENT",
+        "description": "Represents a 'issue_field_changed' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_REMOVED_EVENT",
+        "description": "Represents a 'issue_field_removed' event on a given issue."
+      },
{
"name": "ISSUE_TYPE_ADDED_EVENT",
"description": "Represents aissue_type_addedevent on a given issue.
"
@@ -93413,7 +93586,7 @@
},
{
"name": "COPILOT_CODE_REVIEW",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
"
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
"
},
{
"name": "CREATION",
@@ -100474,7 +100647,7 @@
"kind": "inputObjects",
"id": "copilotcodereviewparametersinput",
"href": "/graphql/reference/input-objects#copilotcodereviewparametersinput",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"inputFields": [
{
"name": "reviewDraftPullRequests",
@@ -106190,7 +106363,7 @@
},
{
"name": "automaticCopilotCodeReviewEnabled",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"type": "Boolean",
"id": "boolean",
"kind": "scalars",
@@ -106499,7 +106672,7 @@
},
{
"name": "assigneeIds",
- "description": "The id of users to remove as assignees.
",
+ "description": "The ids of actors to remove as assignees.
",
"type": "[ID!]!",
"id": "id",
"kind": "scalars",
@@ -110782,7 +110955,7 @@
"inputFields": [
{
"name": "assigneeIds",
- "description": "An array of Node IDs of users for this issue.
",
+ "description": "An array of Node IDs of users or bots for this issue.
",
"type": "[ID!]",
"id": "id",
"kind": "scalars",
diff --git a/src/graphql/data/ghec/schema.docs.graphql b/src/graphql/data/ghec/schema.docs.graphql
index 9a9a6d015156..a052abfcbb8a 100644
--- a/src/graphql/data/ghec/schema.docs.graphql
+++ b/src/graphql/data/ghec/schema.docs.graphql
@@ -7335,7 +7335,9 @@ type ConvertedToDiscussionEvent implements Node {
}
"""
-Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+Request Copilot code review for new pull requests automatically if the author
+has access to Copilot code review and their premium requests quota has not
+reached the limit.
"""
type CopilotCodeReviewParameters {
"""
@@ -7350,7 +7352,9 @@ type CopilotCodeReviewParameters {
}
"""
-Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+Request Copilot code review for new pull requests automatically if the author
+has access to Copilot code review and their premium requests quota has not
+reached the limit.
"""
input CopilotCodeReviewParametersInput {
"""
@@ -20648,6 +20652,21 @@ enum IssueTimelineItemsItemType {
"""
ISSUE_COMMENT
+ """
+ Represents a 'issue_field_added' event on a given issue.
+ """
+ ISSUE_FIELD_ADDED_EVENT
+
+ """
+ Represents a 'issue_field_changed' event on a given issue.
+ """
+ ISSUE_FIELD_CHANGED_EVENT
+
+ """
+ Represents a 'issue_field_removed' event on a given issue.
+ """
+ ISSUE_FIELD_REMOVED_EVENT
+
"""
Represents a 'issue_type_added' event on a given issue.
"""
@@ -41291,6 +41310,31 @@ type PullRequest implements Assignable & Closable & Comment & Labelable & Lockab
query: String
): AssigneeConnection!
+ """
+ Reviewer actor suggestions based on commit history, past review comments, and integrations.
+ """
+ suggestedReviewerActors(
+ """
+ Returns the elements in the list that come after the specified cursor.
+ """
+ after: String
+
+ """
+ Returns the elements in the list that come before the specified cursor.
+ """
+ before: String
+
+ """
+ Returns the first _n_ elements from the list.
+ """
+ first: Int
+
+ """
+ Returns the last _n_ elements from the list.
+ """
+ last: Int
+ ): SuggestedReviewerActorConnection!
+
"""
A list of reviewer suggestions based on commit history and past review comments.
"""
@@ -41905,7 +41949,9 @@ type PullRequestParameters {
allowedMergeMethods: [PullRequestAllowedMergeMethods!]
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
automaticCopilotCodeReviewEnabled: Boolean!
@@ -41953,7 +41999,9 @@ input PullRequestParametersInput {
allowedMergeMethods: [PullRequestAllowedMergeMethods!]
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
automaticCopilotCodeReviewEnabled: Boolean
@@ -43519,6 +43567,21 @@ enum PullRequestTimelineItemsItemType {
"""
ISSUE_COMMENT
+ """
+ Represents a 'issue_field_added' event on a given issue.
+ """
+ ISSUE_FIELD_ADDED_EVENT
+
+ """
+ Represents a 'issue_field_changed' event on a given issue.
+ """
+ ISSUE_FIELD_CHANGED_EVENT
+
+ """
+ Represents a 'issue_field_removed' event on a given issue.
+ """
+ ISSUE_FIELD_REMOVED_EVENT
+
"""
Represents a 'issue_type_added' event on a given issue.
"""
@@ -45719,9 +45782,13 @@ input RemoveAssigneesFromAssignableInput {
assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
"""
- The id of users to remove as assignees.
+ The ids of actors to remove as assignees.
"""
- assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+ assigneeIds: [ID!]!
+ @possibleTypes(
+ concreteTypes: ["Bot", "EnterpriseUserAccount", "Mannequin", "Organization", "User"]
+ abstractType: "Actor"
+ )
"""
A unique identifier for the client performing the mutation.
@@ -53261,7 +53328,9 @@ enum RepositoryRuleType {
COMMIT_MESSAGE_PATTERN
"""
- Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
+ Request Copilot code review for new pull requests automatically if the author
+ has access to Copilot code review and their premium requests quota has not
+ reached the limit.
"""
COPILOT_CODE_REVIEW
@@ -60390,6 +60459,66 @@ type SuggestedReviewer {
reviewer: User!
}
+"""
+A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
+"""
+type SuggestedReviewerActor {
+ """
+ Is this suggestion based on past commits?
+ """
+ isAuthor: Boolean!
+
+ """
+ Is this suggestion based on past review comments?
+ """
+ isCommenter: Boolean!
+
+ """
+ Identifies the actor suggested to review the pull request.
+ """
+ reviewer: Actor!
+}
+
+"""
+A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
+"""
+type SuggestedReviewerActorConnection {
+ """
+ A list of edges.
+ """
+ edges: [SuggestedReviewerActorEdge]
+
+ """
+ A list of nodes.
+ """
+ nodes: [SuggestedReviewerActor]
+
+ """
+ Information to aid in pagination.
+ """
+ pageInfo: PageInfo!
+
+ """
+ Identifies the total count of items in the connection.
+ """
+ totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SuggestedReviewerActorEdge {
+ """
+ A cursor for use in pagination.
+ """
+ cursor: String!
+
+ """
+ The item at the end of the edge.
+ """
+ node: SuggestedReviewerActor
+}
+
"""
Represents a Git tag.
"""
@@ -65626,9 +65755,13 @@ Autogenerated input type of UpdateIssue
"""
input UpdateIssueInput {
"""
- An array of Node IDs of users for this issue.
+ An array of Node IDs of users or bots for this issue.
"""
- assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+ assigneeIds: [ID!]
+ @possibleTypes(
+ concreteTypes: ["Bot", "EnterpriseUserAccount", "Mannequin", "Organization", "User"]
+ abstractType: "Actor"
+ )
"""
The body for the issue description.
diff --git a/src/graphql/data/ghec/schema.json b/src/graphql/data/ghec/schema.json
index 534ae6d97d30..4766206c9fcc 100644
--- a/src/graphql/data/ghec/schema.json
+++ b/src/graphql/data/ghec/schema.json
@@ -17559,7 +17559,7 @@
"kind": "objects",
"id": "copilotcodereviewparameters",
"href": "/graphql/reference/objects#copilotcodereviewparameters",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"fields": [
{
"name": "reviewDraftPullRequests",
@@ -55187,6 +55187,56 @@
}
]
},
+ {
+ "name": "suggestedReviewerActors",
+ "description": "Reviewer actor suggestions based on commit history, past review comments, and integrations.
",
+ "type": "SuggestedReviewerActorConnection!",
+ "id": "suggestedrevieweractorconnection",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractorconnection",
+ "arguments": [
+ {
+ "name": "after",
+ "description": "Returns the elements in the list that come after the specified cursor.
",
+ "type": {
+ "name": "String",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ }
+ },
+ {
+ "name": "before",
+ "description": "Returns the elements in the list that come before the specified cursor.
",
+ "type": {
+ "name": "String",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ }
+ },
+ {
+ "name": "first",
+ "description": "Returns the first n elements from the list.
",
+ "type": {
+ "name": "Int",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ },
+ {
+ "name": "last",
+ "description": "Returns the last n elements from the list.
",
+ "type": {
+ "name": "Int",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ }
+ ]
+ },
{
"name": "suggestedReviewers",
"description": "A list of reviewer suggestions based on commit history and past review comments.
",
@@ -56134,7 +56184,7 @@
},
{
"name": "automaticCopilotCodeReviewEnabled",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"type": "Boolean!",
"id": "boolean",
"kind": "scalars",
@@ -75054,6 +75104,105 @@
}
]
},
+ {
+ "name": "SuggestedReviewerActor",
+ "kind": "objects",
+ "id": "suggestedrevieweractor",
+ "href": "/graphql/reference/objects#suggestedrevieweractor",
+ "description": "A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
",
+ "fields": [
+ {
+ "name": "isAuthor",
+ "description": "Is this suggestion based on past commits?.
",
+ "type": "Boolean!",
+ "id": "boolean",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#boolean"
+ },
+ {
+ "name": "isCommenter",
+ "description": "Is this suggestion based on past review comments?.
",
+ "type": "Boolean!",
+ "id": "boolean",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#boolean"
+ },
+ {
+ "name": "reviewer",
+ "description": "Identifies the actor suggested to review the pull request.
",
+ "type": "Actor!",
+ "id": "actor",
+ "kind": "interfaces",
+ "href": "/graphql/reference/interfaces#actor"
+ }
+ ]
+ },
+ {
+ "name": "SuggestedReviewerActorConnection",
+ "kind": "objects",
+ "id": "suggestedrevieweractorconnection",
+ "href": "/graphql/reference/objects#suggestedrevieweractorconnection",
+ "description": "A suggestion to review a pull request based on an actor's commit history, review comments, and integrations.
",
+ "fields": [
+ {
+ "name": "edges",
+ "description": "A list of edges.
",
+ "type": "[SuggestedReviewerActorEdge]",
+ "id": "suggestedrevieweractoredge",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractoredge"
+ },
+ {
+ "name": "nodes",
+ "description": "A list of nodes.
",
+ "type": "[SuggestedReviewerActor]",
+ "id": "suggestedrevieweractor",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractor"
+ },
+ {
+ "name": "pageInfo",
+ "description": "Information to aid in pagination.
",
+ "type": "PageInfo!",
+ "id": "pageinfo",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#pageinfo"
+ },
+ {
+ "name": "totalCount",
+ "description": "Identifies the total count of items in the connection.
",
+ "type": "Int!",
+ "id": "int",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#int"
+ }
+ ]
+ },
+ {
+ "name": "SuggestedReviewerActorEdge",
+ "kind": "objects",
+ "id": "suggestedrevieweractoredge",
+ "href": "/graphql/reference/objects#suggestedrevieweractoredge",
+ "description": "An edge in a connection.
",
+ "fields": [
+ {
+ "name": "cursor",
+ "description": "A cursor for use in pagination.
",
+ "type": "String!",
+ "id": "string",
+ "kind": "scalars",
+ "href": "/graphql/reference/scalars#string"
+ },
+ {
+ "name": "node",
+ "description": "The item at the end of the edge.
",
+ "type": "SuggestedReviewerActor",
+ "id": "suggestedrevieweractor",
+ "kind": "objects",
+ "href": "/graphql/reference/objects#suggestedrevieweractor"
+ }
+ ]
+ },
{
"name": "Tag",
"kind": "objects",
@@ -90453,6 +90602,18 @@
"name": "ISSUE_COMMENT",
"description": "Represents a comment on an Issue.
"
},
+      {
+        "name": "ISSUE_FIELD_ADDED_EVENT",
+        "description": "Represents a 'issue_field_added' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_CHANGED_EVENT",
+        "description": "Represents a 'issue_field_changed' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_REMOVED_EVENT",
+        "description": "Represents a 'issue_field_removed' event on a given issue."
+      },
{
"name": "ISSUE_TYPE_ADDED_EVENT",
"description": "Represents aissue_type_addedevent on a given issue.
"
@@ -92636,6 +92797,18 @@
"name": "ISSUE_COMMENT",
"description": "Represents a comment on an Issue.
"
},
+      {
+        "name": "ISSUE_FIELD_ADDED_EVENT",
+        "description": "Represents a 'issue_field_added' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_CHANGED_EVENT",
+        "description": "Represents a 'issue_field_changed' event on a given issue."
+      },
+      {
+        "name": "ISSUE_FIELD_REMOVED_EVENT",
+        "description": "Represents a 'issue_field_removed' event on a given issue."
+      },
{
"name": "ISSUE_TYPE_ADDED_EVENT",
"description": "Represents aissue_type_addedevent on a given issue.
"
@@ -93413,7 +93586,7 @@
},
{
"name": "COPILOT_CODE_REVIEW",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
"
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
"
},
{
"name": "CREATION",
@@ -100474,7 +100647,7 @@
"kind": "inputObjects",
"id": "copilotcodereviewparametersinput",
"href": "/graphql/reference/input-objects#copilotcodereviewparametersinput",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"inputFields": [
{
"name": "reviewDraftPullRequests",
@@ -106190,7 +106363,7 @@
},
{
"name": "automaticCopilotCodeReviewEnabled",
- "description": "Request Copilot code review for new pull requests automatically if the author has access to Copilot code review.
",
+ "description": "Request Copilot code review for new pull requests automatically if the author\nhas access to Copilot code review and their premium requests quota has not\nreached the limit.
",
"type": "Boolean",
"id": "boolean",
"kind": "scalars",
@@ -106499,7 +106672,7 @@
},
{
"name": "assigneeIds",
- "description": "The id of users to remove as assignees.
",
+ "description": "The ids of actors to remove as assignees.
",
"type": "[ID!]!",
"id": "id",
"kind": "scalars",
@@ -110782,7 +110955,7 @@
"inputFields": [
{
"name": "assigneeIds",
- "description": "An array of Node IDs of users for this issue.
",
+ "description": "An array of Node IDs of users or bots for this issue.
",
"type": "[ID!]",
"id": "id",
"kind": "scalars",
From 735394822aed810a20da218d9c35661317b213e0 Mon Sep 17 00:00:00 2001
From: Kensuke Nagae
Date: Mon, 1 Dec 2025 23:42:43 +0900
Subject: [PATCH 02/17] Add new section to explain Docker Hub's rate limit for
GitHub Actions (#58630)
Co-authored-by: Joe Clark <31087804+jc-clark@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
content/actions/reference/limits.md | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/content/actions/reference/limits.md b/content/actions/reference/limits.md
index a06a730dcd57..cc6bcb2572c6 100644
--- a/content/actions/reference/limits.md
+++ b/content/actions/reference/limits.md
@@ -83,3 +83,9 @@ When using larger runners with vnet injection, you need to determine the appropr
* **OAuth apps \-** {% data reusables.rest-api.primary-rate-limit-oauth-apps %}
* **GITHUB TOKEN** \- {% data reusables.rest-api.primary-rate-limit-github-token-in-actions %}
* **Secondary rate limits** \- In addition to primary rate limits, {% data variables.product.github %} enforces secondary rate limits in order to prevent abuse and keep the API available for all users, these are not configurable with GHEC. For more information, see [AUTOTITLE](/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#about-secondary-rate-limits).
+
+### Docker Hub's rate limit for {% data variables.product.prodname_actions %}
+
+* **{% data variables.product.github %}-hosted runners pulling public images:** Docker Hub's rate limit is not applied.
+* **{% data variables.product.github %}-hosted runners pulling private images:** Pulling private images from Docker Hub is subject to the rate limit.
+* **Self-hosted runners pulling public or private images:** Pulling images from Docker Hub is always subject to the rate limit.
\ No newline at end of file
From 9708c4c1b1f1e67c967391d7a7a489aca05359eb Mon Sep 17 00:00:00 2001
From: mc <42146119+mchammer01@users.noreply.github.com>
Date: Mon, 1 Dec 2025 14:43:49 +0000
Subject: [PATCH 03/17] Secret scanning sends detections in secret gists to
partners (#56885)
---
.../introduction/about-secret-scanning-for-partners.md | 4 ++++
.../secret-scanning/introduction/about-secret-scanning.md | 6 ++++++
.../creating-gists.md | 2 ++
data/reusables/secret-scanning/what-is-scanned.md | 6 +-----
4 files changed, 13 insertions(+), 5 deletions(-)
diff --git a/content/code-security/secret-scanning/introduction/about-secret-scanning-for-partners.md b/content/code-security/secret-scanning/introduction/about-secret-scanning-for-partners.md
index 132774ff2f1a..d69f24a76854 100644
--- a/content/code-security/secret-scanning/introduction/about-secret-scanning-for-partners.md
+++ b/content/code-security/secret-scanning/introduction/about-secret-scanning-for-partners.md
@@ -18,6 +18,10 @@ shortTitle: Secret scanning for partners
> [!NOTE]You cannot change the configuration of {% data variables.product.prodname_secret_scanning %} for partner patterns on public repositories.
+{% data variables.secret-scanning.partner_alerts_caps %} scans:
+
+{% data reusables.secret-scanning.what-is-scanned %}
+
The reason partner alerts are directly sent to the secret providers whenever a leak is detected for one of their secrets is that this enables the provider to take immediate action to protect you and protect their resources. The notification process for regular alerts is different. Regular alerts are displayed on the repository's **Security** tab on {% data variables.product.prodname_dotcom %} for you to resolve.
{% data reusables.secret-scanning.secret-scanning-pattern-pair-matches %}
diff --git a/content/code-security/secret-scanning/introduction/about-secret-scanning.md b/content/code-security/secret-scanning/introduction/about-secret-scanning.md
index 5cf865533d2d..2bbc52e356d1 100644
--- a/content/code-security/secret-scanning/introduction/about-secret-scanning.md
+++ b/content/code-security/secret-scanning/introduction/about-secret-scanning.md
@@ -29,8 +29,14 @@ shortTitle: Secret scanning
{% data variables.product.prodname_secret_scanning_caps %} scans your entire Git history on all branches present in your {% data variables.product.prodname_dotcom %} repository for secrets, even if the repository is archived. {% data variables.product.prodname_dotcom %} will also periodically run a full Git history scan for new secret types in existing content in {% ifversion fpt or ghec %}public repositories{% else %}repositories with {% data variables.product.prodname_GH_secret_protection %} enabled{% endif %} where {% data variables.product.prodname_secret_scanning %} is enabled when new supported secret types are added.
+Additionally, {% data variables.product.prodname_secret_scanning %} scans:
+
{% data reusables.secret-scanning.what-is-scanned %}
+{% ifversion fpt or ghec %}
+This additional scanning is free for public repositories.
+{% endif %}
+
{% ifversion ghas-products %}{% ifversion secret-risk-assessment %}
> [!TIP]
> Regardless of the enablement status of {% data variables.product.prodname_AS %} features, organizations on {% data variables.product.prodname_team %} and {% data variables.product.prodname_enterprise %} can run a free report to scan the code in the organization for leaked secrets.
diff --git a/content/get-started/writing-on-github/editing-and-sharing-content-with-gists/creating-gists.md b/content/get-started/writing-on-github/editing-and-sharing-content-with-gists/creating-gists.md
index f0058cb90e65..e0db2628e907 100644
--- a/content/get-started/writing-on-github/editing-and-sharing-content-with-gists/creating-gists.md
+++ b/content/get-started/writing-on-github/editing-and-sharing-content-with-gists/creating-gists.md
@@ -22,6 +22,8 @@ Gists can be public or secret. Public gists show up in {% data variables.gists.d
Secret gists don't show up in {% data variables.gists.discover_url %} and are not searchable unless you are logged in and are the author of the secret gist. Secret gists aren't private. If you send the URL of a secret gist to a friend, they'll be able to see it. However, if someone you don't know discovers the URL, they'll also be able to see your gist. If you need to keep your code away from prying eyes, you may want to [create a private repository](/repositories/creating-and-managing-repositories/creating-a-new-repository) instead.
+For {% data variables.product.prodname_dotcom_the_website %} and {% data variables.product.prodname_ghe_cloud %}, {% data variables.product.github %} automatically scans _secret gists_ for partner secrets and informs the relevant partner whenever one of their secrets is leaked.{% ifversion fpt or ghec %} See [AUTOTITLE](/code-security/secret-scanning/introduction/about-secret-scanning-for-partners).{% endif %}
+
{% data reusables.gist.cannot-convert-public-gists-to-secret %} However, a secret gist can be made public by editing the gist and updating the visibility to public.
{% ifversion ghes %}
diff --git a/data/reusables/secret-scanning/what-is-scanned.md b/data/reusables/secret-scanning/what-is-scanned.md
index f19c5b340cdb..bbc061ef39ff 100644
--- a/data/reusables/secret-scanning/what-is-scanned.md
+++ b/data/reusables/secret-scanning/what-is-scanned.md
@@ -1,12 +1,8 @@
-Additionally, {% data variables.product.prodname_secret_scanning %} scans:
* Descriptions and comments in issues
* Titles, descriptions, and comments, in open and closed _historical_ issues{% ifversion fpt or ghec %}. A notification is sent to the relevant partner when a historical partner pattern is detected.{% endif %}
* Titles, descriptions, and comments in pull requests
* Titles, descriptions, and comments in {% data variables.product.prodname_discussions %}{% ifversion secret-scanning-enhancements-wikis %}
* Wikis{% endif %}
-
-{% ifversion fpt or ghec %}
-This additional scanning is free for public repositories.
-{% endif %}
+* Secret gists. A notification is sent to the relevant partner when a partner pattern is detected in a secret gist.
{% data reusables.secret-scanning.beta-prs-discussions-wikis-scanned %}
From aaf38f519150fc3a16e4912b52ea54532e8aea62 Mon Sep 17 00:00:00 2001
From: Yeikel Santana
Date: Mon, 1 Dec 2025 09:46:42 -0500
Subject: [PATCH 04/17] Document that Dependabot supports updates by commit and
commit+tag/release (#41378)
---
.../actions/dependabot-version-updates-actions-caveats.md | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/data/reusables/actions/dependabot-version-updates-actions-caveats.md b/data/reusables/actions/dependabot-version-updates-actions-caveats.md
index 9c3902afbdef..4587964a6080 100644
--- a/data/reusables/actions/dependabot-version-updates-actions-caveats.md
+++ b/data/reusables/actions/dependabot-version-updates-actions-caveats.md
@@ -1,3 +1,5 @@
-* {% data variables.product.prodname_dependabot %} only supports updates to {% data variables.product.prodname_actions %} using the {% data variables.product.prodname_dotcom %} repository syntax, such as `{% data reusables.actions.action-checkout %}`. {% data variables.product.prodname_dependabot %} will ignore actions or reusable workflows referenced locally (for example, `./.github/actions/foo.yml`).
+* {% data variables.product.prodname_dependabot %} only supports updates to {% data variables.product.prodname_actions %} using the {% data variables.product.prodname_dotcom %} repository syntax, such as `{% data reusables.actions.action-checkout %}` or `actions/checkout@<git-sha>`. {% data variables.product.prodname_dependabot %} will ignore actions or reusable workflows referenced locally (for example, `./.github/actions/foo.yml`).
+* {% data variables.product.prodname_dependabot %} updates the version documentation of {% data variables.product.prodname_actions %} when the comment is on the same line, such as `actions/checkout@<git-sha> # <tag>` or `actions/checkout@<git-sha> # <release>`.
+* If the commit you use is not associated with any tag, {% data variables.product.prodname_dependabot %} will update the {% data variables.product.prodname_actions %} to the latest commit (which might differ from the latest release).
* Docker Hub and {% data variables.product.prodname_registry %} {% data variables.product.prodname_container_registry %} URLs are currently not supported. For example, references to Docker container actions using `docker://` syntax aren't supported.
* {% data variables.product.prodname_dependabot %} supports both public and private repositories for {% data variables.product.prodname_actions %}. For private registry configuration options, see "`git`" in [AUTOTITLE](/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#git).
From 3f77737a1a8acaeff9fe0c6bcbe67f0a1dbcf9c6 Mon Sep 17 00:00:00 2001
From: Copilot <198982749+Copilot@users.noreply.github.com>
Date: Mon, 1 Dec 2025 14:52:25 +0000
Subject: [PATCH 05/17] Remove public preview markers for Copilot Coding Agent
(now GA) (#58494)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: timrogers <116134+timrogers@users.noreply.github.com>
Co-authored-by: Tim Rogers
Co-authored-by: Sarita Iyer <66540150+saritai@users.noreply.github.com>
Co-authored-by: Tim Rogers
---
content/copilot/concepts/tools/ai-tools.md | 4 ++--
.../enable-developers/integrate-ai-agents.md | 2 +-
data/reusables/copilot/differences-cfi-cfb-table.md | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/content/copilot/concepts/tools/ai-tools.md b/content/copilot/concepts/tools/ai-tools.md
index bcd5f544dce1..5748e82c6bdd 100644
--- a/content/copilot/concepts/tools/ai-tools.md
+++ b/content/copilot/concepts/tools/ai-tools.md
@@ -37,7 +37,7 @@ The use of AI tools is increasingly becoming a standard part of a software devel
* **Deployment**:
* **{% data variables.copilot.copilot_chat_short %}** can help you configure continuous integration and continuous deployment (CI/CD) pipelines.
* **Operation**:
- * **{% data variables.copilot.copilot_coding_agent %}** ({% data variables.release-phases.public_preview %}) can raise pull requests for open issues.
+ * **{% data variables.copilot.copilot_coding_agent %}** can raise pull requests for open issues.
* **{% data variables.copilot.copilot_chat_short %}** can help with tasks you're working on yourself.
## Planning
@@ -125,7 +125,7 @@ The deployment phase involves preparing your code for production and ensuring a
During the operation phase, the focus is on maintaining and monitoring your application in production to ensure it runs smoothly and meets user expectations. This phase often involves tasks like debugging production issues, optimizing performance, and ensuring system reliability.
-You can use the **{% data variables.copilot.copilot_coding_agent %}** ({% data variables.release-phases.public_preview %}) as an autonomous agent that can help maintain and improve your application in production. Assign a {% data variables.product.github %} issue to {% data variables.product.prodname_copilot_short %}, and it will autonomously explore the repository, identify potential fixes, and create a pull request with the proposed changes. Then it will automatically request a review from you.
+You can use the **{% data variables.copilot.copilot_coding_agent %}** as an autonomous agent that can help maintain and improve your application in production. Assign a {% data variables.product.github %} issue to {% data variables.product.prodname_copilot_short %}, and it will autonomously explore the repository, identify potential fixes, and create a pull request with the proposed changes. Then it will automatically request a review from you.
For issues you're tackling yourself, use **{% data variables.copilot.copilot_chat_short %}** for help analyzing logs, debugging issues, and suggesting optimizations. For example:
diff --git a/content/copilot/tutorials/roll-out-at-scale/enable-developers/integrate-ai-agents.md b/content/copilot/tutorials/roll-out-at-scale/enable-developers/integrate-ai-agents.md
index b2fa2691be2a..a7963243e40f 100644
--- a/content/copilot/tutorials/roll-out-at-scale/enable-developers/integrate-ai-agents.md
+++ b/content/copilot/tutorials/roll-out-at-scale/enable-developers/integrate-ai-agents.md
@@ -158,7 +158,7 @@ To integrate agentic AI features effectively into your workstreams, you'll need
| Custom instructions | [AUTOTITLE](/copilot/customizing-copilot/adding-repository-custom-instructions-for-github-copilot?tool=vscode) |
| {% data variables.copilot.copilot_code-review_short %} | [AUTOTITLE](/copilot/how-tos/agents/copilot-code-review/automatic-code-review) |
| {% data variables.copilot.copilot_custom_agents %} | [AUTOTITLE](/copilot/concepts/agents/coding-agent/about-custom-agents) |
-| {% data variables.copilot.copilot_coding_agent %} ({% data variables.release-phases.public_preview %}) | [AUTOTITLE](/copilot/rolling-out-github-copilot-at-scale/enabling-developers/using-copilot-coding-agent-in-org) |
+| {% data variables.copilot.copilot_coding_agent %} | [AUTOTITLE](/copilot/rolling-out-github-copilot-at-scale/enabling-developers/using-copilot-coding-agent-in-org) |
| {% data variables.copilot.copilot_autofix_short %} | [AUTOTITLE](/code-security/code-scanning/enabling-code-scanning/configuring-default-setup-for-code-scanning) |
{% endrowheaders %}
diff --git a/data/reusables/copilot/differences-cfi-cfb-table.md b/data/reusables/copilot/differences-cfi-cfb-table.md
index a025a0a3f708..dba95f7a3c94 100644
--- a/data/reusables/copilot/differences-cfi-cfb-table.md
+++ b/data/reusables/copilot/differences-cfi-cfb-table.md
@@ -14,7 +14,7 @@
| Agents | {% data variables.copilot.copilot_free_short %} | {% data variables.copilot.copilot_pro_short %} | {% data variables.copilot.copilot_pro_plus_short %} | {% data variables.copilot.copilot_business_short %} | {% data variables.copilot.copilot_enterprise_short %} |
| --- | --- | --- | --- | --- | --- |
-| {% data variables.copilot.copilot_coding_agent %} ({% data variables.release-phases.public_preview %}) | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
+| {% data variables.copilot.copilot_coding_agent %} | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| Agent mode | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.product.prodname_copilot_short %} code review | Only "Review selection" in {% data variables.product.prodname_vscode_shortname %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| Model Context Protocol (MCP) | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
From c4ed83adb3cbf4174c0b3813b735520fad875171 Mon Sep 17 00:00:00 2001
From: Felicity Chapman
Date: Mon, 1 Dec 2025 15:25:41 +0000
Subject: [PATCH 06/17] Minor updates for upcoming billing date change for
self-serve, metered GHEC users (#58585)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
content/billing/concepts/billing-cycles.md | 12 ++++++++----
content/billing/get-started/how-billing-works.md | 6 ++++--
2 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/content/billing/concepts/billing-cycles.md b/content/billing/concepts/billing-cycles.md
index ecf4d1354309..ae3ffc19bed4 100644
--- a/content/billing/concepts/billing-cycles.md
+++ b/content/billing/concepts/billing-cycles.md
@@ -26,13 +26,17 @@ Your billing experience depends on whether your products are metered, volume-bas
## Billing cycles for metered products
-Metered products have a fixed **billing period** that starts at 00:00:00 UTC on the first day of each month and ends at 23:59:59 UTC on the last day of the month.
+Metered products, and all payments made using an Azure subscription ID, have a fixed **billing period** that starts at 00:00:00 UTC on the first day of each month and ends at 23:59:59 UTC on the last day of the month.
-At the end of each month, your metered usage is calculated and scheduled to be billed on your **bill cycle day**.
+At the end of each month, your metered usage is calculated and scheduled to be billed on your **billing date**. Accounts using an Azure subscription ID can access their specific billing date in the Azure commerce portal. For users with other payment methods:
-{% ifversion fpt %}For personal accounts and organizations, your bill cycle day is typically the day you started a paid plan (not necessarily when the account was created).{% elsif ghec %}Your bill cycle day is typically determined by when you converted from a trial to a paid enterprise account.{% endif %} For example, if you {% ifversion fpt %}started a paid plan{% elsif ghec %}converted from a trial{% endif %} on the 15th of a month, you will be billed on the 15th of each subsequent month.
+* **Personal and organization accounts:** This is typically the day you started a paid plan (not necessarily when the account was created).
+* **Enterprise accounts:** This is typically determined by when you converted from a trial to a paid enterprise account.
-> [!NOTE] If you are paying via an Azure subscription ID, your **billing period** will run from the first day of each month to the last day of the month. To access your specific **bill cycle day**, please visit the Azure commerce portal.
+For example, if you started a paid plan or converted from a trial on the 15th of a month, you will be billed on the 15th of each subsequent month.
+
+> [!NOTE]
> From **December 1, 2025**, all self-serve, metered {% data variables.product.prodname_ghe_cloud %} accounts that pay by credit card will migrate to a **billing date** of the 1st of the month. See [Billing date standardized to the first of the month for self-serve credit card metered Enterprise customers](https://github.blog/changelog/2025-11-17-billing-date-standardized-to-the-first-of-the-month-for-self-serve-credit-card-metered-enterprise-customers-now-generally-available/) in the changelog.
## Billing cycles for volume-based products
diff --git a/content/billing/get-started/how-billing-works.md b/content/billing/get-started/how-billing-works.md
index 371f53b9c1af..c7a372131737 100644
--- a/content/billing/get-started/how-billing-works.md
+++ b/content/billing/get-started/how-billing-works.md
@@ -61,9 +61,11 @@ If required, {% data variables.product.prodname_ghe_cloud %} accounts can reques
Each account has a **billing date** and a **billing cycle**.
-For credit card and PayPal payments, the billing date is the day you started a paid plan (not necessarily when the account was created). For example, if you started a paid plan on the 15th of a month, you will be billed on the 15th of each subsequent month. For payments using an Azure subscription ID, the billing date is available in the Azure commerce portal.
+For **personal and organization accounts** set up for credit card and PayPal payments, the **billing date** is typically the day you started a paid plan (not necessarily when the account was created). For example, if you started a paid plan on the 15th of a month, you will be billed on the 15th of each subsequent month. For payments using an Azure subscription ID, the billing date is available in the Azure commerce portal.
-Most users pay for {% data variables.product.github %} using metered billing. The billing cycle for all metered products is a fixed period from the first day to the last day of the month.
+For **enterprise accounts**, your **billing date** will vary. See [AUTOTITLE](/billing/concepts/billing-cycles).
+
+Most users pay for {% data variables.product.github %} using metered billing. The **billing cycle** for all metered products is a fixed period from the first day to the last day of the month.
### Authorization holds
From 30c3938574e5d199f60b19317289cb975993e026 Mon Sep 17 00:00:00 2001
From: Sarah Schneider
Date: Mon, 1 Dec 2025 10:25:52 -0500
Subject: [PATCH 07/17] Show message that links to info on suppressing a linter
rule (#58570)
---
src/content-linter/scripts/lint-content.ts | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/src/content-linter/scripts/lint-content.ts b/src/content-linter/scripts/lint-content.ts
index f84ec31df84d..a5ab8ef7f7b8 100755
--- a/src/content-linter/scripts/lint-content.ts
+++ b/src/content-linter/scripts/lint-content.ts
@@ -284,6 +284,11 @@ async function main() {
// Ensure previous console logging is not truncated
console.log('\n')
const took = end - start
+ if (warningFileCount > 0 || errorFileCount > 0) {
+ spinner.info(
+      `💡 You can disable linter rules for specific lines or blocks of text. See https://gh.io/suppress-linter-rule.\n\n`,
+ )
+ }
spinner.info(
     `📦 Markdownlint finished in ${(took > 1000 ? took / 1000 : took).toFixed(1)} ${
took > 1000 ? 's' : 'ms'
From 4936053007ed6db1e58b2194e7a3454cd39ed37c Mon Sep 17 00:00:00 2001
From: Evan Bonsignori
Date: Mon, 1 Dec 2025 07:26:08 -0800
Subject: [PATCH 08/17] clean up Next.js config (#58556)
---
next.config.ts | 26 ++++++--------------------
1 file changed, 6 insertions(+), 20 deletions(-)
diff --git a/next.config.ts b/next.config.ts
index b3106dd10c1a..3bc08e9c78d6 100644
--- a/next.config.ts
+++ b/next.config.ts
@@ -37,7 +37,7 @@ const config: NextConfig = {
},
// Don't use automatic Next.js logging in dev unless the log level is `debug` or higher
// See `src/observability/logger/README.md` for log levels
- logging: getLogLevelNumber() < 3 ? false : {},
+ logging: getLogLevelNumber() < 3 ? undefined : {},
async rewrites() {
const DEFAULT_VERSION = 'free-pro-team@latest'
return productIds.map((productId) => {
@@ -48,6 +48,11 @@ const config: NextConfig = {
})
},
+ webpack: (webpackConfig) => {
+ webpackConfig.resolve.fallback = { fs: false, async_hooks: false }
+ return webpackConfig
+ },
+
// Turbopack is the default bundler in Next.js 16
// Keep webpack config for now to support both bundlers
@@ -63,13 +68,6 @@ const config: NextConfig = {
},
},
- webpack: (webpackConfig) => {
- webpackConfig.experiments = webpackConfig.experiments || {}
- webpackConfig.experiments.topLevelAwait = true
- webpackConfig.resolve.fallback = { fs: false, async_hooks: false }
- return webpackConfig
- },
-
// https://nextjs.org/docs/api-reference/next.config.js/compression
compress: false,
@@ -79,18 +77,6 @@ const config: NextConfig = {
// the CDN marks the cached content as "fresh".
generateEtags: false,
- experimental: {
- // The output of our getServerSideProps() return large chunks of
- // data because it contains our rendered Markdown.
- // The default, for a "Large Page Data" warning is 128KB
- // but many of our pages are much larger.
- // The warning is: https://nextjs.org/docs/messages/large-page-data
- largePageDataBytes: 1024 * 1024, // 1 MB
-
- // This makes it so that going Back will scroll to the previous position
- scrollRestoration: true,
- },
-
compiler: {
styledComponents: true,
},
From 96860a72bc0920c29a3aa2fef52a321329bc213f Mon Sep 17 00:00:00 2001
From: Kevin Heis
Date: Mon, 1 Dec 2025 07:26:47 -0800
Subject: [PATCH 09/17] Resolve TODO comments (batch 1) (#58553)
---
.../liquid-ifversion-versions.ts | 19 +-
src/content-linter/tests/category-pages.ts | 3 +-
.../scripts/what-docs-early-access-branch.ts | 1 -
src/frame/tests/page.ts | 13 --
src/frame/tests/site-tree.ts | 14 +-
src/landings/components/SidebarProduct.tsx | 2 -
.../middleware/learning-track.ts | 2 -
src/learning-track/tests/lint-data.ts | 2 -
src/observability/tests/logger.ts | 4 -
src/rest/scripts/update-files.ts | 5 +-
src/rest/scripts/utils/operation.ts | 4 -
.../helpers/execute-search-actions.ts | 3 +-
src/search/components/input/SearchOverlay.tsx | 7 +-
src/search/lib/sanitize-search-query.ts | 58 +++++
src/search/tests/sanitize-search-query.ts | 218 ++++++++++++++++++
15 files changed, 296 insertions(+), 59 deletions(-)
create mode 100644 src/search/lib/sanitize-search-query.ts
create mode 100644 src/search/tests/sanitize-search-query.ts
diff --git a/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts b/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts
index 7af206e3594e..6631133278ff 100644
--- a/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts
+++ b/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts
@@ -15,8 +15,9 @@ import {
isAllVersions,
getFeatureVersionsObject,
isInAllGhes,
+ isGhesReleaseDeprecated,
} from '@/ghes-releases/scripts/version-utils'
-import { deprecated, oldestSupported } from '@/versions/lib/enterprise-server-releases'
+import { oldestSupported } from '@/versions/lib/enterprise-server-releases'
import type { RuleParams, RuleErrorCallback } from '@/content-linter/types'
export const liquidIfversionVersions = {
@@ -337,19 +338,9 @@ function updateConditionals(condTagItems: any[]) {
}
// Checks for features that are only available in no
// supported GHES releases
- // TODO use isGhesReleaseDeprecated
- if (item.versionsObjAll.ghes.startsWith('<=')) {
- const releaseNumber = item.versionsObjAll.ghes.replace('<=', '').trim()
- if (deprecated.includes(releaseNumber)) {
- item.action.type = 'delete'
- continue
- }
- } else if (item.versionsObjAll.ghes.startsWith('<')) {
- const releaseNumber = item.versionsObjAll.ghes.replace('<', '').trim()
- if (deprecated.includes(releaseNumber) || releaseNumber === oldestSupported) {
- item.action.type = 'delete'
- continue
- }
+ if (isGhesReleaseDeprecated(oldestSupported, item.versionsObjAll.ghes)) {
+ item.action.type = 'delete'
+ continue
}
}
if (item.versionsObj?.feature || item.fileVersionsFm?.feature) break
diff --git a/src/content-linter/tests/category-pages.ts b/src/content-linter/tests/category-pages.ts
index 42e4a79d1f69..a6add3b57499 100644
--- a/src/content-linter/tests/category-pages.ts
+++ b/src/content-linter/tests/category-pages.ts
@@ -53,7 +53,7 @@ describe.skip('category pages', () => {
// Get links included in product index page.
// Each link corresponds to a product subdirectory (category).
// Example: "getting-started-with-github"
- const contents = fs.readFileSync(productIndex, 'utf8') // TODO move to async
+ const contents = fs.readFileSync(productIndex, 'utf8')
const data = getFrontmatterData(contents)
const productDir = path.dirname(productIndex)
@@ -62,7 +62,6 @@ describe.skip('category pages', () => {
const categoryLinks = children
// Only include category directories, not standalone category files like content/actions/quickstart.md
.filter((link) => fs.existsSync(getPath(productDir, link, 'index')))
- // TODO this should move to async, but you can't asynchronously define tests with vitest...
// Map those to the Markdown file paths that represent that category page index
const categoryPaths = categoryLinks.map((link) => getPath(productDir, link, 'index'))
diff --git a/src/early-access/scripts/what-docs-early-access-branch.ts b/src/early-access/scripts/what-docs-early-access-branch.ts
index c6612b450b71..3bc261f79a01 100644
--- a/src/early-access/scripts/what-docs-early-access-branch.ts
+++ b/src/early-access/scripts/what-docs-early-access-branch.ts
@@ -2,7 +2,6 @@ import { getOctokit } from '@actions/github'
import { setOutput } from '@actions/core'
 async function main(): Promise<void> {
- // TODO Is there a lib function for this?
const { BRANCH_NAME, GITHUB_TOKEN } = process.env
if (!BRANCH_NAME) throw new Error("'BRANCH_NAME' env var not set")
if (!GITHUB_TOKEN) throw new Error("'GITHUB_TOKEN' env var not set")
diff --git a/src/frame/tests/page.ts b/src/frame/tests/page.ts
index 9057315ef168..03f8821b0798 100644
--- a/src/frame/tests/page.ts
+++ b/src/frame/tests/page.ts
@@ -439,19 +439,6 @@ describe('catches errors thrown in Page class', () => {
await expect(getPage).rejects.toThrowError('versions')
})
- // TODO - UNSKIP WHEN GHAE IS UPDATED WITH SEMVER VERSIONING
- test.skip('invalid versions frontmatter', async () => {
- async function getPage() {
- return await Page.init({
- relativePath: 'page-with-invalid-product-version.md',
- basePath: path.join(__dirname, '../../../src/fixtures/fixtures'),
- languageCode: 'en',
- })
- }
-
- await expect(getPage).rejects.toThrowError('versions')
- })
-
test('English page with a version in frontmatter that its parent product is not available in', async () => {
async function getPage() {
return await Page.init({
diff --git a/src/frame/tests/site-tree.ts b/src/frame/tests/site-tree.ts
index a19b17a8d3c1..5637b026789f 100644
--- a/src/frame/tests/site-tree.ts
+++ b/src/frame/tests/site-tree.ts
@@ -7,6 +7,7 @@ import { loadSiteTree } from '@/frame/lib/page-data'
import nonEnterpriseDefaultVersion from '@/versions/lib/non-enterprise-default-version'
import { formatAjvErrors } from '@/tests/helpers/schemas'
import type { SiteTree, Tree } from '@/types'
+import findPageInSiteTree from '@/frame/lib/find-page-in-site-tree'
const latestEnterpriseRelease = EnterpriseServerReleases.latest
@@ -37,15 +38,14 @@ describe('siteTree', () => {
const ghesSiteTree = siteTree.en[ghesLatest]
// Find a page in the tree that we know contains Liquid
- // TODO: use new findPageInSiteTree helper when it's available
- const pageWithDynamicTitle = ghesSiteTree.childPages
- .find((child) => child.href === `/en/${ghesLatest}/admin`)
- ?.childPages.find(
- (child) => child.href === `/en/${ghesLatest}/admin/installing-your-enterprise-server`,
- )
+ const pageWithDynamicTitle = findPageInSiteTree(
+ ghesSiteTree,
+ siteTree.en[nonEnterpriseDefaultVersion],
+ `/en/${ghesLatest}/admin/installing-your-enterprise-server`,
+ )
// Confirm the raw title contains Liquid
- expect(pageWithDynamicTitle?.page.title).toEqual(
+ expect(pageWithDynamicTitle.page.title).toEqual(
'Installing {% data variables.product.prodname_enterprise %}',
)
})
diff --git a/src/landings/components/SidebarProduct.tsx b/src/landings/components/SidebarProduct.tsx
index 164dcab4f302..833e978d8e1b 100644
--- a/src/landings/components/SidebarProduct.tsx
+++ b/src/landings/components/SidebarProduct.tsx
@@ -146,8 +146,6 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
},
{ rootMargin: '0px 0px -85% 0px' },
)
- // TODO: When we add the ## About the {title} API to each operation
- // we can remove the h2 here
const headingsList = Array.from(document.querySelectorAll('h2, h3'))
for (const heading of headingsList) {
diff --git a/src/learning-track/middleware/learning-track.ts b/src/learning-track/middleware/learning-track.ts
index 1bec3b34c55a..28e9402890a9 100644
--- a/src/learning-track/middleware/learning-track.ts
+++ b/src/learning-track/middleware/learning-track.ts
@@ -27,8 +27,6 @@ export default async function learningTrack(
const trackName = req.query.learn as string
let trackProduct = req.context.currentProduct as string
-  // TODO: Once getDeepDataByLanguage is ported to TS
-  // a more appropriate API would be to use `getDeepDataByLanguage {
logger = createLogger('file:///path/to/test.js')
})
- it('should include logger context in production logs', () => {
- // TODO
- })
-
it('should handle missing logger context gracefully in development', () => {
logger.info('No context test')
diff --git a/src/rest/scripts/update-files.ts b/src/rest/scripts/update-files.ts
index c4619cf5a6d5..c9d1abb44c92 100755
--- a/src/rest/scripts/update-files.ts
+++ b/src/rest/scripts/update-files.ts
@@ -114,10 +114,7 @@ async function main() {
// so that we don't spend time generating data files for them.
if (sourceRepos.includes(REST_API_DESCRIPTION_ROOT)) {
const derefDir = await readdir(TEMP_OPENAPI_DIR)
- // TODO: After migrating all-version.ts to TypeScript, we can remove the type assertion
- const currentOpenApiVersions = Object.values(allVersions).map(
- (elem) => (elem as any).openApiVersionName,
- )
+ const currentOpenApiVersions = Object.values(allVersions).map((elem) => elem.openApiVersionName)
for (const schema of derefDir) {
// if the schema does not start with a current version name, delete it
diff --git a/src/rest/scripts/utils/operation.ts b/src/rest/scripts/utils/operation.ts
index c8d08e545f91..aea5778c6f3f 100644
--- a/src/rest/scripts/utils/operation.ts
+++ b/src/rest/scripts/utils/operation.ts
@@ -168,10 +168,6 @@ export default class Operation {
// Operation Id: markdown/render-raw
const contentType = Object.keys(this.#operation.requestBody.content)[0]
const schema = get(this.#operation, `requestBody.content.${contentType}.schema`, {})
- // TODO: Remove this check
- if (this.#operation.operationId === 'checks/create') {
- delete schema.oneOf
- }
// Merges any instances of allOf in the schema using a deep merge
const mergedAllofSchema = mergeAllOf(schema)
try {
diff --git a/src/search/components/helpers/execute-search-actions.ts b/src/search/components/helpers/execute-search-actions.ts
index 3ed226121858..29834277ada2 100644
--- a/src/search/components/helpers/execute-search-actions.ts
+++ b/src/search/components/helpers/execute-search-actions.ts
@@ -4,6 +4,7 @@ import { DEFAULT_VERSION } from '@/versions/components/useVersion'
import { NextRouter } from 'next/router'
import { sendEvent } from '@/events/components/events'
import { SEARCH_OVERLAY_EVENT_GROUP } from '@/events/components/event-groups'
+import { sanitizeSearchQuery } from '@/search/lib/sanitize-search-query'
// Search context values for identifying each search event
export const GENERAL_SEARCH_CONTEXT = 'general-search'
@@ -21,7 +22,7 @@ export function executeGeneralSearch(
) {
sendEvent({
type: EventType.search,
- search_query: localQuery,
+ search_query: sanitizeSearchQuery(localQuery),
search_context: GENERAL_SEARCH_CONTEXT,
eventGroupKey: SEARCH_OVERLAY_EVENT_GROUP,
eventGroupId,
diff --git a/src/search/components/input/SearchOverlay.tsx b/src/search/components/input/SearchOverlay.tsx
index 250fc0317e21..8ab7fd39af7e 100644
--- a/src/search/components/input/SearchOverlay.tsx
+++ b/src/search/components/input/SearchOverlay.tsx
@@ -30,6 +30,8 @@ import { useSharedUIContext } from '@/frame/components/context/SharedUIContext'
import type { AIReference } from '../types'
import type { AutocompleteSearchHit, GeneralSearchHit } from '@/search/types'
+import { sanitizeSearchQuery } from '@/search/lib/sanitize-search-query'
+
import styles from './SearchOverlay.module.scss'
type Props = {
@@ -317,15 +319,14 @@ export function SearchOverlay({
const generalSearchResultOnSelect = (selectedOption: GeneralSearchHit) => {
sendEvent({
type: EventType.search,
- // TODO: Remove PII so we can include the actual query
- search_query: urlSearchInputQuery,
+ search_query: sanitizeSearchQuery(urlSearchInputQuery),
search_context: GENERAL_SEARCH_CONTEXT,
eventGroupKey: SEARCH_OVERLAY_EVENT_GROUP,
eventGroupId: searchEventGroupId.current,
})
sendEvent({
type: EventType.searchResult,
- search_result_query: urlSearchInputQuery,
+ search_result_query: sanitizeSearchQuery(urlSearchInputQuery),
search_result_index: selectedIndex,
search_result_total: totalGeneralSearchResults,
search_result_url: selectedOption.url || '',
diff --git a/src/search/lib/sanitize-search-query.ts b/src/search/lib/sanitize-search-query.ts
new file mode 100644
index 000000000000..0ad5e2c574bb
--- /dev/null
+++ b/src/search/lib/sanitize-search-query.ts
@@ -0,0 +1,58 @@
+// Remove PII from search queries before logging.
+// Redacts common PII patterns: emails, GitHub tokens, UUIDs, JWTs, IPs, SSH keys, high-entropy secrets.
+
+export function sanitizeSearchQuery(query: string): string {
+  if (!query) return query
+
+  let sanitized = query
+
+  // Redact email addresses (TLD must be letters only; a literal '|' must not be allowed in the class)
+  sanitized = sanitized.replace(/\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b/g, '[EMAIL]')
+
+  // Redact GitHub tokens (all formats)
+  // Classic tokens: ghp_, gho_, ghu_, ghs_, ghr_
+  sanitized = sanitized.replace(/\b(ghp|gho|ghu|ghs|ghr)_[A-Za-z0-9]{20,}\b/gi, '[TOKEN]')
+  // Fine-grained personal access tokens: github_pat_
+  sanitized = sanitized.replace(/\bgithub_pat_[A-Za-z0-9_]{20,}\b/gi, '[TOKEN]')
+  // OAuth tokens: gho_ (already covered by the classic-token pattern above; kept as a safeguard)
+  sanitized = sanitized.replace(/\bgho_[A-Za-z0-9]{20,}\b/gi, '[TOKEN]')
+
+  // Redact UUIDs
+  sanitized = sanitized.replace(
+    /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi,
+    '[UUID]',
+  )
+
+  // Redact JWT tokens (format: xxx.yyy.zzz where each part is base64url)
+  sanitized = sanitized.replace(
+    /\bey[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\b/g,
+    '[JWT]',
+  )
+
+  // Redact IP addresses (with proper validation for 0-255 range)
+  sanitized = sanitized.replace(
+    /\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b/g,
+    '[IP]',
+  )
+
+  // Redact SSH private key headers
+  sanitized = sanitized.replace(/-----BEGIN( [A-Z]+)? PRIVATE KEY-----/g, '[SSH_KEY]')
+
+  // Redact potential API keys (long strings of hex or base64-like characters)
+  // This catches high-entropy strings that might be secrets
+  sanitized = sanitized.replace(/\b[A-Za-z0-9_-]{40,}\b/g, (match) => {
+    // Only redact if it looks like high entropy (mixed case, numbers)
+    const hasLowerCase = /[a-z]/.test(match)
+    const hasUpperCase = /[A-Z]/.test(match)
+    const hasNumbers = /[0-9]/.test(match)
+    const entropyIndicators = [hasLowerCase, hasUpperCase, hasNumbers].filter(Boolean).length
+
+    // If it has at least 2 of the 3 character types, it's likely a secret
+    if (entropyIndicators >= 2) {
+      return '[SECRET]'
+    }
+    return match
+  })
+
+  return sanitized
+}
diff --git a/src/search/tests/sanitize-search-query.ts b/src/search/tests/sanitize-search-query.ts
new file mode 100644
index 000000000000..7e910ec562a2
--- /dev/null
+++ b/src/search/tests/sanitize-search-query.ts
@@ -0,0 +1,218 @@
+import { describe, expect, test } from 'vitest'
+import { sanitizeSearchQuery } from '@/search/lib/sanitize-search-query'
+
+describe('sanitizeSearchQuery', () => {
+ test('returns empty string for empty input', () => {
+ expect(sanitizeSearchQuery('')).toBe('')
+ })
+
+ test('returns query unchanged if no PII detected', () => {
+ expect(sanitizeSearchQuery('how to create a repository')).toBe('how to create a repository')
+ expect(sanitizeSearchQuery('git commit message')).toBe('git commit message')
+ })
+
+ describe('email redaction', () => {
+ test('redacts single email address', () => {
+ expect(sanitizeSearchQuery('contact user@example.com for help')).toBe(
+ 'contact [EMAIL] for help',
+ )
+ })
+
+ test('redacts multiple email addresses', () => {
+ expect(sanitizeSearchQuery('email john@example.com or jane@test.org')).toBe(
+ 'email [EMAIL] or [EMAIL]',
+ )
+ })
+
+ test('redacts emails with special characters', () => {
+ expect(sanitizeSearchQuery('user.name+tag@example.co.uk')).toBe('[EMAIL]')
+ })
+ })
+
+ describe('GitHub token redaction', () => {
+ test('redacts classic personal access tokens (ghp_)', () => {
+ expect(sanitizeSearchQuery('token ghp_1234567890123456789012345678901234567890')).toBe(
+ 'token [TOKEN]',
+ )
+ })
+
+ test('redacts OAuth tokens (gho_)', () => {
+ expect(sanitizeSearchQuery('oauth gho_1234567890123456789012345678901234567890')).toBe(
+ 'oauth [TOKEN]',
+ )
+ })
+
+ test('redacts user tokens (ghu_)', () => {
+ expect(sanitizeSearchQuery('user ghu_1234567890123456789012345678901234567890')).toBe(
+ 'user [TOKEN]',
+ )
+ })
+
+ test('redacts server tokens (ghs_)', () => {
+ expect(sanitizeSearchQuery('server ghs_1234567890123456789012345678901234567890')).toBe(
+ 'server [TOKEN]',
+ )
+ })
+
+ test('redacts refresh tokens (ghr_)', () => {
+ expect(sanitizeSearchQuery('refresh ghr_1234567890123456789012345678901234567890')).toBe(
+ 'refresh [TOKEN]',
+ )
+ })
+
+ test('redacts fine-grained PATs (github_pat_)', () => {
+ expect(
+ sanitizeSearchQuery('fine-grained github_pat_1234567890123456789012345678901234567890'),
+ ).toBe('fine-grained [TOKEN]')
+ })
+
+ test('redacts tokens with minimum length (20 chars)', () => {
+ expect(sanitizeSearchQuery('short ghp_12345678901234567890')).toBe('short [TOKEN]')
+ })
+
+ test('does not redact partial token prefixes', () => {
+ expect(sanitizeSearchQuery('ghp is not a token')).toBe('ghp is not a token')
+ })
+ })
+
+ describe('UUID redaction', () => {
+ test('redacts standard UUIDs', () => {
+ expect(sanitizeSearchQuery('id 550e8400-e29b-41d4-a716-446655440000 found')).toBe(
+ 'id [UUID] found',
+ )
+ })
+
+ test('redacts UUIDs regardless of case', () => {
+ expect(sanitizeSearchQuery('UUID 550E8400-E29B-41D4-A716-446655440000')).toBe('UUID [UUID]')
+ })
+
+ test('redacts multiple UUIDs', () => {
+ expect(
+ sanitizeSearchQuery(
+ '550e8400-e29b-41d4-a716-446655440000 and 6ba7b810-9dad-11d1-80b4-00c04fd430c8',
+ ),
+ ).toBe('[UUID] and [UUID]')
+ })
+ })
+
+ describe('JWT redaction', () => {
+ test('redacts JWT tokens', () => {
+ const jwt =
+ 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'
+ expect(sanitizeSearchQuery(`token ${jwt}`)).toBe('token [JWT]')
+ })
+
+ test('redacts JWT-like tokens with underscores and hyphens', () => {
+ expect(sanitizeSearchQuery('eyABC123_-XYZ.eyDEF456_-UVW.eyGHI789_-RST')).toBe('[JWT]')
+ })
+ })
+
+ describe('IP address redaction', () => {
+ test('redacts valid IPv4 addresses', () => {
+ expect(sanitizeSearchQuery('server 192.168.1.1 down')).toBe('server [IP] down')
+ expect(sanitizeSearchQuery('10.0.0.1')).toBe('[IP]')
+ expect(sanitizeSearchQuery('172.16.254.1')).toBe('[IP]')
+ })
+
+ test('redacts edge case IPs (0.0.0.0 and 255.255.255.255)', () => {
+ expect(sanitizeSearchQuery('0.0.0.0')).toBe('[IP]')
+ expect(sanitizeSearchQuery('255.255.255.255')).toBe('[IP]')
+ })
+
+ test('does not redact invalid IPs with octets > 255', () => {
+ expect(sanitizeSearchQuery('999.999.999.999')).toBe('999.999.999.999')
+ expect(sanitizeSearchQuery('256.1.1.1')).toBe('256.1.1.1')
+ })
+
+ test('redacts multiple IP addresses', () => {
+ expect(sanitizeSearchQuery('connect 10.0.0.1 or 192.168.1.1')).toBe('connect [IP] or [IP]')
+ })
+ })
+
+ describe('SSH private key redaction', () => {
+ test('redacts RSA private key headers', () => {
+ expect(sanitizeSearchQuery('-----BEGIN RSA PRIVATE KEY----- content')).toBe(
+ '[SSH_KEY] content',
+ )
+ })
+
+ test('redacts generic private key headers', () => {
+ expect(sanitizeSearchQuery('-----BEGIN PRIVATE KEY----- content')).toBe('[SSH_KEY] content')
+ })
+
+ test('redacts EC private key headers', () => {
+ expect(sanitizeSearchQuery('-----BEGIN EC PRIVATE KEY----- content')).toBe(
+ '[SSH_KEY] content',
+ )
+ })
+ })
+
+ describe('high-entropy string redaction', () => {
+ test('redacts long high-entropy strings with mixed case and numbers', () => {
+ // 40+ chars with lowercase, uppercase, and numbers
+ const secret = 'aB3dEf9Gh2IjKlMn0PqRsTuVwXyZ1234567890aBcDeF'
+ expect(sanitizeSearchQuery(secret)).toBe('[SECRET]')
+ })
+
+ test('redacts strings with lowercase and numbers', () => {
+ const secret = 'abc123def456ghi789jkl012mno345pqr678stu901vwx234'
+ expect(sanitizeSearchQuery(secret)).toBe('[SECRET]')
+ })
+
+ test('redacts strings with uppercase and numbers', () => {
+ const secret = 'ABC123DEF456GHI789JKL012MNO345PQR678STU901VWX234'
+ expect(sanitizeSearchQuery(secret)).toBe('[SECRET]')
+ })
+
+ test('does not redact long strings with only lowercase', () => {
+ const notSecret = 'thisisalongstringwithnouppercharsornumbers'
+ expect(sanitizeSearchQuery(notSecret)).toBe(notSecret)
+ })
+
+ test('does not redact long strings with only numbers', () => {
+ const notSecret = '12345678901234567890123456789012345678901234567890'
+ expect(sanitizeSearchQuery(notSecret)).toBe(notSecret)
+ })
+
+ test('does not redact strings shorter than 40 chars', () => {
+ const shortString = 'aB3dEf9Gh2IjKlMn0PqRsTuVwXyZ'
+ expect(sanitizeSearchQuery(shortString)).toBe(shortString)
+ })
+ })
+
+ describe('multiple PII types in one query', () => {
+ test('redacts all PII types in a single query', () => {
+ const query =
+ 'email user@example.com token ghp_1234567890123456789012345678901234567890 from 192.168.1.1'
+ expect(sanitizeSearchQuery(query)).toBe('email [EMAIL] token [TOKEN] from [IP]')
+ })
+
+ test('handles complex mixed query', () => {
+ const query = `
+ Contact admin@github.com
+ Token: github_pat_12345678901234567890ABCDEFGH
+ UUID: 550e8400-e29b-41d4-a716-446655440000
+ Server: 10.0.0.1
+ `.trim()
+ const result = sanitizeSearchQuery(query)
+ expect(result).toContain('[EMAIL]')
+ expect(result).toContain('[TOKEN]')
+ expect(result).toContain('[UUID]')
+ expect(result).toContain('[IP]')
+ })
+ })
+
+ describe('preserves safe content', () => {
+ test('preserves URLs without emails', () => {
+ expect(sanitizeSearchQuery('https://github.com/docs')).toBe('https://github.com/docs')
+ })
+
+ test('preserves code snippets', () => {
+ expect(sanitizeSearchQuery('git commit -m "fix bug"')).toBe('git commit -m "fix bug"')
+ })
+
+ test('preserves version numbers', () => {
+ expect(sanitizeSearchQuery('node v18.0.0')).toBe('node v18.0.0')
+ })
+ })
+})
From 2627c4197019162d4eb39e0cdd8324ead43ef450 Mon Sep 17 00:00:00 2001
From: Kevin Heis
Date: Mon, 1 Dec 2025 07:27:10 -0800
Subject: [PATCH 10/17] Remove use of 'any' type in 12 files (#58540)
---
src/app/lib/server-context-utils.ts | 3 +-
src/assets/middleware/dynamic-assets.ts | 6 +-
.../journey-tracks-guide-path-exists.ts | 22 +++---
.../journey-tracks-unique-ids.ts | 19 +++--
src/content-render/liquid/tool.ts | 13 +--
.../scripts/all-documents/lib.ts | 6 +-
src/events/tests/middleware.ts | 2 +-
src/frame/tests/pages.ts | 15 ++--
.../tests/translation-error-comments.ts | 79 +++++++++++--------
.../tests/get-automatic-request-logger.ts | 43 +++++-----
src/rest/tests/create-rest-examples.ts | 7 +-
src/search/lib/ai-search-proxy.ts | 5 +-
src/search/scripts/scrape/lib/domwaiter.ts | 23 +++---
src/types/json-schema-merge-allof.d.ts | 4 +-
14 files changed, 146 insertions(+), 101 deletions(-)
diff --git a/src/app/lib/server-context-utils.ts b/src/app/lib/server-context-utils.ts
index 7bb74149c22f..3d4dcec2df13 100644
--- a/src/app/lib/server-context-utils.ts
+++ b/src/app/lib/server-context-utils.ts
@@ -3,12 +3,13 @@ import { extractVersionFromPath } from '@/app/lib/version-utils'
import { getUIDataMerged } from '@/data-directory/lib/get-data'
import { type LanguageCode } from '@/languages/lib/languages'
import { createTranslationFunctions, translate } from '@/languages/lib/translation-utils'
+import type { UIStrings } from '@/frame/components/context/MainContext'
export interface ServerAppRouterContext {
currentLanguage: LanguageCode
currentVersion: string
sitename: string
- site: { data: { ui: any } }
+ site: { data: { ui: UIStrings } }
}
/**
diff --git a/src/assets/middleware/dynamic-assets.ts b/src/assets/middleware/dynamic-assets.ts
index 25ed3fb67c1f..9053151a3181 100644
--- a/src/assets/middleware/dynamic-assets.ts
+++ b/src/assets/middleware/dynamic-assets.ts
@@ -144,7 +144,11 @@ export default async function dynamicAssets(
assetCacheControl(res)
return res.type('image/webp').send(buffer)
} catch (catchError) {
- if (catchError instanceof Error && (catchError as any).code !== 'ENOENT') {
+ if (
+ catchError instanceof Error &&
+ 'code' in catchError &&
+ (catchError as NodeJS.ErrnoException).code !== 'ENOENT'
+ ) {
throw catchError
}
}
diff --git a/src/content-linter/lib/linting-rules/journey-tracks-guide-path-exists.ts b/src/content-linter/lib/linting-rules/journey-tracks-guide-path-exists.ts
index e4a9d6139069..17bfbe04c3af 100644
--- a/src/content-linter/lib/linting-rules/journey-tracks-guide-path-exists.ts
+++ b/src/content-linter/lib/linting-rules/journey-tracks-guide-path-exists.ts
@@ -52,10 +52,12 @@ export const journeyTracksGuidePathExists = {
description: 'Journey track guide paths must reference existing content files',
tags: ['frontmatter', 'journey-tracks'],
function: (params: RuleParams, onError: RuleErrorCallback) => {
- // Using any for frontmatter as it's a dynamic YAML object with varying properties
- const fm: any = getFrontmatter(params.lines)
- if (!fm || !fm.journeyTracks || !Array.isArray(fm.journeyTracks)) return
- if (!fm.layout || fm.layout !== 'journey-landing') return
+ // Using unknown for frontmatter as it's a dynamic YAML object with varying properties
+ const fm: unknown = getFrontmatter(params.lines)
+ if (!fm || typeof fm !== 'object' || !('journeyTracks' in fm)) return
+      const fmObj = fm as Record<string, unknown>
+ if (!Array.isArray(fmObj.journeyTracks)) return
+ if (!('layout' in fmObj) || fmObj.layout !== 'journey-landing') return
const journeyTracksLine = params.lines.find((line: string) => line.startsWith('journeyTracks:'))
@@ -63,11 +65,13 @@ export const journeyTracksGuidePathExists = {
const journeyTracksLineNumber = params.lines.indexOf(journeyTracksLine) + 1
- for (let trackIndex = 0; trackIndex < fm.journeyTracks.length; trackIndex++) {
- const track: any = fm.journeyTracks[trackIndex]
- if (track.guides && Array.isArray(track.guides)) {
- for (let guideIndex = 0; guideIndex < track.guides.length; guideIndex++) {
- const guide: string = track.guides[guideIndex]
+ for (let trackIndex = 0; trackIndex < fmObj.journeyTracks.length; trackIndex++) {
+ const track: unknown = fmObj.journeyTracks[trackIndex]
+ if (!track || typeof track !== 'object' || !('guides' in track)) continue
+      const trackObj = track as Record<string, unknown>
+ if (trackObj.guides && Array.isArray(trackObj.guides)) {
+ for (let guideIndex = 0; guideIndex < trackObj.guides.length; guideIndex++) {
+ const guide: string = trackObj.guides[guideIndex]
if (typeof guide === 'string') {
if (!isValidGuidePath(guide, params.name)) {
addError(
diff --git a/src/content-linter/lib/linting-rules/journey-tracks-unique-ids.ts b/src/content-linter/lib/linting-rules/journey-tracks-unique-ids.ts
index fc3e415a19d9..ff61b1e310d5 100644
--- a/src/content-linter/lib/linting-rules/journey-tracks-unique-ids.ts
+++ b/src/content-linter/lib/linting-rules/journey-tracks-unique-ids.ts
@@ -10,9 +10,11 @@ export const journeyTracksUniqueIds = {
tags: ['frontmatter', 'journey-tracks', 'unique-ids'],
function: function GHD060(params: RuleParams, onError: RuleErrorCallback) {
// Using any for frontmatter as it's a dynamic YAML object with varying properties
- const fm: any = getFrontmatter(params.lines)
- if (!fm || !fm.journeyTracks || !Array.isArray(fm.journeyTracks)) return
- if (!fm.layout || fm.layout !== 'journey-landing') return
+ const fm: unknown = getFrontmatter(params.lines)
+ if (!fm || typeof fm !== 'object' || !('journeyTracks' in fm)) return
+    const fmObj = fm as Record<string, unknown>
+ if (!Array.isArray(fmObj.journeyTracks)) return
+ if (!('layout' in fmObj) || fmObj.layout !== 'journey-landing') return
// Find the base journeyTracks line
const journeyTracksLine: string | undefined = params.lines.find((line: string) =>
@@ -37,7 +39,7 @@ export const journeyTracksUniqueIds = {
trackCount++
// Stop once we've found all the tracks we know exist
- if (fm && fm.journeyTracks && trackCount >= fm.journeyTracks.length) {
+ if (Array.isArray(fmObj.journeyTracks) && trackCount >= fmObj.journeyTracks.length) {
break
}
}
@@ -48,11 +50,12 @@ export const journeyTracksUniqueIds = {
// Track seen journey track IDs and line number for error reporting
const seenIds = new Map()
- for (let index = 0; index < fm.journeyTracks.length; index++) {
- const track: any = fm.journeyTracks[index]
- if (!track || typeof track !== 'object') continue
+ for (let index = 0; index < fmObj.journeyTracks.length; index++) {
+ const track: unknown = fmObj.journeyTracks[index]
+ if (!track || typeof track !== 'object' || !('id' in track)) continue
- const trackId = track.id
+      const trackObj = track as Record<string, unknown>
+ const trackId = trackObj.id
if (!trackId || typeof trackId !== 'string') continue
const currentLineNumber = getTrackLineNumber(index)
diff --git a/src/content-render/liquid/tool.ts b/src/content-render/liquid/tool.ts
index c62dedf1ea1b..922893032e37 100644
--- a/src/content-render/liquid/tool.ts
+++ b/src/content-render/liquid/tool.ts
@@ -47,26 +47,27 @@ export const Tool = {
type: 'block' as const,
tagName: '',
// Liquid template objects don't have TypeScript definitions
- templates: [] as any[],
+ templates: [] as unknown[],
// tagToken and remainTokens are Liquid internal types without TypeScript definitions
- parse(tagToken: any, remainTokens: any) {
- this.tagName = tagToken.name
+ parse(tagToken: unknown, remainTokens: unknown) {
+ const token = tagToken as { name: string; getText: () => string }
+ this.tagName = token.name
this.templates = []
const stream = this.liquid.parser.parseStream(remainTokens)
stream
.on(`tag:end${this.tagName}`, () => stream.stop())
// tpl is a Liquid template object without TypeScript definitions
- .on('template', (tpl: any) => this.templates.push(tpl))
+ .on('template', (tpl: unknown) => this.templates.push(tpl))
.on('end', () => {
- throw new Error(`tag ${tagToken.getText()} not closed`)
+ throw new Error(`tag ${token.getText()} not closed`)
})
stream.start()
},
// scope is a Liquid scope object, Generator yields/returns Liquid template values - no TypeScript definitions available
- *render(scope: any): Generator {
+ *render(scope: unknown): Generator {
const output = yield this.liquid.renderer.renderTemplates(this.templates, scope)
return yield this.liquid.parseAndRender(template, {
tagName: this.tagName,
diff --git a/src/content-render/scripts/all-documents/lib.ts b/src/content-render/scripts/all-documents/lib.ts
index fc7f30427613..47f6a7aacbfd 100644
--- a/src/content-render/scripts/all-documents/lib.ts
+++ b/src/content-render/scripts/all-documents/lib.ts
@@ -1,6 +1,6 @@
import type { Response } from 'express'
-import type { ExtendedRequest, Page } from '@/types'
+import type { ExtendedRequest, Page, Context } from '@/types'
import contextualize from '@/frame/middleware/context/context'
import features from '@/versions/middleware/features'
import shortVersions from '@/versions/middleware/short-versions'
@@ -55,7 +55,7 @@ export async function allDocuments(options: Options): Promise {
const next = () => {}
const res = {}
const pagePath = permalink.href
- const context: any = {}
+      const context: Partial<Context> = {}
const req = {
path: pagePath,
language: permalink.languageCode,
@@ -68,7 +68,7 @@ export async function allDocuments(options: Options): Promise {
await contextualize(req as ExtendedRequest, res as Response, next)
await shortVersions(req as ExtendedRequest, res as Response, next)
req.context.page = page
- features(req as any, res as any, next)
+ features(req as ExtendedRequest, res as Response, next)
const title = fields.includes('title')
? await page.renderProp('title', req.context, { textOnly: true })
diff --git a/src/events/tests/middleware.ts b/src/events/tests/middleware.ts
index a450878436d0..f1a500958ba4 100644
--- a/src/events/tests/middleware.ts
+++ b/src/events/tests/middleware.ts
@@ -6,7 +6,7 @@ import { contentTypesEnum } from '@/frame/lib/frontmatter'
describe('POST /events', () => {
vi.setConfig({ testTimeout: 60 * 1000 })
- async function checkEvent(data: any) {
+ async function checkEvent(data: unknown) {
if (!Array.isArray(data)) {
data = [data]
}
diff --git a/src/frame/tests/pages.ts b/src/frame/tests/pages.ts
index 5ce2674a93da..2ae7279cee64 100644
--- a/src/frame/tests/pages.ts
+++ b/src/frame/tests/pages.ts
@@ -60,14 +60,15 @@ describe('pages module', () => {
// Page objects have dynamic properties from chain/lodash that aren't fully typed
for (const page of englishPages) {
- for (const redirect of (page as any).redirect_from) {
- for (const version of (page as any).applicableVersions) {
+      const pageObj = page as Record<string, unknown>
+ for (const redirect of pageObj.redirect_from as string[]) {
+ for (const version of pageObj.applicableVersions as string[]) {
const versioned = removeFPTFromPath(path.posix.join('/', version, redirect))
- versionedRedirects.push({ path: versioned, file: (page as any).fullPath })
+ versionedRedirects.push({ path: versioned, file: pageObj.fullPath as string })
if (!redirectToFiles.has(versioned)) {
redirectToFiles.set(versioned, new Set())
}
- redirectToFiles.get(versioned)!.add((page as any).fullPath)
+ redirectToFiles.get(versioned)!.add(pageObj.fullPath as string)
}
}
}
@@ -97,7 +98,7 @@ describe('pages module', () => {
page.languageCode === 'en' && // only check English
!page.relativePath.includes('index.md') && // ignore TOCs
// Page class has dynamic frontmatter properties like 'allowTitleToDifferFromFilename' not in type definition
- !(page as any).allowTitleToDifferFromFilename && // ignore docs with override
+      !(page as Record<string, unknown>).allowTitleToDifferFromFilename && // ignore docs with override
slugger.slug(decode(page.title)) !== path.basename(page.relativePath, '.md') &&
slugger.slug(decode(page.shortTitle || '')) !== path.basename(page.relativePath, '.md')
)
@@ -129,7 +130,7 @@ describe('pages module', () => {
const frontmatterErrors = chain(pages)
// .filter(page => page.languageCode === 'en')
// Page class has dynamic error properties like 'frontmatterErrors' not in type definition
- .map((page) => (page as any).frontmatterErrors)
+      .map((page) => (page as Record<string, unknown>).frontmatterErrors)
.filter(Boolean)
.flatten()
.value()
@@ -149,7 +150,7 @@ describe('pages module', () => {
for (const page of pages) {
// Page class has dynamic properties like 'raw' markdown not in type definition
- const markdown = (page as any).raw
+    const markdown = (page as Record<string, unknown>).raw as string
if (!patterns.hasLiquid.test(markdown)) continue
try {
await liquid.parse(markdown)
diff --git a/src/languages/tests/translation-error-comments.ts b/src/languages/tests/translation-error-comments.ts
index 19bf22a914bb..7052e8023f42 100644
--- a/src/languages/tests/translation-error-comments.ts
+++ b/src/languages/tests/translation-error-comments.ts
@@ -8,9 +8,20 @@ import {
import { TitleFromAutotitleError } from '@/content-render/unified/rewrite-local-links'
import Page from '@/frame/lib/page'
+// Type aliases for error objects with token information
+type ErrorWithToken = Error & { token: { file: string; getPosition: () => number[] } }
+type ErrorWithTokenNoFile = Error & { token: { getPosition: () => number[] } }
+type ErrorWithTokenNoPosition = Error & { token: { file: string } }
+type ErrorWithTokenAndOriginal = Error & {
+ token: { file: string; getPosition: () => number[] }
+ originalError: Error
+}
+
describe('Translation Error Comments', () => {
// Mock renderContent for integration tests
- let mockRenderContent: MockedFunction<(template: string, context: any) => string>
+ let mockRenderContent: MockedFunction<
+    (template: string, context: Record<string, unknown>) => string
+ >
beforeEach(() => {
mockRenderContent = vi.fn()
@@ -26,7 +37,7 @@ describe('Translation Error Comments', () => {
test('includes all fields when token information is available', () => {
const error = new Error("Unknown tag 'badtag', line:1, col:3")
error.name = 'ParseError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test/article.md',
getPosition: () => [1, 3],
}
@@ -48,11 +59,13 @@ describe('Translation Error Comments', () => {
test('includes original error message when available', () => {
const error = new Error("Unknown variable 'variables.nonexistent.value'")
error.name = 'RenderError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test/intro.md',
getPosition: () => [3, 15],
}
- ;(error as any).originalError = new Error('Variable not found: variables.nonexistent.value')
+ ;(error as unknown as ErrorWithTokenAndOriginal).originalError = new Error(
+ 'Variable not found: variables.nonexistent.value',
+ )
const result = createTranslationFallbackComment(error, 'rawIntro')
@@ -67,7 +80,7 @@ describe('Translation Error Comments', () => {
test('falls back to main error message when no originalError', () => {
const error = new Error('Main error message')
error.name = 'RenderError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test.md',
getPosition: () => [1, 1],
}
@@ -82,7 +95,7 @@ describe('Translation Error Comments', () => {
test('includes tokenization error details', () => {
const error = new Error('Unexpected token, line:1, col:10')
error.name = 'TokenizationError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test/page.md',
getPosition: () => [1, 10],
}
@@ -152,7 +165,7 @@ describe('Translation Error Comments', () => {
test('handles error with token but no file', () => {
const error = new Error('Error message')
error.name = 'ParseError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithTokenNoFile).token = {
// No file property
getPosition: () => [5, 10],
}
@@ -167,7 +180,7 @@ describe('Translation Error Comments', () => {
test('handles error with token but no getPosition method', () => {
const error = new Error('Error message')
error.name = 'ParseError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithTokenNoPosition).token = {
file: '/content/test.md',
// No getPosition method
}
@@ -246,7 +259,7 @@ describe('Translation Error Comments', () => {
test('comment format is valid HTML', () => {
const error = new Error('Test error')
error.name = 'ParseError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test.md',
getPosition: () => [1, 1],
}
@@ -264,7 +277,7 @@ describe('Translation Error Comments', () => {
test('contains all required fields when available', () => {
const error = new Error('Detailed error message')
error.name = 'RenderError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/detailed-test.md',
getPosition: () => [42, 15],
}
@@ -283,7 +296,7 @@ describe('Translation Error Comments', () => {
test('maintains consistent field order', () => {
const error = new Error('Test message')
error.name = 'ParseError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/test.md',
getPosition: () => [1, 1],
}
@@ -320,18 +333,20 @@ describe('Translation Error Comments', () => {
}
// Mock renderContent to simulate error for Japanese, success for English
- mockRenderContent.mockImplementation((template: string, innerContext: any) => {
- if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
- const error = new Error("Unknown tag 'badtag'")
- error.name = 'ParseError'
- ;(error as any).token = {
- file: '/content/test.md',
- getPosition: () => [1, 5],
+ mockRenderContent.mockImplementation(
+ (template: string, innerContext: Record) => {
+ if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
+ const error = new Error("Unknown tag 'badtag'")
+ error.name = 'ParseError'
+ ;(error as unknown as ErrorWithToken).token = {
+ file: '/content/test.md',
+ getPosition: () => [1, 5],
+ }
+ throw error
}
- throw error
- }
- return innerContext.currentLanguage === 'en' ? 'English Title' : template
- })
+ return innerContext.currentLanguage === 'en' ? 'English Title' : template
+ },
+ )
const result = await renderContentWithFallback(mockPage, 'rawTitle', context)
@@ -357,14 +372,16 @@ describe('Translation Error Comments', () => {
},
}
- mockRenderContent.mockImplementation((template: string, innerContext: any) => {
- if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
- const error = new Error("Unknown tag 'badtag'")
- error.name = 'ParseError'
- throw error
- }
- return 'English Title'
- })
+ mockRenderContent.mockImplementation(
+ (template: string, innerContext: Record) => {
+ if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
+ const error = new Error("Unknown tag 'badtag'")
+ error.name = 'ParseError'
+ throw error
+ }
+ return 'English Title'
+ },
+ )
const result = await renderContentWithFallback(mockPage, 'rawTitle', context, {
textOnly: true,
@@ -384,7 +401,7 @@ describe('Translation Error Comments', () => {
const failingCallable = async () => {
const error = new Error("Unknown variable 'variables.bad'")
error.name = 'RenderError'
- ;(error as any).token = {
+ ;(error as unknown as ErrorWithToken).token = {
file: '/content/article.md',
getPosition: () => [10, 20],
}
diff --git a/src/observability/tests/get-automatic-request-logger.ts b/src/observability/tests/get-automatic-request-logger.ts
index 9b05c169ef14..705920ecc780 100644
--- a/src/observability/tests/get-automatic-request-logger.ts
+++ b/src/observability/tests/get-automatic-request-logger.ts
@@ -2,6 +2,9 @@ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
import { getAutomaticRequestLogger } from '@/observability/logger/middleware/get-automatic-request-logger'
import type { Request, Response, NextFunction } from 'express'
+// Type alias for mock response with overridden end function
+type MockResponseWithEnd = Partial & { end: () => unknown }
+
describe('getAutomaticRequestLogger', () => {
let originalEnv: typeof process.env
let originalConsoleLog: typeof console.log
@@ -43,7 +46,7 @@ describe('getAutomaticRequestLogger', () => {
}
// Override res.end to simulate response completion
- function endOverride(this: any, chunk?: any, encoding?: any) {
+ function endOverride(this: Response, chunk?: unknown, encoding?: unknown): Response {
if (!responseEnded) {
responseEnded = true
// Simulate a small delay for response time
@@ -54,7 +57,7 @@ describe('getAutomaticRequestLogger', () => {
return this
}
- ;(mockRes as any).end = endOverride
+ ;(mockRes as { end: typeof endOverride }).end = endOverride
mockNext = vi.fn()
@@ -86,7 +89,7 @@ describe('getAutomaticRequestLogger', () => {
middleware(mockReq as Request, mockRes as Response, mockNext)
// Simulate response completion
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
// Wait for async logging
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -143,7 +146,7 @@ describe('getAutomaticRequestLogger', () => {
}
// Override res.end to simulate response completion
- function endOverride(this: any, chunk?: any, encoding?: any) {
+ function endOverride(this: Response, chunk?: unknown, encoding?: unknown): Response {
if (!responseEnded) {
responseEnded = true
// Simulate a small delay for response time
@@ -154,7 +157,7 @@ describe('getAutomaticRequestLogger', () => {
return this
}
- ;(freshMockRes as any).end = endOverride
+ ;(freshMockRes as { end: typeof endOverride }).end = endOverride
const freshMockNext = vi.fn()
@@ -165,7 +168,7 @@ describe('getAutomaticRequestLogger', () => {
freshMockRes as Partial as Response,
freshMockNext,
)
- ;(freshMockRes as any).end()
+ ;(freshMockRes as MockResponseWithEnd).end()
// Wait for async logging with longer timeout for CI
await new Promise((resolve) => setTimeout(resolve, 50))
@@ -187,7 +190,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -202,7 +205,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -215,7 +218,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -233,7 +236,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -252,7 +255,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -291,7 +294,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
// Wait for any potential async logging with longer timeout for CI
await new Promise((resolve) => setTimeout(resolve, 50))
@@ -309,7 +312,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -320,11 +323,13 @@ describe('getAutomaticRequestLogger', () => {
describe('edge cases', () => {
it('should handle missing content-length header', async () => {
- ;(mockRes as any).getHeader = vi.fn(() => undefined)
+ ;(mockRes as Partial & { getHeader: () => undefined }).getHeader = vi.fn(
+ () => undefined,
+ )
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -333,11 +338,11 @@ describe('getAutomaticRequestLogger', () => {
})
it('should handle missing status code', async () => {
- delete (mockRes as any).statusCode
+ delete (mockRes as Partial & { statusCode?: number }).statusCode
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -351,7 +356,7 @@ describe('getAutomaticRequestLogger', () => {
const middleware = getAutomaticRequestLogger()
middleware(mockReq as Request, mockRes as Response, mockNext)
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
@@ -368,7 +373,7 @@ describe('getAutomaticRequestLogger', () => {
// Simulate some processing time
await new Promise((resolve) => setTimeout(resolve, 50))
- ;(mockRes as any).end()
+ ;(mockRes as MockResponseWithEnd).end()
await new Promise((resolve) => setTimeout(resolve, 20))
const endTime = Date.now()
diff --git a/src/rest/tests/create-rest-examples.ts b/src/rest/tests/create-rest-examples.ts
index f371f8dbcfa4..ed1b531065ed 100644
--- a/src/rest/tests/create-rest-examples.ts
+++ b/src/rest/tests/create-rest-examples.ts
@@ -52,9 +52,12 @@ describe('rest example requests and responses', () => {
test('check example number and status code appear', async () => {
const mergedExamples = await getCodeSamples(operation)
- // example is any because getCodeSamples returns objects from untyped JavaScript module
+ // example has specific structure from getCodeSamples
for (let index = 0; index < mergedExamples.length; index++) {
- const example: any = mergedExamples[index]
+ const example = mergedExamples[index] as {
+ request: { description: string }
+ response: { statusCode: string }
+ }
expect(example.request.description).toBe(
`Example ${index + 1}: Status Code ${example.response.statusCode}`,
)
diff --git a/src/search/lib/ai-search-proxy.ts b/src/search/lib/ai-search-proxy.ts
index dd84458da35f..a6bd834df737 100644
--- a/src/search/lib/ai-search-proxy.ts
+++ b/src/search/lib/ai-search-proxy.ts
@@ -21,8 +21,9 @@ export const aiSearchProxy = async (req: ExtendedRequest, res: Response) => {
let docsSource = ''
try {
docsSource = getCSECopilotSource(version)
- } catch (error: any) {
- errors.push({ message: error?.message || 'Invalid version' })
+ } catch (error: unknown) {
+ const message = error instanceof Error ? error.message : 'Invalid version'
+ errors.push({ message })
}
if (errors.length) {
diff --git a/src/search/scripts/scrape/lib/domwaiter.ts b/src/search/scripts/scrape/lib/domwaiter.ts
index 34302c823b24..70e1251f6fe0 100644
--- a/src/search/scripts/scrape/lib/domwaiter.ts
+++ b/src/search/scripts/scrape/lib/domwaiter.ts
@@ -22,6 +22,10 @@ class HTTPError extends Error {
}
}
+// Type aliases for error objects with additional URL information
+type HTTPErrorWithUrl = HTTPError & { url?: string; relativePath?: string }
+type ErrorWithUrl = Error & { url?: string; relativePath?: string }
+
interface DomWaiterOptions {
parseDOM?: boolean
json?: boolean
@@ -94,8 +98,8 @@ async function getPage(page: Permalink, emitter: EventEmitter, opts: DomWaiterOp
{ requestUrl: { pathname: page.url } },
)
// Add URL and path info directly to the HTTPError
- ;(httpError as any).url = page.url
- ;(httpError as any).relativePath = page.relativePath
+ ;(httpError as HTTPErrorWithUrl).url = page.url
+ ;(httpError as HTTPErrorWithUrl).relativePath = page.relativePath
// Emit error instead of throwing
emitter.emit('error', httpError)
return // Exit early, don't continue processing
@@ -109,8 +113,8 @@ async function getPage(page: Permalink, emitter: EventEmitter, opts: DomWaiterOp
const enhancedError = new Error(err.message, { cause: err.cause })
enhancedError.name = err.name
enhancedError.stack = err.stack
- ;(enhancedError as any).url = page.url
- ;(enhancedError as any).relativePath = page.relativePath
+ ;(enhancedError as ErrorWithUrl).url = page.url
+ ;(enhancedError as ErrorWithUrl).relativePath = page.relativePath
emitter.emit('error', enhancedError)
} else {
emitter.emit('error', err)
@@ -130,15 +134,16 @@ async function getPage(page: Permalink, emitter: EventEmitter, opts: DomWaiterOp
{ requestUrl: { pathname: page.url } },
)
// Add URL and path info directly to the HTTPError
- ;(httpError as any).url = page.url
- ;(httpError as any).relativePath = page.relativePath
+ ;(httpError as HTTPErrorWithUrl).url = page.url
+ ;(httpError as HTTPErrorWithUrl).relativePath = page.relativePath
// Emit error instead of throwing
emitter.emit('error', httpError)
return // Exit early, don't continue processing
}
const body = await response.text()
const pageCopy = Object.assign({}, page, { body })
- if (opts.parseDOM) (pageCopy as any).$ = cheerio.load(body)
+ if (opts.parseDOM)
+ (pageCopy as Permalink & { $?: ReturnType }).$ = cheerio.load(body)
emitter.emit('page', pageCopy)
} catch (err) {
// Enhance error with URL information
@@ -146,8 +151,8 @@ async function getPage(page: Permalink, emitter: EventEmitter, opts: DomWaiterOp
const enhancedError = new Error(err.message, { cause: err.cause })
enhancedError.name = err.name
enhancedError.stack = err.stack
- ;(enhancedError as any).url = page.url
- ;(enhancedError as any).relativePath = page.relativePath
+ ;(enhancedError as ErrorWithUrl).url = page.url
+ ;(enhancedError as ErrorWithUrl).relativePath = page.relativePath
emitter.emit('error', enhancedError)
} else {
emitter.emit('error', err)
diff --git a/src/types/json-schema-merge-allof.d.ts b/src/types/json-schema-merge-allof.d.ts
index 83f021e0bf12..d4e2d3512ee4 100644
--- a/src/types/json-schema-merge-allof.d.ts
+++ b/src/types/json-schema-merge-allof.d.ts
@@ -9,7 +9,7 @@ declare module 'json-schema-merge-allof' {
type?: string | string[]
items?: JSONSchema | JSONSchema[]
additionalProperties?: boolean | JSONSchema
- [key: string]: any // JSON Schema allows arbitrary additional properties per spec
+ [key: string]: unknown // JSON Schema allows arbitrary additional properties per spec
}
/**
@@ -23,7 +23,7 @@ declare module 'json-schema-merge-allof' {
*/
resolvers?: Record<
string,
- (values: any[], path: string[], mergeSchemas: any, options: any) => any
+ (values: unknown[], path: string[], mergeSchemas: unknown, options: unknown) => unknown
>
/**
From 25f9bd3a1b8e38de1d22db8988357eaa6e6f5656 Mon Sep 17 00:00:00 2001
From: Dan Daugherty <95140349+cidersage@users.noreply.github.com>
Date: Mon, 1 Dec 2025 08:27:43 -0700
Subject: [PATCH 11/17] Add VSCode debug logging instructions for issue 19638
(#58383)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
.../how-tos/troubleshoot-copilot/view-logs.md | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/content/copilot/how-tos/troubleshoot-copilot/view-logs.md b/content/copilot/how-tos/troubleshoot-copilot/view-logs.md
index 7f682f4ddfcb..f09afe0540aa 100644
--- a/content/copilot/how-tos/troubleshoot-copilot/view-logs.md
+++ b/content/copilot/how-tos/troubleshoot-copilot/view-logs.md
@@ -118,6 +118,22 @@ Alternatively, you can open the log folder for {% data variables.product.prodnam
* Use: Ctrl+Shift+P
1. Type "Logs", and then select **Developer: Open Extension Logs Folder** from the list.
+## Enabling debug mode
+
+If you find the log file doesn't contain enough information to resolve an issue, it may help to enable debug logging temporarily. This can be especially helpful for debugging network-related issues.
+
+1. Open the {% data variables.product.prodname_vscode_command_palette_shortname %}
+ * For Mac:
+ * Use: Shift+Command+P
+ * For Windows or Linux:
+ * Use: Ctrl+Shift+P
+1. Type "Developer", then select **Developer: Set Log Level**.
+1. Type "{% data variables.product.github %}", then select the {% data variables.product.github %} extension you're troubleshooting:
+ * **{% data variables.copilot.copilot_chat %}** for the {% data variables.copilot.copilot_chat_short %} extension.
+ * **{% data variables.product.github %}** for the {% data variables.copilot.copilot_extension %}.
+1. Select **Trace** from the dropdown list.
+1. When you have the information you need, disable debug mode by repeating steps 1 through 4 and returning the logging level to **Info**.
+
## Viewing network connectivity diagnostics logs
If you encounter problems connecting to {% data variables.product.prodname_copilot %} due to network restrictions, firewalls, or your proxy setup, use the following troubleshooting steps.
From 31cdc562a7c9baccc1f76ddcb94c27a16f0d9659 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Dec 2025 15:35:37 +0000
Subject: [PATCH 12/17] Bump the npm_and_yarn group across 1 directory with 2
updates (#58533)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sam Browning <106113886+sabrowning1@users.noreply.github.com>
---
package-lock.json | 81 +++++++++++++++++++++++++----------------------
package.json | 4 +--
2 files changed, 46 insertions(+), 39 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index 147d6035613c..2923c76cba01 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -43,7 +43,7 @@
"file-type": "21.0.0",
"flat": "^6.0.1",
"github-slugger": "^2.0.0",
- "glob": "11.0.2",
+ "glob": "11.1.0",
"hast-util-from-parse5": "^8.0.3",
"hast-util-to-string": "^3.0.1",
"hastscript": "^9.0.1",
@@ -57,7 +57,7 @@
"is-svg": "6.0.0",
"javascript-stringify": "^2.1.0",
"js-cookie": "^3.0.5",
- "js-yaml": "^4.1.0",
+ "js-yaml": "^4.1.1",
"liquidjs": "^10.16.7",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
@@ -2383,6 +2383,7 @@
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+ "license": "ISC",
"dependencies": {
"string-width": "^5.1.2",
"string-width-cjs": "npm:string-width@^4.2.0",
@@ -2396,9 +2397,10 @@
}
},
"node_modules/@isaacs/cliui/node_modules/ansi-styles": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
- "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
+ "license": "MIT",
"engines": {
"node": ">=12"
},
@@ -2410,6 +2412,7 @@
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
+ "license": "MIT",
"dependencies": {
"eastasianwidth": "^0.2.0",
"emoji-regex": "^9.2.2",
@@ -2426,6 +2429,7 @@
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
+ "license": "MIT",
"dependencies": {
"ansi-styles": "^6.1.0",
"string-width": "^5.0.1",
@@ -3326,15 +3330,6 @@
"node": ">=0.10"
}
},
- "node_modules/@pkgjs/parseargs": {
- "version": "0.11.0",
- "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
- "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
- "optional": true,
- "engines": {
- "node": ">=14"
- }
- },
"node_modules/@pkgr/core": {
"version": "0.2.9",
"resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.9.tgz",
@@ -5646,6 +5641,7 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
+ "dev": true,
"dependencies": {
"balanced-match": "^1.0.0"
}
@@ -6843,6 +6839,8 @@
},
"node_modules/eastasianwidth": {
"version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
"license": "MIT"
},
"node_modules/ecdsa-sig-formatter": {
@@ -8512,11 +8510,12 @@
}
},
"node_modules/foreground-child": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz",
- "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==",
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
+ "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
+ "license": "ISC",
"dependencies": {
- "cross-spawn": "^7.0.0",
+ "cross-spawn": "^7.0.6",
"signal-exit": "^4.0.1"
},
"engines": {
@@ -8530,6 +8529,7 @@
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
+ "license": "ISC",
"engines": {
"node": ">=14"
},
@@ -8755,14 +8755,14 @@
"license": "ISC"
},
"node_modules/glob": {
- "version": "11.0.2",
- "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.2.tgz",
- "integrity": "sha512-YT7U7Vye+t5fZ/QMkBFrTJ7ZQxInIUjwyAjVj84CYXqgBdv30MFUPGnBR6sQaVq6Is15wYJUsnzTuWaGRBhBAQ==",
- "license": "ISC",
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-11.1.0.tgz",
+ "integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^4.0.1",
- "minimatch": "^10.0.0",
+ "foreground-child": "^3.3.1",
+ "jackspeak": "^4.1.1",
+ "minimatch": "^10.1.1",
"minipass": "^7.1.2",
"package-json-from-dist": "^1.0.0",
"path-scurry": "^2.0.0"
@@ -8789,11 +8789,12 @@
}
},
"node_modules/glob/node_modules/minimatch": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz",
- "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
+ "version": "10.1.1",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
+ "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "brace-expansion": "^2.0.1"
+ "@isaacs/brace-expansion": "^5.0.0"
},
"engines": {
"node": "20 || >=22"
@@ -10210,9 +10211,10 @@
"license": "ISC"
},
"node_modules/jackspeak": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.0.1.tgz",
- "integrity": "sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog==",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+ "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
+ "license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/cliui": "^8.0.2"
},
@@ -10221,9 +10223,6 @@
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
- },
- "optionalDependencies": {
- "@pkgjs/parseargs": "^0.11.0"
}
},
"node_modules/javascript-stringify": {
@@ -10267,7 +10266,9 @@
"license": "MIT"
},
"node_modules/js-yaml": {
- "version": "4.1.0",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
"license": "MIT",
"dependencies": {
"argparse": "^2.0.1"
@@ -14915,6 +14916,7 @@
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
@@ -14927,12 +14929,14 @@
"node_modules/string-width-cjs/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "license": "MIT"
},
"node_modules/string-width-cjs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
@@ -15059,6 +15063,7 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
@@ -16818,6 +16823,7 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
@@ -16834,6 +16840,7 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
diff --git a/package.json b/package.json
index b28b2e83324e..0845c9a8aabd 100644
--- a/package.json
+++ b/package.json
@@ -188,7 +188,7 @@
"file-type": "21.0.0",
"flat": "^6.0.1",
"github-slugger": "^2.0.0",
- "glob": "11.0.2",
+ "glob": "11.1.0",
"hast-util-from-parse5": "^8.0.3",
"hast-util-to-string": "^3.0.1",
"hastscript": "^9.0.1",
@@ -202,7 +202,7 @@
"is-svg": "6.0.0",
"javascript-stringify": "^2.1.0",
"js-cookie": "^3.0.5",
- "js-yaml": "^4.1.0",
+ "js-yaml": "^4.1.1",
"liquidjs": "^10.16.7",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
From c9a50c6650ba46ba2d152872c497c00ef1f0a6e7 Mon Sep 17 00:00:00 2001
From: docs-bot <77750099+docs-bot@users.noreply.github.com>
Date: Mon, 1 Dec 2025 07:36:15 -0800
Subject: [PATCH 13/17] Sync secret scanning data (#58628)
Co-authored-by: Sam Browning <106113886+sabrowning1@users.noreply.github.com>
---
src/secret-scanning/data/public-docs.yml | 13 +++++++++++++
src/secret-scanning/lib/config.json | 4 ++--
2 files changed, 15 insertions(+), 2 deletions(-)
diff --git a/src/secret-scanning/data/public-docs.yml b/src/secret-scanning/data/public-docs.yml
index 3fb8c497a456..52d3682afc77 100644
--- a/src/secret-scanning/data/public-docs.yml
+++ b/src/secret-scanning/data/public-docs.yml
@@ -4493,6 +4493,19 @@
hasValidityCheck: false
base64Supported: false
isduplicate: false
+- provider: Paddle
+ supportedSecret: Paddle Sandbox API Key
+ secretType: paddle_sandbox_api_key
+ versions:
+ fpt: '*'
+ ghec: '*'
+ ghes: '>=3.20'
+ isPublic: true
+ isPrivateWithGhas: true
+ hasPushProtection: false
+ hasValidityCheck: false
+ base64Supported: false
+ isduplicate: false
- provider: PagerDuty
supportedSecret: PagerDuty OAuth Secret
secretType: pagerduty_oauth_secret
diff --git a/src/secret-scanning/lib/config.json b/src/secret-scanning/lib/config.json
index 60b1933ea2af..611f6c1a28b5 100644
--- a/src/secret-scanning/lib/config.json
+++ b/src/secret-scanning/lib/config.json
@@ -1,5 +1,5 @@
{
- "sha": "01009b022a23f59bee88e60046c6b425178c3cab",
- "blob-sha": "5fb38d54763b5d5170e70b153a2d0ddeb5bed7c9",
+ "sha": "b68ab4c2355b44a07d40d669fd28da652fe1929e",
+ "blob-sha": "403271a4c5adc2dc195b04553c514833765b388d",
"targetFilename": "code-security/secret-scanning/introduction/supported-secret-scanning-patterns"
}
\ No newline at end of file
From 64813b673aa496f0e459d93520d80115e4f1ff0d Mon Sep 17 00:00:00 2001
From: Sarah Schneider
Date: Mon, 1 Dec 2025 10:58:38 -0500
Subject: [PATCH 14/17] Refactor ai-tools CLI (#58580)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
src/ai-tools/lib/call-models-api.ts | 92 ++++++-
src/ai-tools/prompts/intro.md | 103 +++----
src/ai-tools/prompts/prompt-template.yml | 4 +-
src/ai-tools/scripts/ai-tools.ts | 326 +++++++++++++++++++----
4 files changed, 407 insertions(+), 118 deletions(-)
diff --git a/src/ai-tools/lib/call-models-api.ts b/src/ai-tools/lib/call-models-api.ts
index dda311baa04f..e08638e3c31b 100644
--- a/src/ai-tools/lib/call-models-api.ts
+++ b/src/ai-tools/lib/call-models-api.ts
@@ -34,10 +34,30 @@ interface ChatCompletionResponse {
}
}
-export async function callModelsApi(promptWithContent: ChatCompletionRequest): Promise {
+export async function callModelsApi(
+ promptWithContent: ChatCompletionRequest,
+ verbose = false,
+): Promise {
let aiResponse: ChatCompletionChoice
+ // Set default model if none specified
+ if (!promptWithContent.model) {
+ promptWithContent.model = 'openai/gpt-4o'
+ if (verbose) {
+ console.log('ā ļø No model specified, using default: openai/gpt-4o')
+ }
+ }
+
try {
+ // Create an AbortController for timeout handling
+ const controller = new AbortController()
+ const timeoutId = setTimeout(() => controller.abort(), 180000) // 3 minutes
+
+ const startTime = Date.now()
+ if (verbose) {
+ console.log(`š Making API request to GitHub Models using ${promptWithContent.model}...`)
+ }
+
const response = await fetch(modelsCompletionsEndpoint, {
method: 'post',
body: JSON.stringify(promptWithContent),
@@ -45,16 +65,80 @@ export async function callModelsApi(promptWithContent: ChatCompletionRequest): P
'Content-Type': 'application/json',
Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
'X-GitHub-Api-Version': '2022-11-28',
- Accept: 'Accept: application/vnd.github+json',
+ Accept: 'application/vnd.github+json',
},
+ signal: controller.signal,
})
+ const fetchTime = Date.now() - startTime
+ if (verbose) {
+ console.log(`ā±ļø API response received in ${fetchTime}ms`)
+ }
+
+ clearTimeout(timeoutId)
+
+ if (!response.ok) {
+ let errorMessage = `HTTP error! status: ${response.status} - ${response.statusText}`
+
+ // Try to get more detailed error information
+ try {
+ const errorBody = await response.json()
+ if (errorBody.error && errorBody.error.message) {
+ errorMessage += ` - ${errorBody.error.message}`
+ }
+ } catch {
+ // If we can't parse error body, continue with basic error
+ }
+
+ // Add helpful hints for common errors
+ if (response.status === 401) {
+ errorMessage += ' (Check your GITHUB_TOKEN)'
+ } else if (response.status === 400) {
+ errorMessage += ' (This may be due to an invalid model or malformed request)'
+ } else if (response.status === 429) {
+ errorMessage += ' (Rate limit exceeded - try again later)'
+ }
+
+ throw new Error(errorMessage)
+ }
+
const data: ChatCompletionResponse = await response.json()
+
+ if (!data.choices || data.choices.length === 0) {
+ throw new Error('No response choices returned from API')
+ }
+
aiResponse = data.choices[0]
+
+ if (verbose) {
+ const totalTime = Date.now() - startTime
+ console.log(`ā
Total API call completed in ${totalTime}ms`)
+
+ if (data.usage) {
+ console.log(
+ `š Tokens: ${data.usage.prompt_tokens} prompt + ${data.usage.completion_tokens} completion = ${data.usage.total_tokens} total`,
+ )
+ }
+ }
} catch (error) {
- console.error('Error calling GitHub Models REST API')
+ if (error instanceof Error) {
+ if (error.name === 'AbortError') {
+ throw new Error('API call timed out after 3 minutes')
+ }
+ console.error('Error calling GitHub Models REST API:', error.message)
+ }
throw error
}
- return aiResponse.message.content
+ return cleanAIResponse(aiResponse.message.content)
+}
+
+// Helper function to clean up AI response content
+function cleanAIResponse(content: string): string {
+ // Remove markdown code blocks
+ return content
+ .replace(/^```[\w]*\n/gm, '') // Remove opening code blocks
+ .replace(/\n```$/gm, '') // Remove closing code blocks at end
+ .replace(/\n```\n/gm, '\n') // Remove standalone closing code blocks
+ .trim()
}
diff --git a/src/ai-tools/prompts/intro.md b/src/ai-tools/prompts/intro.md
index ddf4eeccbbb2..5a0b6be7e972 100644
--- a/src/ai-tools/prompts/intro.md
+++ b/src/ai-tools/prompts/intro.md
@@ -2,37 +2,20 @@ You are an expert SEO content optimizer specializing in GitHub documentation.
Your task is to analyze a GitHub Docs content file and generate or optimize
the intro frontmatter property following Google's meta description best practices.
-## Your mission
-
-Generate a single, concise intro (one simple sentence maximum - NO colons, NO detailed explanations) that:
-
-* Starts with an action verb (e.g., "Learn," "Discover," "Access," "Explore," "Configure," "Set up," "Build")
-* **Uses developer-friendly, direct language** - avoid marketing jargon and corporate buzzwords
-* **Prioritizes conciseness over completeness** - cut unnecessary words ruthlessly
-* Accurately summarizes the content's core value proposition
-* Includes relevant keywords naturally without stuffing
-* Follows Google's snippet guidelines (descriptive, informative, compelling)
-* Is version-agnostic (no {% ifversion %} blocks, but {% data variables.* %} and {% data reusables.* %} are acceptable)
-* Matches the content type (article/category/mapTopic) requirements
-* **Goes beyond title restatement** - summarizes the complete article value, not just rephrasing the title
-* **Lists concrete steps or outcomes** - what users will actually do or accomplish
-* **Limits lists to 2-3 items maximum** - avoid long comma-separated sequences that feel overwhelming
-
-## SEO scoring criteria (1-10 scale)
-
-**10-9 (Excellent)**: Strong action verb, comprehensive content summary, optimal keyword density, clear unique value beyond title, perfect length
-**8-7 (Good)**: Action verb present, good content representation, decent keywords, some unique value, appropriate length
-**6-5 (Fair)**: Weak action verb or missing, partial content coverage, basic keywords, minimal value beyond title
-**4-3 (Poor)**: No action verb, limited content representation, few relevant keywords, mostly restates title
-**2-1 (Very Poor)**: Vague or misleading, no clear value proposition, poor keyword usage, completely redundant with title
-
-## Analysis process
-
-1. **Content resolution**: Keep {% data variables.* %} and {% data reusables.* %} but avoid {% ifversion %} blocks
-2. **Content analysis**: Identify the article's purpose, target audience, key concepts, and user outcomes
-3. **Category detection**: For index pages, analyze child content themes and collective value
-
-4. **SEO optimization**: Use strong action verbs, developer-friendly language, concrete outcomes, and relevant keywords while avoiding corporate buzzwords
+## Core Requirements
+
+**Primary constraints (must-haves):**
+* Start with action verb ("Learn," "Access," "Explore," "Configure," "Set up," "Build")
+* One sentence maximum - NO colons, NO detailed explanations
+* Avoid buzzwords: "leverage," "optimize," "maximize," "enhance," "streamline," "empower," "revolutionize," "seamlessly," "comprehensive," "enterprise-grade," "cutting-edge," "innovative," "game-changing," "next-generation," "world-class," "best-in-class," "state-of-the-art," "industry-leading," "robust," "scalable," "mission-critical," "synergistic," "holistic," "strategic," "transformative"
+* Different approach than title - don't start with same words/phrases
+* Lists 2-3 concrete outcomes maximum
+
+**Secondary optimizations (nice-to-haves):**
+* Include relevant keywords naturally
+* Version-agnostic ({% data variables.* %} OK, avoid {% ifversion %})
+* Follow Google snippet guidelines
+* Cut unnecessary words ruthlessly
**Content Summarization vs. Title Restatement**:
@@ -47,7 +30,7 @@ Generate a single, concise intro (one simple sentence maximum - NO colons, NO de
- Better: "Use {% data variables.product.prodname_copilot %} chat and code completion to research syntax, practice coding, and master new programming languages faster"
✅ **Use concise, developer-friendly language ({% data variables.* %} OK)**:
-- Better intro: "Evaluate use cases, configure security settings, and run pilot trials to successfully deploy {% data variables.copilot.copilot_coding_agent %} in your org"
+- Better intro: "Evaluate use cases, configure security settings, and run pilot trials to deploy {% data variables.copilot.copilot_coding_agent %} in your org"
❌ **Avoid overly long lists and colon constructions**:
- Too long: "Scope issues, pick suitable tasks, iterate via PR comments, add repo instructions, enable MCP tools, and preinstall dependencies"
@@ -55,24 +38,13 @@ Generate a single, concise intro (one simple sentence maximum - NO colons, NO de
- Better: "Scope tasks, configure custom instructions, and iterate on pull requests to improve {% data variables.copilot.copilot_coding_agent %} performance"
- Better: "Use {% data variables.product.prodname_copilot %} features like chat and code completion to research syntax, build programs, and learn new programming languages faster"
-**Tone Guidelines**:
-- **Developer-friendly**: Use direct, practical language
-- **Concise over complete**: Cut words ruthlessly
-- **Action-oriented**: List what users will actually do
-- **Avoid buzzwords**: Skip marketing language and corporate jargon
-- **Use concrete verbs**: Instead of "maximize/optimize/enhance" ā use "improve," "boost," "increase," or just describe the outcome directly
-- **Limit lists**: Maximum 2-3 items in comma-separated lists - prefer flowing sentences over exhaustive enumerations
-- **Avoid colon constructions**: Don't use "Do X: detailed explanation of A, B, and C" format - keep it simple and direct
-- **Avoid title similarity**: Don't start with the same words/phrases as the article title - approach the topic from a different angle
-
-The intro should answer: "What specific steps will I take?" rather than "What will this comprehensive solution provide?"
-
-## Analysis Process
+## Quality Checklist
-1. **First Draft**: Generate an initial improved intro following all guidelines above
-2. **Title Check**: Compare your draft to the article title - if it starts with similar words, rewrite with a different approach
-3. **Self-Review**: Evaluate your draft against the SEO scoring criteria and tone guidelines
-4. **Refinement**: If the draft contains buzzwords, weak verbs, title similarity, or scores below 8/10, create a refined version
+✅ **Structure**: Action verb + 2-3 concrete outcomes + under 350 characters
+✅ **Language**: Direct, practical developer language (no marketing jargon)
+✅ **Focus**: What users will DO, not what solution "provides"
+✅ **Uniqueness**: Different angle from article title
+✅ **Simplicity**: No colons, no complex lists, flowing sentences
## Output format
@@ -84,27 +56,12 @@ Title: "[Article title from frontmatter]"
Original intro: "[Current intro from the article, or "No intro" if none exists]"
-
-Original SEO score: [X]/10
-------------------------
-
-Improved intro: "[Single, concise intro that summarizes the article's full content value, not just restating the title]"
-
-
-Improved SEO score: [X]/10
+SEO-friendly alternative: "[Single, concise intro that summarizes the article's full content value, not just restating the title]"
------------------------
```
-Note: The improved score should reflect your best attempt after internal refinement.
-
## Character limits by content type
-**Priority: Conciseness over character limits**
-- Focus on being as concise as possible while maintaining clarity
-- Cut every unnecessary word before considering length
-- Developer-friendly brevity trumps hitting character targets
-
-**Technical limits** (for reference):
- **Articles**: Maximum 354 characters
- **Categories**: Maximum 362 characters
- **Map Topics**: Maximum 362 characters
@@ -124,4 +81,18 @@ Note: The improved score should reflect your best attempt after internal refinem
- {% data variables.product.prodname_copilot %} = "GitHub Copilot"
- {% data variables.copilot.copilot_coding_agent %} = "Copilot Coding Agent"
-Focus on creating intros that would make sense to someone discovering this content through Google search, clearly communicating the value and relevance of the article.
\ No newline at end of file
+Focus on creating intros that would make sense to someone discovering this content through Google search, clearly communicating the value and relevance of the article.
+
+
+
+## WRITE MODE INSTRUCTIONS
+
+**CRITICAL**: You are in write mode. Output ONLY the YAML frontmatter property to update.
+
+- Return just: `intro: "your improved intro text"`
+- Do NOT include analysis, scoring, explanations, or formatting
+- Do NOT wrap in markdown code blocks or ```yaml
+- Do NOT include the analysis format shown above
+- Just return the clean YAML property line
+
+
\ No newline at end of file
diff --git a/src/ai-tools/prompts/prompt-template.yml b/src/ai-tools/prompts/prompt-template.yml
index dab8d13adfa9..293a3decfe8e 100644
--- a/src/ai-tools/prompts/prompt-template.yml
+++ b/src/ai-tools/prompts/prompt-template.yml
@@ -6,4 +6,6 @@ messages:
content: >-
Review this content file according to the provided system prompt.
{{input}}
-model: openai/gpt-5
+model: openai/gpt-4o # Reliable model that works
+temperature: 0.3 # Lower temperature for consistent results
+max_completion_tokens: 4000 # Maximum response length
diff --git a/src/ai-tools/scripts/ai-tools.ts b/src/ai-tools/scripts/ai-tools.ts
index 1be467e02b0b..fde5245421cd 100644
--- a/src/ai-tools/scripts/ai-tools.ts
+++ b/src/ai-tools/scripts/ai-tools.ts
@@ -7,6 +7,8 @@ import ora from 'ora'
import { execSync } from 'child_process'
import { callModelsApi } from '@/ai-tools/lib/call-models-api'
import dotenv from 'dotenv'
+import readFrontmatter from '@/frame/lib/read-frontmatter'
+import { schema } from '@/frame/lib/frontmatter'
dotenv.config({ quiet: true })
const __dirname = path.dirname(fileURLToPath(import.meta.url))
@@ -28,35 +30,92 @@ if (!process.env.GITHUB_TOKEN) {
}
}
-interface EditorType {
- description: string
-}
+// Dynamically discover available editor types from prompt files
+const getAvailableEditorTypes = (): string[] => {
+ const editorTypes: string[] = []
+
+ try {
+ const promptFiles = fs.readdirSync(promptDir)
+ for (const file of promptFiles) {
+ if (file.endsWith('.md')) {
+ const editorName = path.basename(file, '.md')
+ editorTypes.push(editorName)
+ }
+ }
+ } catch {
+ console.warn('Could not read prompts directory, using empty editor types')
+ }
-interface EditorTypes {
- versioning: EditorType
- intro: EditorType
+ return editorTypes
}
-const editorTypes: EditorTypes = {
- versioning: {
- description: 'Refine versioning according to simplification guidance.',
- },
- intro: {
- description: 'Refine intro frontmatter based on SEO and content guidelines.',
- },
+const editorTypes = getAvailableEditorTypes()
+
+// Enhanced recursive markdown file finder with symlink, depth, and root path checks
+const findMarkdownFiles = (
+ dir: string,
+ rootDir: string,
+ depth: number = 0,
+ maxDepth: number = 20,
+ visited: Set = new Set(),
+): string[] => {
+ const markdownFiles: string[] = []
+ let realDir: string
+ try {
+ realDir = fs.realpathSync(dir)
+ } catch {
+ // If we can't resolve real path, skip this directory
+ return []
+ }
+ // Prevent escaping root directory
+ if (!realDir.startsWith(rootDir)) {
+ return []
+ }
+ // Prevent symlink loops
+ if (visited.has(realDir)) {
+ return []
+ }
+ visited.add(realDir)
+ // Prevent excessive depth
+ if (depth > maxDepth) {
+ return []
+ }
+ let entries: fs.Dirent[]
+ try {
+ entries = fs.readdirSync(realDir, { withFileTypes: true })
+ } catch {
+ // If we can't read directory, skip
+ return []
+ }
+ for (const entry of entries) {
+ const fullPath = path.join(realDir, entry.name)
+ let realFullPath: string
+ try {
+ realFullPath = fs.realpathSync(fullPath)
+ } catch {
+ continue
+ }
+ // Prevent escaping root directory for files
+ if (!realFullPath.startsWith(rootDir)) {
+ continue
+ }
+ if (entry.isDirectory()) {
+ markdownFiles.push(...findMarkdownFiles(realFullPath, rootDir, depth + 1, maxDepth, visited))
+ } else if (entry.isFile() && entry.name.endsWith('.md')) {
+ markdownFiles.push(realFullPath)
+ }
+ }
+ return markdownFiles
}
const refinementDescriptions = (): string => {
- let str = '\n\n'
- for (const [ed, edObj] of Object.entries(editorTypes)) {
- str += ` ${ed.padEnd(12)} ${edObj.description}\n`
- }
- return str
+ return editorTypes.join(', ')
}
interface CliOptions {
verbose?: boolean
- refine: Array
+ prompt?: string[]
+ refine?: string[]
files: string[]
write?: boolean
}
@@ -71,9 +130,10 @@ program
'-w, --write',
'Write changes back to the original files (default: output to console only)',
)
- .requiredOption(
+ .option('-p, --prompt ', `Specify one or more prompt type: ${refinementDescriptions()}`)
+ .option(
'-r, --refine ',
- `Specify one or more refinement type: ${refinementDescriptions().trimEnd()}\n`,
+ `(Deprecated: use --prompt) Specify one or more prompt type: ${refinementDescriptions()}`,
)
.requiredOption(
'-f, --files ',
@@ -84,7 +144,30 @@ program
const spinner = ora('Starting AI review...').start()
const files = options.files
- const editors = options.refine
+ // Handle both --prompt and --refine options for backwards compatibility
+ const prompts = options.prompt || options.refine
+
+ if (!prompts || prompts.length === 0) {
+ spinner.fail('No prompt type specified. Use --prompt or --refine with one or more types.')
+ process.exitCode = 1
+ return
+ }
+
+ // Validate that all requested editor types exist
+ const availableEditors = editorTypes
+ for (const editor of prompts) {
+ if (!availableEditors.includes(editor)) {
+ spinner.fail(
+ `Unknown prompt type: ${editor}. Available types: ${availableEditors.join(', ')}`,
+ )
+ process.exitCode = 1
+ return
+ }
+ }
+
+ if (options.verbose) {
+ console.log(`Processing ${files.length} files with prompts: ${prompts.join(', ')}`)
+ }
for (const file of files) {
const filePath = path.resolve(process.cwd(), file)
@@ -96,37 +179,101 @@ program
continue
}
- try {
- spinner.text = `Reading file: ${file}`
- const content = fs.readFileSync(filePath, 'utf8')
+ // Check if it's a directory
+ const isDirectory = fs.statSync(filePath).isDirectory()
+
+ for (const editorType of prompts) {
+ try {
+ // For other editor types, process individual files
+ const filesToProcess: string[] = []
- for (const editorType of editors) {
- spinner.text = `Running the AI-powered ${editorType} refinement...`
- const answer = await callEditor(editorType, content, options.write || false)
- spinner.stop()
+ if (isDirectory) {
+ // Find all markdown files in the directory recursively
+ // Use process.cwd() as the root directory for safety
+ const rootDir = fs.realpathSync(process.cwd())
+ filesToProcess.push(...findMarkdownFiles(filePath, rootDir))
- if (options.write) {
- // Write the result back to the original file
- fs.writeFileSync(filePath, answer, 'utf8')
- console.log(`ā
Updated: ${file}`)
+ if (filesToProcess.length === 0) {
+ spinner.warn(`No markdown files found in directory: ${file}`)
+ continue
+ }
+
+ spinner.text = `Found ${filesToProcess.length} markdown files in ${file}`
} else {
- // Just output to console (current behavior)
- console.log(answer)
+ filesToProcess.push(filePath)
}
+
+ spinner.start()
+ for (const fileToProcess of filesToProcess) {
+ const relativePath = path.relative(process.cwd(), fileToProcess)
+ spinner.text = `Processing: ${relativePath}`
+ try {
+ const content = fs.readFileSync(fileToProcess, 'utf8')
+ const answer = await callEditor(
+ editorType,
+ content,
+ options.write || false,
+ options.verbose || false,
+ )
+ spinner.stop()
+
+ if (options.write) {
+ if (editorType === 'intro') {
+ // For frontmatter addition/modification, merge properties instead of overwriting entire file
+ const updatedContent = mergeFrontmatterProperties(fileToProcess, answer)
+ fs.writeFileSync(fileToProcess, updatedContent, 'utf8')
+ console.log(`ā
Added frontmatter properties to: ${relativePath}`)
+ } else {
+ // For other editor types, write the full result back to the original file
+ fs.writeFileSync(fileToProcess, answer, 'utf8')
+ console.log(`ā
Updated: ${relativePath}`)
+ }
+ } else {
+ // Just output to console (current behavior)
+ if (filesToProcess.length > 1) {
+ console.log(`\n=== ${relativePath} ===`)
+ }
+ console.log(answer)
+ }
+ } catch (err) {
+ const error = err as Error
+ spinner.fail(`Error processing ${relativePath}: ${error.message}`)
+ process.exitCode = 1
+ } finally {
+ spinner.stop()
+ }
+ }
+ } catch (err) {
+ const error = err as Error
+ const targetName = path.relative(process.cwd(), filePath)
+ spinner.fail(`Error processing ${targetName}: ${error.message}`)
+ process.exitCode = 1
}
- } catch (err) {
- const error = err as Error
- spinner.fail(`Error processing file ${file}: ${error.message}`)
- process.exitCode = 1
}
}
spinner.stop()
+
+ // Exit with appropriate code based on whether any errors occurred
+ if (process.exitCode) {
+ process.exit(process.exitCode)
+ }
})()
})
program.parse(process.argv)
+// Handle graceful shutdown
+process.on('SIGINT', () => {
+ console.log('\n\nš Process interrupted by user')
+ process.exit(0)
+})
+
+process.on('SIGTERM', () => {
+ console.log('\n\nš Process terminated')
+ process.exit(0)
+})
+
interface PromptMessage {
content: string
role: string
@@ -139,26 +286,111 @@ interface PromptData {
max_tokens?: number
}
+// Function to merge new frontmatter properties into existing file while preserving formatting
+function mergeFrontmatterProperties(filePath: string, newPropertiesYaml: string): string {
+ const content = fs.readFileSync(filePath, 'utf8')
+ const parsed = readFrontmatter(content)
+
+ if (parsed.errors && parsed.errors.length > 0) {
+ throw new Error(
+ `Failed to parse frontmatter: ${parsed.errors.map((e) => e.message).join(', ')}`,
+ )
+ }
+
+ if (!parsed.content) {
+ throw new Error('Failed to parse content from file')
+ }
+
+ try {
+ // Clean up the AI response - remove markdown code blocks if present
+ let cleanedYaml = newPropertiesYaml.trim()
+ cleanedYaml = cleanedYaml.replace(/^```ya?ml\s*\n/i, '')
+ cleanedYaml = cleanedYaml.replace(/\n```\s*$/i, '')
+ cleanedYaml = cleanedYaml.trim()
+
+ interface FrontmatterProperties {
+ intro?: string
+ [key: string]: unknown
+ }
+ const newProperties = yaml.load(cleanedYaml) as FrontmatterProperties
+
+ // Security: Validate against prototype pollution using the official frontmatter schema
+ const allowedKeys = Object.keys(schema.properties)
+
+ const sanitizedProperties = Object.fromEntries(
+ Object.entries(newProperties).filter(([key]) => {
+ if (allowedKeys.includes(key)) {
+ return true
+ }
+ console.warn(`Filtered out potentially unsafe frontmatter key: ${key}`)
+ return false
+ }),
+ )
+
+ // Merge new properties with existing frontmatter
+ const mergedData: FrontmatterProperties = { ...parsed.data, ...sanitizedProperties }
+
+ // Manually ensure intro is wrapped in single quotes in the final output
+ let result = readFrontmatter.stringify(parsed.content, mergedData)
+
+ // Post-process to ensure intro field has single quotes
+ if (newProperties.intro) {
+ const introValue = newProperties.intro.toString()
+ // Replace any quote style on intro with single quotes
+ result = result.replace(
+ /^intro:\s*(['"`]?)([^'"`\n\r]+)\1?\s*$/m,
+ `intro: '${introValue.replace(/'/g, "''")}'`, // Escape single quotes by doubling them
+ )
+ }
+ return result
+ } catch (error) {
+ console.error('Failed to parse AI response as YAML:')
+ console.error('Raw AI response:', JSON.stringify(newPropertiesYaml))
+ throw new Error(`Failed to parse new frontmatter properties: ${error}`)
+ }
+}
+
async function callEditor(
- editorType: keyof EditorTypes,
+ editorType: string,
content: string,
writeMode: boolean,
+ verbose = false,
): Promise {
- const markdownPromptPath = path.join(promptDir, `${editorType}.md`)
- let markdownPrompt = fs.readFileSync(markdownPromptPath, 'utf8')
+ const markdownPromptPath = path.join(promptDir, `${String(editorType)}.md`)
- // For intro type in write mode, append special instructions
- if (editorType === 'intro' && writeMode) {
- markdownPrompt +=
- '\n\n**WRITE MODE**: Output only the complete updated file content with the new intro in the frontmatter. Do not include analysis or explanations - just return the file ready to write.'
+ if (!fs.existsSync(markdownPromptPath)) {
+ throw new Error(`Prompt file not found: ${markdownPromptPath}`)
}
+ const markdownPrompt = fs.readFileSync(markdownPromptPath, 'utf8')
+
const prompt = yaml.load(fs.readFileSync(promptTemplatePath, 'utf8')) as PromptData
+ // Validate the prompt template has required properties
+ if (!prompt.messages || !Array.isArray(prompt.messages)) {
+ throw new Error('Invalid prompt template: missing or invalid messages array')
+ }
+
for (const msg of prompt.messages) {
msg.content = msg.content.replace('{{markdownPrompt}}', markdownPrompt)
msg.content = msg.content.replace('{{input}}', content)
+ // Replace writeMode template variable with simple string replacement
+ msg.content = msg.content.replace(
+ //g,
+ writeMode ? '' : '',
+ )
+ msg.content = msg.content.replace(
+ //g,
+ writeMode ? '' : '',
+ )
+ msg.content = msg.content.replace(
+ //g,
+ writeMode ? '' : '',
+ )
+
+ // Remove sections marked for removal
+ msg.content = msg.content.replace(/[\s\S]*?/g, '')
}
- return callModelsApi(prompt)
+ return callModelsApi(prompt, verbose)
}
From 4451fa9501b5a316ab10d52080c806dcf5a091a9 Mon Sep 17 00:00:00 2001
From: Salil
Date: Mon, 1 Dec 2025 09:07:49 -0800
Subject: [PATCH 15/17] Updating limits for workflow dispatch to be 25 (#58627)
Co-authored-by: Sam Browning <106113886+sabrowning1@users.noreply.github.com>
---
.../how-tos/manage-workflow-runs/manually-run-a-workflow.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/content/actions/how-tos/manage-workflow-runs/manually-run-a-workflow.md b/content/actions/how-tos/manage-workflow-runs/manually-run-a-workflow.md
index a84ae5173b48..ef0dc91a3f84 100644
--- a/content/actions/how-tos/manage-workflow-runs/manually-run-a-workflow.md
+++ b/content/actions/how-tos/manage-workflow-runs/manually-run-a-workflow.md
@@ -87,6 +87,6 @@ gh run watch
When using the REST API, you configure the `inputs` and `ref` as request body parameters. If the inputs are omitted, the default values defined in the workflow file are used.
> [!NOTE]
-> You can define up to 10 `inputs` for a `workflow_dispatch` event.
+> You can define up to {% ifversion fpt or ghec %}25 {% else %}10 {% endif %} `inputs` for a `workflow_dispatch` event.
For more information about using the REST API, see [AUTOTITLE](/rest/actions/workflows#create-a-workflow-dispatch-event).
From 741de7d138914a818f27ce14b6cd295facd57962 Mon Sep 17 00:00:00 2001
From: Ben De St Paer-Gotch
Date: Mon, 1 Dec 2025 17:16:04 +0000
Subject: [PATCH 16/17] Add pricing detail for additional storage in GitHub
Actions (#41573)
Co-authored-by: Sharra-writes
Co-authored-by: Salil
---
content/billing/concepts/product-billing/github-actions.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/content/billing/concepts/product-billing/github-actions.md b/content/billing/concepts/product-billing/github-actions.md
index caf5c66a6a05..60a2079f7b52 100644
--- a/content/billing/concepts/product-billing/github-actions.md
+++ b/content/billing/concepts/product-billing/github-actions.md
@@ -38,6 +38,7 @@ For **private repositories**, each {% data variables.product.github %} account r
* If you run a workflow that normally takes 10 minutes and it fails after 5 minutes because a dependency isn't available, you'll use 5 minutes of the repository owner's allowance. If you fix the problem and re-run the workflow successfully, in total you'll use 15 minutes of the repository owner's allowance.
* If you run a workflow that generates many log files and a long job summary, these files do not count towards the repository owner's artifact storage allowance.
* Cache storage usage is measured by the peak usage for each hour. Included usage is 10 GB per repository. For a given hour, if a repository has a peak cache usage of 15 GB, then the repository owner will be charged for the 5 GB of usage above the 10 GB included for that hour. The repository owner will only be charged if the repository cache storage limit has been configured higher than the included usage.
+* Additional cache storage is $0.07 per GiB, per month.
## Free use of {% data variables.product.prodname_actions %}
From b2494d4b4163e0383e0f3a2d1640f8b4dd43e8d1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Dec 2025 17:22:43 +0000
Subject: [PATCH 17/17] Bump actions/checkout from 5.0.0 to 6.0.0 (#58616)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/all-documents.yml | 2 +-
.github/workflows/article-api-docs.yml | 2 +-
.github/workflows/auto-add-ready-for-doc-review.yml | 2 +-
.github/workflows/check-broken-links-github-github.yml | 2 +-
.github/workflows/close-on-invalid-label.yaml | 2 +-
.github/workflows/codeql.yml | 2 +-
.github/workflows/confirm-internal-staff-work-in-docs.yml | 2 +-
.github/workflows/content-lint-markdown.yml | 2 +-
.github/workflows/content-linter-rules-docs.yml | 2 +-
.github/workflows/copy-api-issue-to-internal.yml | 2 +-
.github/workflows/count-translation-corruptions.yml | 2 +-
.github/workflows/create-changelog-pr.yml | 2 +-
.github/workflows/delete-orphan-translation-files.yml | 4 ++--
.github/workflows/docs-review-collect.yml | 2 +-
.github/workflows/dont-delete-assets.yml | 2 +-
.github/workflows/dont-delete-features.yml | 2 +-
.github/workflows/enterprise-dates.yml | 2 +-
.github/workflows/enterprise-release-issue.yml | 2 +-
.github/workflows/first-responder-v2-prs-collect.yml | 2 +-
.github/workflows/generate-code-scanning-query-lists.yml | 4 ++--
.github/workflows/headless-tests.yml | 2 +-
.github/workflows/hubber-contribution-help.yml | 2 +-
.github/workflows/index-autocomplete-search.yml | 4 ++--
.github/workflows/index-general-search-pr.yml | 4 ++--
.github/workflows/index-general-search.yml | 6 +++---
.github/workflows/keep-caches-warm.yml | 2 +-
.github/workflows/link-check-daily.yml | 4 ++--
.github/workflows/link-check-on-pr.yml | 2 +-
.github/workflows/lint-code.yml | 2 +-
.github/workflows/lint-entire-content-data-markdown.yml | 2 +-
.github/workflows/local-dev.yml | 2 +-
.github/workflows/moda-allowed-ips.yml | 2 +-
.github/workflows/move-content.yml | 2 +-
.github/workflows/move-ready-to-merge-pr.yaml | 2 +-
.github/workflows/move-reopened-issues-to-triage.yaml | 2 +-
.github/workflows/needs-sme-stale-check.yaml | 2 +-
.github/workflows/needs-sme-workflow.yml | 4 ++--
.github/workflows/no-response.yaml | 2 +-
.github/workflows/notify-about-deployment.yml | 2 +-
.github/workflows/orphaned-features-check.yml | 2 +-
.github/workflows/orphaned-files-check.yml | 2 +-
.github/workflows/os-ready-for-review.yml | 2 +-
.github/workflows/package-lock-lint.yml | 2 +-
.github/workflows/purge-fastly.yml | 2 +-
.github/workflows/readability.yml | 2 +-
.github/workflows/ready-for-doc-review.yml | 2 +-
.github/workflows/repo-sync.yml | 2 +-
.github/workflows/review-comment.yml | 2 +-
.github/workflows/reviewers-content-systems.yml | 2 +-
.github/workflows/reviewers-dependabot.yml | 2 +-
.github/workflows/reviewers-docs-engineering.yml | 2 +-
.github/workflows/reviewers-legal.yml | 2 +-
.github/workflows/site-policy-sync.yml | 4 ++--
.github/workflows/stale.yml | 2 +-
.github/workflows/sync-audit-logs.yml | 2 +-
.github/workflows/sync-codeql-cli.yml | 4 ++--
.github/workflows/sync-graphql.yml | 4 ++--
.github/workflows/sync-openapi.yml | 6 +++---
.github/workflows/sync-secret-scanning.yml | 2 +-
.github/workflows/test-changed-content.yml | 2 +-
.github/workflows/test.yml | 2 +-
.github/workflows/triage-issue-comments.yml | 2 +-
.github/workflows/triage-issues.yml | 2 +-
.github/workflows/triage-pull-requests.yml | 2 +-
.github/workflows/triage-stale-check.yml | 4 ++--
.github/workflows/triage-unallowed-contributions.yml | 2 +-
.github/workflows/validate-asset-images.yml | 2 +-
.github/workflows/validate-github-github-docs-urls.yml | 4 ++--
.github/workflows/validate-openapi-check.yml | 2 +-
69 files changed, 84 insertions(+), 84 deletions(-)
diff --git a/.github/workflows/all-documents.yml b/.github/workflows/all-documents.yml
index ff7568c91c86..58ce83b578df 100644
--- a/.github/workflows/all-documents.yml
+++ b/.github/workflows/all-documents.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/article-api-docs.yml b/.github/workflows/article-api-docs.yml
index 4f87a815b93b..e6552f4fa380 100644
--- a/.github/workflows/article-api-docs.yml
+++ b/.github/workflows/article-api-docs.yml
@@ -22,7 +22,7 @@ jobs:
if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/auto-add-ready-for-doc-review.yml b/.github/workflows/auto-add-ready-for-doc-review.yml
index 7b58c76c72f1..4a78f5a76d66 100644
--- a/.github/workflows/auto-add-ready-for-doc-review.yml
+++ b/.github/workflows/auto-add-ready-for-doc-review.yml
@@ -25,7 +25,7 @@ jobs:
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Check team membership
id: membership_check
diff --git a/.github/workflows/check-broken-links-github-github.yml b/.github/workflows/check-broken-links-github-github.yml
index fc7ea6ff06bd..9ed65c286704 100644
--- a/.github/workflows/check-broken-links-github-github.yml
+++ b/.github/workflows/check-broken-links-github-github.yml
@@ -24,7 +24,7 @@ jobs:
REPORT_REPOSITORY: github/docs-content
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# To prevent issues with cloning early access content later
persist-credentials: 'false'
diff --git a/.github/workflows/close-on-invalid-label.yaml b/.github/workflows/close-on-invalid-label.yaml
index 715ff94260c7..7c7e12b3d478 100644
--- a/.github/workflows/close-on-invalid-label.yaml
+++ b/.github/workflows/close-on-invalid-label.yaml
@@ -37,7 +37,7 @@ jobs:
- name: Check out repo
if: ${{ failure() && github.event_name != 'pull_request_target' }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() && github.event_name != 'pull_request_target' }}
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index b5484c2f8c01..39a961119a60 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -33,7 +33,7 @@ jobs:
if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
with:
languages: javascript # comma separated list of values from {go, python, javascript, java, cpp, csharp, ruby}
diff --git a/.github/workflows/confirm-internal-staff-work-in-docs.yml b/.github/workflows/confirm-internal-staff-work-in-docs.yml
index f2b4cb4b2156..776157ce8acf 100644
--- a/.github/workflows/confirm-internal-staff-work-in-docs.yml
+++ b/.github/workflows/confirm-internal-staff-work-in-docs.yml
@@ -83,7 +83,7 @@ jobs:
- name: Check out repo
if: ${{ failure() && github.event_name != 'pull_request_target' }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() && github.event_name != 'pull_request_target' }}
with:
diff --git a/.github/workflows/content-lint-markdown.yml b/.github/workflows/content-lint-markdown.yml
index f097e34e364b..f00366bc9ab8 100644
--- a/.github/workflows/content-lint-markdown.yml
+++ b/.github/workflows/content-lint-markdown.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Node and dependencies
uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/content-linter-rules-docs.yml b/.github/workflows/content-linter-rules-docs.yml
index fe1dc03ca736..4d17f0cc2ae0 100644
--- a/.github/workflows/content-linter-rules-docs.yml
+++ b/.github/workflows/content-linter-rules-docs.yml
@@ -25,7 +25,7 @@ jobs:
if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/copy-api-issue-to-internal.yml b/.github/workflows/copy-api-issue-to-internal.yml
index c630786ddf2f..4f52dd60d3ab 100644
--- a/.github/workflows/copy-api-issue-to-internal.yml
+++ b/.github/workflows/copy-api-issue-to-internal.yml
@@ -73,7 +73,7 @@ jobs:
- name: Check out repo
if: ${{ failure() && github.event_name != 'workflow_dispatch' && github.repository == 'github/docs-internal' }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() && github.event_name != 'workflow_dispatch' && github.repository == 'github/docs-internal' }}
with:
diff --git a/.github/workflows/count-translation-corruptions.yml b/.github/workflows/count-translation-corruptions.yml
index 8549c30fe247..0c8b901f0ee2 100644
--- a/.github/workflows/count-translation-corruptions.yml
+++ b/.github/workflows/count-translation-corruptions.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout English repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# Using a PAT is necessary so that the new commit will trigger the
# CI in the PR. (Events from GITHUB_TOKEN don't trigger new workflows.)
diff --git a/.github/workflows/create-changelog-pr.yml b/.github/workflows/create-changelog-pr.yml
index 3de39fcb2d2d..626af254771f 100644
--- a/.github/workflows/create-changelog-pr.yml
+++ b/.github/workflows/create-changelog-pr.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: 'Ensure ${{ env.CHANGELOG_FILE }} exists'
run: |
diff --git a/.github/workflows/delete-orphan-translation-files.yml b/.github/workflows/delete-orphan-translation-files.yml
index a6907b97ef98..d97fa24b30c5 100644
--- a/.github/workflows/delete-orphan-translation-files.yml
+++ b/.github/workflows/delete-orphan-translation-files.yml
@@ -60,10 +60,10 @@ jobs:
language_repo: github/docs-internal.ko-kr
steps:
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Checkout the language-specific repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: ${{ matrix.language_repo }}
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
diff --git a/.github/workflows/docs-review-collect.yml b/.github/workflows/docs-review-collect.yml
index e388d1eeab9c..f97bfbca4ae4 100644
--- a/.github/workflows/docs-review-collect.yml
+++ b/.github/workflows/docs-review-collect.yml
@@ -20,7 +20,7 @@ jobs:
steps:
- name: Check out repo content
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Setup Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
diff --git a/.github/workflows/dont-delete-assets.yml b/.github/workflows/dont-delete-assets.yml
index e2ed22fe2d01..17ff82230cdb 100644
--- a/.github/workflows/dont-delete-assets.yml
+++ b/.github/workflows/dont-delete-assets.yml
@@ -30,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/dont-delete-features.yml b/.github/workflows/dont-delete-features.yml
index 950c3377a3ce..16a0cea8f555 100644
--- a/.github/workflows/dont-delete-features.yml
+++ b/.github/workflows/dont-delete-features.yml
@@ -30,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/enterprise-dates.yml b/.github/workflows/enterprise-dates.yml
index 280f516dddb1..d638bf8ba351 100644
--- a/.github/workflows/enterprise-dates.yml
+++ b/.github/workflows/enterprise-dates.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/enterprise-release-issue.yml b/.github/workflows/enterprise-release-issue.yml
index f6420241551d..eabaf5bf0f27 100644
--- a/.github/workflows/enterprise-release-issue.yml
+++ b/.github/workflows/enterprise-release-issue.yml
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/first-responder-v2-prs-collect.yml b/.github/workflows/first-responder-v2-prs-collect.yml
index cdad2ed0e037..4ebc9dcbac6e 100644
--- a/.github/workflows/first-responder-v2-prs-collect.yml
+++ b/.github/workflows/first-responder-v2-prs-collect.yml
@@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
# Add to the FR project
# and set type to "Maintenance"
diff --git a/.github/workflows/generate-code-scanning-query-lists.yml b/.github/workflows/generate-code-scanning-query-lists.yml
index a3e261803b87..bf4aa979fcec 100644
--- a/.github/workflows/generate-code-scanning-query-lists.yml
+++ b/.github/workflows/generate-code-scanning-query-lists.yml
@@ -32,12 +32,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
- name: Checkout codeql repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/codeql
path: codeql
diff --git a/.github/workflows/headless-tests.yml b/.github/workflows/headless-tests.yml
index 6b1f78b76a41..535d6c036ec2 100644
--- a/.github/workflows/headless-tests.yml
+++ b/.github/workflows/headless-tests.yml
@@ -37,7 +37,7 @@ jobs:
timeout-minutes: 60
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/setup-elasticsearch
diff --git a/.github/workflows/hubber-contribution-help.yml b/.github/workflows/hubber-contribution-help.yml
index b80ff46690cc..a105b8d3042b 100644
--- a/.github/workflows/hubber-contribution-help.yml
+++ b/.github/workflows/hubber-contribution-help.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- id: membership_check
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd
diff --git a/.github/workflows/index-autocomplete-search.yml b/.github/workflows/index-autocomplete-search.yml
index 79582d796aca..408fecf43061 100644
--- a/.github/workflows/index-autocomplete-search.yml
+++ b/.github/workflows/index-autocomplete-search.yml
@@ -23,14 +23,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
- uses: ./.github/actions/setup-elasticsearch
if: ${{ github.event_name == 'pull_request' }}
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
repository: github/docs-internal-data
diff --git a/.github/workflows/index-general-search-pr.yml b/.github/workflows/index-general-search-pr.yml
index 96127559d3c9..dd1ebf27883d 100644
--- a/.github/workflows/index-general-search-pr.yml
+++ b/.github/workflows/index-general-search-pr.yml
@@ -37,10 +37,10 @@ jobs:
if: github.repository == 'github/docs-internal'
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Clone docs-internal-data
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/docs-internal-data
# This works because user `docs-bot` has read access to that private repo.
diff --git a/.github/workflows/index-general-search.yml b/.github/workflows/index-general-search.yml
index c3930e1412a0..683833c81628 100644
--- a/.github/workflows/index-general-search.yml
+++ b/.github/workflows/index-general-search.yml
@@ -87,7 +87,7 @@ jobs:
- name: Check out repo
if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
@@ -115,10 +115,10 @@ jobs:
language: ${{ fromJSON(needs.figureOutMatrix.outputs.matrix) }}
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Clone docs-internal-data
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/docs-internal-data
# This works because user `docs-bot` has read access to that private repo.
diff --git a/.github/workflows/keep-caches-warm.yml b/.github/workflows/keep-caches-warm.yml
index 04daa6d9c045..e63de3ecd6b1 100644
--- a/.github/workflows/keep-caches-warm.yml
+++ b/.github/workflows/keep-caches-warm.yml
@@ -29,7 +29,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/link-check-daily.yml b/.github/workflows/link-check-daily.yml
index 1f0876acbd22..63e8933ea4ff 100644
--- a/.github/workflows/link-check-daily.yml
+++ b/.github/workflows/link-check-daily.yml
@@ -23,7 +23,7 @@ jobs:
run: gh --version
- name: Check out repo's default branch
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
@@ -37,7 +37,7 @@ jobs:
- name: Check out docs-early-access too, if internal repo
if: ${{ github.repository == 'github/docs-internal' }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/docs-early-access
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
diff --git a/.github/workflows/link-check-on-pr.yml b/.github/workflows/link-check-on-pr.yml
index c91fe77e9324..811c47752f3d 100644
--- a/.github/workflows/link-check-on-pr.yml
+++ b/.github/workflows/link-check-on-pr.yml
@@ -26,7 +26,7 @@ jobs:
if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml
index 7506716f6d66..da2094ec5a42 100644
--- a/.github/workflows/lint-code.yml
+++ b/.github/workflows/lint-code.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/lint-entire-content-data-markdown.yml b/.github/workflows/lint-entire-content-data-markdown.yml
index 42699314f95d..d59a2b89a75d 100644
--- a/.github/workflows/lint-entire-content-data-markdown.yml
+++ b/.github/workflows/lint-entire-content-data-markdown.yml
@@ -23,7 +23,7 @@ jobs:
run: gh --version
- name: Check out repo's default branch
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Node and dependencies
uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/local-dev.yml b/.github/workflows/local-dev.yml
index 6653b5f41ba0..8e69474ed211 100644
--- a/.github/workflows/local-dev.yml
+++ b/.github/workflows/local-dev.yml
@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/moda-allowed-ips.yml b/.github/workflows/moda-allowed-ips.yml
index b4b9cd6f4a0d..c188dbd56935 100644
--- a/.github/workflows/moda-allowed-ips.yml
+++ b/.github/workflows/moda-allowed-ips.yml
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out the repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Update list of allowed IPs
run: |
diff --git a/.github/workflows/move-content.yml b/.github/workflows/move-content.yml
index 2737da134d57..9df8f84e9a64 100644
--- a/.github/workflows/move-content.yml
+++ b/.github/workflows/move-content.yml
@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/move-ready-to-merge-pr.yaml b/.github/workflows/move-ready-to-merge-pr.yaml
index af3e6e0a9d41..8c9b4df87330 100644
--- a/.github/workflows/move-ready-to-merge-pr.yaml
+++ b/.github/workflows/move-ready-to-merge-pr.yaml
@@ -31,7 +31,7 @@ jobs:
repo-token: ${{ secrets.DOCS_BOT_PAT_BASE }}
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/move-reopened-issues-to-triage.yaml b/.github/workflows/move-reopened-issues-to-triage.yaml
index 0e6ce5ea1bc8..80726a5c10c8 100644
--- a/.github/workflows/move-reopened-issues-to-triage.yaml
+++ b/.github/workflows/move-reopened-issues-to-triage.yaml
@@ -45,7 +45,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
diff --git a/.github/workflows/needs-sme-stale-check.yaml b/.github/workflows/needs-sme-stale-check.yaml
index 186f613d8b82..a847c4997d63 100644
--- a/.github/workflows/needs-sme-stale-check.yaml
+++ b/.github/workflows/needs-sme-stale-check.yaml
@@ -35,7 +35,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
diff --git a/.github/workflows/needs-sme-workflow.yml b/.github/workflows/needs-sme-workflow.yml
index 9e930545c1f6..1d3b6bef881b 100644
--- a/.github/workflows/needs-sme-workflow.yml
+++ b/.github/workflows/needs-sme-workflow.yml
@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9
with:
@@ -41,7 +41,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9
with:
diff --git a/.github/workflows/no-response.yaml b/.github/workflows/no-response.yaml
index 71478cffec3f..11c7bccf3665 100644
--- a/.github/workflows/no-response.yaml
+++ b/.github/workflows/no-response.yaml
@@ -57,7 +57,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
diff --git a/.github/workflows/notify-about-deployment.yml b/.github/workflows/notify-about-deployment.yml
index c16fecc57bb5..dfefe3de87fb 100644
--- a/.github/workflows/notify-about-deployment.yml
+++ b/.github/workflows/notify-about-deployment.yml
@@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/orphaned-features-check.yml b/.github/workflows/orphaned-features-check.yml
index f1fba43136a2..c499e1e38fd1 100644
--- a/.github/workflows/orphaned-features-check.yml
+++ b/.github/workflows/orphaned-features-check.yml
@@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout English repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# Using a PAT is necessary so that the new commit will trigger the
# CI in the PR. (Events from GITHUB_TOKEN don't trigger new workflows.)
diff --git a/.github/workflows/orphaned-files-check.yml b/.github/workflows/orphaned-files-check.yml
index ba2e398c28e4..db410c1b5bf3 100644
--- a/.github/workflows/orphaned-files-check.yml
+++ b/.github/workflows/orphaned-files-check.yml
@@ -30,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout English repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# Using a PAT is necessary so that the new commit will trigger the
# CI in the PR. (Events from GITHUB_TOKEN don't trigger new workflows.)
diff --git a/.github/workflows/os-ready-for-review.yml b/.github/workflows/os-ready-for-review.yml
index 630a443c8ead..7aef9268f45c 100644
--- a/.github/workflows/os-ready-for-review.yml
+++ b/.github/workflows/os-ready-for-review.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo content
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Check if this run was triggered by a member of the docs team
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd
diff --git a/.github/workflows/package-lock-lint.yml b/.github/workflows/package-lock-lint.yml
index 1b9581f94536..105d074170ee 100644
--- a/.github/workflows/package-lock-lint.yml
+++ b/.github/workflows/package-lock-lint.yml
@@ -25,7 +25,7 @@ jobs:
if: github.repository == 'github/docs-internal' || github.repository == 'github/docs'
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Setup Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
diff --git a/.github/workflows/purge-fastly.yml b/.github/workflows/purge-fastly.yml
index cc3c74ca8802..ad0eae2a4bf6 100644
--- a/.github/workflows/purge-fastly.yml
+++ b/.github/workflows/purge-fastly.yml
@@ -32,7 +32,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/readability.yml b/.github/workflows/readability.yml
index ef68d9c7982c..d2dddce67747 100644
--- a/.github/workflows/readability.yml
+++ b/.github/workflows/readability.yml
@@ -28,7 +28,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo with full history
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
fetch-depth: 0
diff --git a/.github/workflows/ready-for-doc-review.yml b/.github/workflows/ready-for-doc-review.yml
index 15d07669a1d7..b34ef6a5aab5 100644
--- a/.github/workflows/ready-for-doc-review.yml
+++ b/.github/workflows/ready-for-doc-review.yml
@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo content
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/docs-internal
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
diff --git a/.github/workflows/repo-sync.yml b/.github/workflows/repo-sync.yml
index bb915d1b85f4..d0e172e28b15 100644
--- a/.github/workflows/repo-sync.yml
+++ b/.github/workflows/repo-sync.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Sync repo to branch
uses: repo-sync/github-sync@3832fe8e2be32372e1b3970bbae8e7079edeec88
diff --git a/.github/workflows/review-comment.yml b/.github/workflows/review-comment.yml
index 78aff73f539d..e048d59b9ec3 100644
--- a/.github/workflows/review-comment.yml
+++ b/.github/workflows/review-comment.yml
@@ -38,7 +38,7 @@ jobs:
PR_NUMBER: ${{ github.event.pull_request.number }}
steps:
- name: check out repo content
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
- name: Set APP_URL
diff --git a/.github/workflows/reviewers-content-systems.yml b/.github/workflows/reviewers-content-systems.yml
index d66638401acf..654707564b44 100644
--- a/.github/workflows/reviewers-content-systems.yml
+++ b/.github/workflows/reviewers-content-systems.yml
@@ -36,7 +36,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Add content systems as a reviewer
uses: ./.github/actions/retry-command
diff --git a/.github/workflows/reviewers-dependabot.yml b/.github/workflows/reviewers-dependabot.yml
index 2ff85c6febed..d02144dafffa 100644
--- a/.github/workflows/reviewers-dependabot.yml
+++ b/.github/workflows/reviewers-dependabot.yml
@@ -37,7 +37,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Add dependabot as a reviewer
uses: ./.github/actions/retry-command
diff --git a/.github/workflows/reviewers-docs-engineering.yml b/.github/workflows/reviewers-docs-engineering.yml
index 8746c50cdbf4..f6bdbb269d45 100644
--- a/.github/workflows/reviewers-docs-engineering.yml
+++ b/.github/workflows/reviewers-docs-engineering.yml
@@ -47,7 +47,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Add docs engineering as a reviewer
uses: ./.github/actions/retry-command
diff --git a/.github/workflows/reviewers-legal.yml b/.github/workflows/reviewers-legal.yml
index 4079dc2ff672..59ad01a5cd85 100644
--- a/.github/workflows/reviewers-legal.yml
+++ b/.github/workflows/reviewers-legal.yml
@@ -32,7 +32,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
- uses: actions/checkout@v5
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Get changed files
id: changed_files
diff --git a/.github/workflows/site-policy-sync.yml b/.github/workflows/site-policy-sync.yml
index 57dbccfbf7bf..cc2e24c2f7fc 100644
--- a/.github/workflows/site-policy-sync.yml
+++ b/.github/workflows/site-policy-sync.yml
@@ -27,10 +27,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: checkout docs-internal
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: checkout public site-policy
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
repository: github/site-policy
token: ${{ secrets.API_TOKEN_SITEPOLICY }}
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index 1048f56e5f13..f3ac4f2fecbc 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -38,7 +38,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
diff --git a/.github/workflows/sync-audit-logs.yml b/.github/workflows/sync-audit-logs.yml
index d29530591a6e..d49b5d649a3e 100644
--- a/.github/workflows/sync-audit-logs.yml
+++ b/.github/workflows/sync-audit-logs.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/sync-codeql-cli.yml b/.github/workflows/sync-codeql-cli.yml
index a8e067ec0e69..b68c6617f5e3 100644
--- a/.github/workflows/sync-codeql-cli.yml
+++ b/.github/workflows/sync-codeql-cli.yml
@@ -30,11 +30,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
# Check out a nested repository inside of previous checkout
- name: Checkout semmle-code repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# By default, only the most recent commit of the `main` branch
# will be checked out
diff --git a/.github/workflows/sync-graphql.yml b/.github/workflows/sync-graphql.yml
index d3358a237d39..483158197700 100644
--- a/.github/workflows/sync-graphql.yml
+++ b/.github/workflows/sync-graphql.yml
@@ -23,7 +23,7 @@ jobs:
ignored-types: ${{ steps.sync.outputs.ignored-types }}
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
- name: Run updater scripts
id: sync
@@ -82,7 +82,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
with:
slack_channel_id: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
diff --git a/.github/workflows/sync-openapi.yml b/.github/workflows/sync-openapi.yml
index 941df7895c28..e2474d481d06 100644
--- a/.github/workflows/sync-openapi.yml
+++ b/.github/workflows/sync-openapi.yml
@@ -30,11 +30,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository code
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
# Check out a nested repository inside of previous checkout
- name: Checkout rest-api-description repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# By default, only the most recent commit of the `main` branch
# will be checked out
@@ -42,7 +42,7 @@ jobs:
path: rest-api-description
ref: ${{ inputs.SOURCE_BRANCH }}
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
# By default, only the most recent commit of the `main` branch
# will be checked out
diff --git a/.github/workflows/sync-secret-scanning.yml b/.github/workflows/sync-secret-scanning.yml
index 1700dc6859a4..b8f62db784ce 100644
--- a/.github/workflows/sync-secret-scanning.yml
+++ b/.github/workflows/sync-secret-scanning.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/test-changed-content.yml b/.github/workflows/test-changed-content.yml
index a1f2e4589407..9581106ca735 100644
--- a/.github/workflows/test-changed-content.yml
+++ b/.github/workflows/test-changed-content.yml
@@ -27,7 +27,7 @@ jobs:
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if if doesn't do anything
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e9892fffcea0..238b7dd1fab6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -87,7 +87,7 @@ jobs:
# Each of these ifs needs to be repeated at each step to make sure the required check still runs
# Even if if doesn't do anything
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/setup-elasticsearch
if: ${{ matrix.name == 'search' || matrix.name == 'languages' }}
diff --git a/.github/workflows/triage-issue-comments.yml b/.github/workflows/triage-issue-comments.yml
index 740249159c67..6b96d0f48f5f 100644
--- a/.github/workflows/triage-issue-comments.yml
+++ b/.github/workflows/triage-issue-comments.yml
@@ -43,7 +43,7 @@ jobs:
}
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/triage-issues.yml b/.github/workflows/triage-issues.yml
index dd2082efc17a..4b5d1080c882 100644
--- a/.github/workflows/triage-issues.yml
+++ b/.github/workflows/triage-issues.yml
@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/triage-pull-requests.yml b/.github/workflows/triage-pull-requests.yml
index 551aa65b4b39..463d08a4e37e 100644
--- a/.github/workflows/triage-pull-requests.yml
+++ b/.github/workflows/triage-pull-requests.yml
@@ -23,7 +23,7 @@ jobs:
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/triage-stale-check.yml b/.github/workflows/triage-stale-check.yml
index e04e0b23870c..c88030bc174a 100644
--- a/.github/workflows/triage-stale-check.yml
+++ b/.github/workflows/triage-stale-check.yml
@@ -44,7 +44,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
@@ -72,7 +72,7 @@ jobs:
- name: Check out repo
if: ${{ failure() }}
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/slack-alert
if: ${{ failure() }}
with:
diff --git a/.github/workflows/triage-unallowed-contributions.yml b/.github/workflows/triage-unallowed-contributions.yml
index 96ccd5a68608..c3fea077d274 100644
--- a/.github/workflows/triage-unallowed-contributions.yml
+++ b/.github/workflows/triage-unallowed-contributions.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Get files changed
uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd
diff --git a/.github/workflows/validate-asset-images.yml b/.github/workflows/validate-asset-images.yml
index 1c9ff04f49da..b3eef046db31 100644
--- a/.github/workflows/validate-asset-images.yml
+++ b/.github/workflows/validate-asset-images.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
diff --git a/.github/workflows/validate-github-github-docs-urls.yml b/.github/workflows/validate-github-github-docs-urls.yml
index d53df2636b5e..488f795dbf45 100644
--- a/.github/workflows/validate-github-github-docs-urls.yml
+++ b/.github/workflows/validate-github-github-docs-urls.yml
@@ -34,10 +34,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repo's default branch
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: ./.github/actions/node-npm-setup
- - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
token: ${{ secrets.DOCS_BOT_PAT_BASE }}
repository: github/github
diff --git a/.github/workflows/validate-openapi-check.yml b/.github/workflows/validate-openapi-check.yml
index 510f29752ef0..93e3460acd73 100644
--- a/.github/workflows/validate-openapi-check.yml
+++ b/.github/workflows/validate-openapi-check.yml
@@ -28,7 +28,7 @@ jobs:
if: github.repository == 'github/docs-internal'
steps:
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1