6 changes: 3 additions & 3 deletions .github/workflows/lint-entire-content-data-markdown.yml
@@ -1,6 +1,6 @@
name: 'Lint entire content and data markdown files'

# **What it does**: Lints our content markdown weekly to ensure the content matches the specified styleguide. If errors exists, it opens a PR for the Docs content team to review.
# **What it does**: Lints our content markdown weekly to ensure the content matches the specified styleguide. If errors or warnings exist, it opens an issue for the Docs content team to review.
# **Why we have it**: Extra precaution to run linter on the entire content/data directories.
# **Who does it impact**: Docs content.

@@ -32,7 +32,7 @@ jobs:
id: linting-content-data
timeout-minutes: 10
continue-on-error: true
run: npm run lint-content -- --errors-only --paths content data --output-file /tmp/error-lints.json
run: npm run lint-content -- --paths content data --output-file /tmp/lint-results.json

- name: Open issue in docs-content
if: ${{ always() && steps.linting-content-data.outcome == 'failure' }}
@@ -41,7 +41,7 @@ jobs:
REPORT_AUTHOR: docs-bot
REPORT_LABEL: broken content markdown report
REPORT_REPOSITORY: github/docs-content
run: npm run post-lints -- --path /tmp/error-lints.json
run: npm run post-lints -- --path /tmp/lint-results.json

- uses: ./.github/actions/slack-alert
if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
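
For reference, `/tmp/lint-results.json` is the handoff between the two steps above: the linter writes it and `post-lints` reads it back. Below is a minimal sketch of the shape that `src/content-linter/scripts/post-lints.js` (changed later in this PR) appears to expect. The outer mapping from file path to a list of flaws is implied by the script; the per-flaw fields are illustrative assumptions, not taken from this PR.

```typescript
// Hypothetical shape of /tmp/lint-results.json consumed by post-lints.js.
// Only the outer structure (file path -> array of flaws) is implied by the
// script; the per-flaw fields are assumed for illustration.
interface LintFlaw {
  lineNumber: number            // assumed field name
  ruleNames: string[]           // assumed, e.g. ['MD011']
  ruleDescription: string       // assumed human-readable rule summary
  severity: 'error' | 'warning' // assumed; the report covers both
}

type LintResults = Record<string, LintFlaw[]>

const example: LintResults = {
  'content/copilot/example.md': [
    {
      lineNumber: 12,
      ruleNames: ['MD011'],
      ruleDescription: 'Reversed link syntax',
      severity: 'warning',
    },
  ],
}

console.log(Object.keys(example)) // files that would appear in the report
```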
@@ -137,7 +137,7 @@ When you run your updated workflows, they will build your artifacts and generate

```yaml
- name: Generate SBOM attestation
uses: actions/attest-sbom@v1
uses: actions/attest-sbom@v2
with:
subject-path: 'PATH/TO/ARTIFACT'
sbom-path: 'PATH/TO/SBOM'
@@ -161,7 +161,7 @@ When you run your updated workflows, they will build your artifacts and generate

```yaml
- name: Generate SBOM attestation
uses: actions/attest-sbom@v1
uses: actions/attest-sbom@v2
with:
subject-name: {% raw %}${{ env.REGISTRY }}/PATH/TO/IMAGE{% endraw %}
subject-digest: 'sha256:fedcba0...'
@@ -27,10 +27,10 @@ Use this table to find a suitable model quickly, see more detail in the sections
| {% data variables.copilot.copilot_o3 %} | Deep reasoning and debugging | Multi-step problem solving and architecture-level code analysis | Reasoning |
| {% data variables.copilot.copilot_o3_mini %} | Fast help with simple or repetitive tasks | Quick responses for code snippets, explanations, and prototyping | Lower latency |
| {% data variables.copilot.copilot_o4_mini %} | Fast help with simple or repetitive tasks | Fast, reliable answers to lightweight coding questions | Lower latency |
| {% data variables.copilot.copilot_claude_opus %} | Deep reasoning and debugging | Advanced agentic workflows over large codebases, long-horizon projects | Reasoning |
| {% data variables.copilot.copilot_claude_opus %} | Deep reasoning and debugging | Complex problem-solving challenges, sophisticated reasoning | Reasoning, vision |
| {% data variables.copilot.copilot_claude_sonnet_35 %} | Fast help with simple or repetitive tasks | Quick responses for code, syntax, and documentation | Agent mode |
| {% data variables.copilot.copilot_claude_sonnet_37 %} | Deep reasoning and debugging | Structured reasoning across large, complex codebases | Agent mode |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | Deep reasoning and debugging | High-performance code review, bug fixes, and efficient research workflows | Agent mode |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | Deep reasoning and debugging | Performance and practicality, perfectly balanced for coding workflows | Agent mode, vision |
| {% data variables.copilot.copilot_gemini_25_pro %} | Deep reasoning and debugging | Complex code generation, debugging, and research workflows | Reasoning |
| {% data variables.copilot.copilot_gemini_flash %} | Working with visuals (diagrams, screenshots) | Real-time responses and visual reasoning for UI and diagram-based tasks | Visual |

@@ -40,11 +40,11 @@ This table lists the AI models available in {% data variables.product.prodname_c
| {% data variables.copilot.copilot_o3 %} | OpenAI | {% data variables.release-phases.public_preview_caps %} | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_o3_mini %} | OpenAI | GA | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_o4_mini %} | OpenAI | {% data variables.release-phases.public_preview_caps %} | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_opus %} | Anthropic | {% data variables.release-phases.public_preview_caps %} | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_opus %} | Anthropic | GA | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_35 %} | Anthropic | GA | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_37 %} | Anthropic | GA | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_37 %} Thinking | Anthropic | GA | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | Anthropic | {% data variables.release-phases.public_preview_caps %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | Anthropic | GA | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_gemini_25_pro %} | Google | {% data variables.release-phases.public_preview_caps %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_gemini_flash %} | Google | GA | {% octicon "x" aria-label="Not included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |

@@ -65,11 +65,11 @@ The following table shows which models are available in each client.
| {% data variables.copilot.copilot_o3 %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_o3_mini %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_o4_mini %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_opus %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} |
| {% data variables.copilot.copilot_claude_opus %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_35 %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_37 %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |{% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_37 %} Thinking | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} | {% octicon "x" aria-label="Not included" %} |
| {% data variables.copilot.copilot_claude_sonnet_40 %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_gemini_25_pro %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |
| {% data variables.copilot.copilot_gemini_flash %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} | {% octicon "check" aria-label="Included" %} |

9 changes: 9 additions & 0 deletions data/reusables/organizations/additional-permissions.md
@@ -58,3 +58,12 @@ For more information, see [AUTOTITLE](/discussions).
* Dismiss or reopen {% data variables.product.prodname_dependabot_alerts %}
* View {% data variables.product.prodname_secret_scanning %} results
* Dismiss or reopen {% data variables.product.prodname_secret_scanning %} results

### Actions

* Manage {% data variables.product.prodname_actions %} general settings
* Manage runners
* Manage secrets
* Manage variables
* Manage environments (including environment secrets and variables)

16 changes: 9 additions & 7 deletions src/content-linter/scripts/post-lints.js
@@ -12,7 +12,7 @@ import { createReportIssue, linkReports } from '#src/workflows/issue-report.js'
// the entire content and data directories based on our
// markdownlint.js rules.
//
// If errors are found, it will open up a new issue in the
// If errors or warnings are found, it will open up a new issue in the
// docs-content repo with the label "broken content markdown report".
//
// The Content FR will go through the issue and update the content and
@@ -21,18 +21,20 @@ import { createReportIssue, linkReports } from '#src/workflows/issue-report.js'
// [end-readme]

program
.description('Opens an issue for Content FR with the errors from the weekly content/data linter.')
.description(
'Opens an issue for Content FR with the errors and warnings from the weekly content/data linter.',
)
.option(
'-p, --path <path>',
'provide a path to the errors output json file that will be in the issue body',
'provide a path to the errors and warnings output json file that will be in the issue body',
)
.parse(process.argv)

const { path } = program.opts()

main()
async function main() {
const errors = fs.readFileSync(`${path}`, 'utf8')
const lintResults = fs.readFileSync(`${path}`, 'utf8')
const core = coreLib
const { REPORT_REPOSITORY, REPORT_AUTHOR, REPORT_LABEL } = process.env

@@ -41,18 +43,18 @@ async function main() {
// or open an issue report, you might get cryptic error messages from Octokit.
getEnvInputs(['GITHUB_TOKEN'])

core.info(`Creating issue for errors...`)
core.info(`Creating issue for errors and warnings...`)

let reportBody = 'The following files have markdown lint warnings/errors:\n\n'
for (const [file, flaws] of Object.entries(JSON.parse(errors))) {
for (const [file, flaws] of Object.entries(JSON.parse(lintResults))) {
reportBody += `File: \`${file}\`:\n`
reportBody += `\`\`\`json\n${JSON.stringify(flaws, null, 2)}\n\`\`\`\n`
}

const reportProps = {
core,
octokit,
reportTitle: `Error(s) in content markdown file(s)`,
reportTitle: `Error(s) and warning(s) in content markdown file(s)`,
reportBody,
reportRepository: REPORT_REPOSITORY,
reportLabel: REPORT_LABEL,
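As a rough usage sketch of the updated loop in this script, here is how the report body comes together when run against a made-up lint-results object; the real script reads the JSON from `--path` and hands the body to `createReportIssue`.

```typescript
// Sketch of the report-body construction from post-lints.js, using a
// hypothetical lint-results object instead of the real /tmp file.
const lintResults: Record<string, unknown[]> = {
  'content/copilot/example.md': [{ ruleNames: ['MD011'], severity: 'warning' }],
}

let reportBody = 'The following files have markdown lint warnings/errors:\n\n'
for (const [file, flaws] of Object.entries(lintResults)) {
  reportBody += `File: \`${file}\`:\n`
  reportBody += `\`\`\`json\n${JSON.stringify(flaws, null, 2)}\n\`\`\`\n`
}

console.log(reportBody) // becomes the issue body posted to docs-content
```
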
12 changes: 2 additions & 10 deletions src/frame/middleware/api.ts
@@ -4,6 +4,7 @@ import { createProxyMiddleware } from 'http-proxy-middleware'
import events from '@/events/middleware.js'
import anchorRedirect from '@/rest/api/anchor-redirect.js'
import aiSearch from '@/search/middleware/ai-search'
import aiSearchLocalProxy from '@/search/middleware/ai-search-local-proxy'
import search from '@/search/middleware/search-routes.js'
import pageList from '@/article-api/middleware/pagelist'
import article from '@/article-api/middleware/article'
@@ -31,16 +32,7 @@ if (process.env.CSE_COPILOT_ENDPOINT || process.env.NODE_ENV === 'test') {
console.log(
'Proxying AI Search requests to docs.github.com. To use the cse-copilot endpoint, set the CSE_COPILOT_ENDPOINT environment variable.',
)
router.use(
'/ai-search',
createProxyMiddleware({
target: 'https://docs.github.com',
changeOrigin: true,
pathRewrite: function (path, req: ExtendedRequest) {
return req.originalUrl
},
}),
)
router.use(aiSearchLocalProxy)
}
if (process.env.ELASTICSEARCH_URL) {
router.use('/search', search)
3 changes: 0 additions & 3 deletions src/search/components/input/SearchOverlay.tsx
@@ -481,9 +481,6 @@ export function SearchOverlay({
}
}
} else if (event.key === 'Enter') {
if (searchLoading) {
return
}
event.preventDefault()
let pressedGroupKey = SEARCH_OVERLAY_EVENT_GROUP
let pressedGroupId = searchEventGroupId
70 changes: 70 additions & 0 deletions src/search/middleware/ai-search-local-proxy.ts
@@ -0,0 +1,70 @@
// When in local development we want to proxy to the ai-search route at docs.github.com

import { Router, Request, Response, NextFunction } from 'express'
import got from 'got'
import { pipeline } from 'node:stream'

const router = Router()

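// Hop-by-hop headers are connection-specific and must not be forwarded by a
// proxy, so they are stripped from both the request and the response below.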
const hopByHop = new Set([
'connection',
'keep-alive',
'proxy-authenticate',
'proxy-authorization',
'te',
'trailers',
'transfer-encoding',
'upgrade',
])

function filterRequestHeaders(src: Request['headers']) {
const out: Record<string, string | string[]> = {}
for (const [key, value] of Object.entries(src)) {
if (!value) continue
const k = key.toLowerCase()
if (hopByHop.has(k) || k === 'cookie' || k === 'host') continue
out[key] = value
}
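// Regardless of what the client sent, request NDJSON from the upstream and send the body as JSON.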
out['accept'] = 'application/x-ndjson'
out['content-type'] = 'application/json'
return out
}

router.post('/ai-search/v1', async (req: Request, res: Response, next: NextFunction) => {
try {
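// Stream the request body upstream; pass the upstream status and encoding through untouched (no retries, no throwing on non-2xx responses).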
const upstream = got.stream.post('https://docs.github.com/api/ai-search/v1', {
headers: filterRequestHeaders(req.headers),
body: JSON.stringify(req.body ?? {}),
decompress: false,
throwHttpErrors: false,
retry: { limit: 0 },
})

upstream.on('response', (uRes) => {
res.status(uRes.statusCode || 500)

for (const [k, v] of Object.entries(uRes.headers)) {
if (!v) continue
const key = k.toLowerCase()
// Never forward hop-by-hop; got already handles chunked → strip content-length
if (hopByHop.has(key) || key === 'content-length') continue
res.setHeader(k, v as string)
}
res.flushHeaders?.()
})

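// Pipe the upstream NDJSON stream straight through to the client so chunks are delivered as they arrive.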
pipeline(upstream, res, (err) => {
if (err) {
console.error('[ai-search proxy] pipeline error:', err)
if (!res.headersSent) res.status(502).end('Bad Gateway')
}
})

upstream.on('error', (err) => console.error('[ai-search proxy] upstream error:', err))
} catch (err) {
console.error('[ai-search proxy] request failed:', err)
next(err)
}
})

export default router
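
As a local smoke test for the new proxy, something like the following could be used. This is a sketch only: the `http://localhost:4000/api` prefix and the `query` field in the request body are assumptions about how the router is mounted and what the upstream endpoint expects; neither is spelled out in this diff.

```typescript
// Hypothetical local-dev smoke test for the ai-search proxy route.
// Assumes the api router is mounted under /api on port 4000 and that the
// upstream accepts a JSON body with a `query` field; both are assumptions,
// not taken from this PR.
async function askDocs(query: string): Promise<void> {
  const res = await fetch('http://localhost:4000/api/ai-search/v1', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ query }),
  })

  if (!res.ok || !res.body) {
    throw new Error(`ai-search proxy returned ${res.status}`)
  }

  // The response is NDJSON: one JSON object per line, streamed as it arrives.
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffered = ''
  while (true) {
    const { value, done } = await reader.read()
    if (done) break
    buffered += decoder.decode(value, { stream: true })
    let newline: number
    while ((newline = buffered.indexOf('\n')) !== -1) {
      const line = buffered.slice(0, newline).trim()
      buffered = buffered.slice(newline + 1)
      if (line) console.log(JSON.parse(line))
    }
  }
}

askDocs('How do I create a pull request?').catch(console.error)
```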