From 130499166dce10c8fcaed5e9dfe9d5a029b716df Mon Sep 17 00:00:00 2001
From: jerilynzheng
Date: Thu, 22 Jan 2026 00:09:50 -0800
Subject: [PATCH 1/2] docs: Add Vercel AI Gateway endpoint documentation

- Add configuration example for ai-gateway provider
- Document supported features (model fetching, streaming, vision, reasoning, tool calling)
- Add environment variable setup instructions

Co-Authored-By: Claude Sonnet 4.5
---
 .../ai_endpoints/ai-gateway.mdx | 52 +++++++++++++++++++
 1 file changed, 52 insertions(+)
 create mode 100644 pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx

diff --git a/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx b/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx
new file mode 100644
index 000000000..6784c7855
--- /dev/null
+++ b/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx
@@ -0,0 +1,52 @@
---
title: Vercel AI Gateway
description: Example configuration for Vercel AI Gateway
---

# [Vercel AI Gateway](https://vercel.com/docs/ai-gateway)

> Vercel AI Gateway: [vercel.com/docs/ai-gateway](https://vercel.com/docs/ai-gateway)

**Notes:**

- **Known:** icon provided, fetching list of models is recommended.

- Vercel AI Gateway provides a unified API for multiple AI providers including OpenAI, Anthropic, Google, and others through a single endpoint.

- Dropping the `stop` parameter is recommended, as different underlying models use different stop tokens.

- **Reasoning models:** AI Gateway supports extended thinking via the `reasoning` parameter with `enabled`, `effort` (none/minimal/low/medium/high/xhigh), and `max_tokens` options. Response includes `message.reasoning` with the thinking content.

```yaml
  - name: "ai-gateway"
    # For `apiKey` and `baseURL`, you can use environment variables that you define.
    # recommended environment variables:
    apiKey: "${AI_GATEWAY_API_KEY}"
    baseURL: "https://ai-gateway.vercel.sh/v1"
    models:
      default: ["gpt-5.2", "claude-sonnet-4-5"]
      fetch: true
    titleConvo: true
    titleModel: "gpt-5.2"
    # Recommended: Drop the stop parameter from the request as models use a variety of stop tokens.
    dropParams: ["stop"]
    modelDisplayLabel: "Vercel AI Gateway"
```

## Environment Variables

Add the following to your `.env` file:

```bash
AI_GATEWAY_API_KEY=your_api_key_here
```

## Supported Features

| Feature | Notes |
|---------|-------|
| Model fetching | Via `/v1/models` endpoint |
| Streaming | Full SSE support |
| Vision/Images | Depends on underlying model |
| Reasoning | Via `reasoning` parameter with `enabled` and `effort` |
| Tool calling | OpenAI-compatible function calling format |

From 9f1544eef004920e9d58c5f4f82d32e7c5b1b82d Mon Sep 17 00:00:00 2001
From: jerilynzheng
Date: Thu, 22 Jan 2026 21:45:53 -0800
Subject: [PATCH 2/2] docs: Update endpoint name to 'Vercel' in configuration

---
 .../librechat_yaml/ai_endpoints/ai-gateway.mdx | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx b/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx
index 6784c7855..651dc0ac2 100644
--- a/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx
+++ b/pages/docs/configuration/librechat_yaml/ai_endpoints/ai-gateway.mdx
@@ -9,7 +9,7 @@ description: Example configuration for Vercel AI Gateway

 **Notes:**

-- **Known:** icon provided, fetching list of models is recommended.
+- **Known:** icon provided, fetching list of models is recommended. Use endpoint name `"Vercel"` in your configuration. - Vercel AI Gateway provides a unified API for multiple AI providers including OpenAI, Anthropic, Google, and others through a single endpoint. @@ -18,19 +18,20 @@ description: Example configuration for Vercel AI Gateway - **Reasoning models:** AI Gateway supports extended thinking via the `reasoning` parameter with `enabled`, `effort` (none/minimal/low/medium/high/xhigh), and `max_tokens` options. Response includes `message.reasoning` with the thinking content. ```yaml - - name: "ai-gateway" + - name: "Vercel" # For `apiKey` and `baseURL`, you can use environment variables that you define. # recommended environment variables: apiKey: "${AI_GATEWAY_API_KEY}" baseURL: "https://ai-gateway.vercel.sh/v1" + titleConvo: true models: - default: ["gpt-5.2", "claude-sonnet-4-5"] + default: + - "openai/gpt-5-mini" + - "google/gemini-2.5-flash" + - "deepseek/deepseek-v3.1" + - "anthropic/claude-3-haiku" fetch: true - titleConvo: true - titleModel: "gpt-5.2" - # Recommended: Drop the stop parameter from the request as models use a variety of stop tokens. - dropParams: ["stop"] - modelDisplayLabel: "Vercel AI Gateway" + titleModel: "openai/gpt-5-mini" ``` ## Environment Variables
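
For reference, a minimal sketch of a direct request that exercises the `reasoning` parameter described in the notes above. This assumes the gateway's standard OpenAI-compatible `/chat/completions` route under the documented `baseURL`; the exact shape of the `reasoning` object should be confirmed against Vercel's AI Gateway documentation.

```bash
# Hypothetical request against the baseURL from the example config.
# The `reasoning` object mirrors the options listed in the notes
# (`enabled`, `effort`, `max_tokens`); adjust to Vercel's current API reference.
curl https://ai-gateway.vercel.sh/v1/chat/completions \
  -H "Authorization: Bearer $AI_GATEWAY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "openai/gpt-5-mini",
    "messages": [{"role": "user", "content": "Summarize the trade-offs of quicksort vs. mergesort."}],
    "reasoning": {"enabled": true, "effort": "medium", "max_tokens": 1024}
  }'
```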