|
| 1 | +--- |
| 2 | +title: Baseten |
| 3 | +pcx_content_type: get-started |
| 4 | +--- |
| 5 | + |
| 6 | +import { Render, Tabs, TabItem } from "~/components"; |
| 7 | + |
| 8 | +[Baseten](https://www.baseten.co/) provides infrastructure for building and deploying machine learning models at scale. Baseten offers access to various language models through a unified chat completions API. |
| 9 | + |
| 10 | +## Endpoint |
| 11 | + |
| 12 | +```txt |
| 13 | +https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/baseten |
| 14 | +``` |
| 15 | + |
| 16 | +## Prerequisites |
| 17 | + |
| 18 | +When making requests to Baseten, ensure you have the following: |
| 19 | + |
| 20 | +- Your AI Gateway Account ID (used as `{account_id}` in the endpoint URL). |
| 21 | +- Your AI Gateway gateway name (used as `{gateway_id}` in the endpoint URL). |
| 22 | +- An active Baseten API token. |
| 23 | +- The name of the Baseten model you want to use. |
| 24 | + |
| 25 | +## OpenAI-compatible chat completions API |
| 26 | + |
| 27 | +Baseten provides an OpenAI-compatible chat completions API for supported models. |
| 28 | + |
| 29 | +### cURL |
| 30 | + |
| 31 | +```bash title="Example cURL request" |
| 32 | +curl https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/baseten/v1/chat/completions \ |
| 33 | + --header 'Authorization: Bearer {baseten_api_token}' \ |
| 34 | + --header 'Content-Type: application/json' \ |
| 35 | + --data '{ |
| 36 | + "model": "openai/gpt-oss-120b", |
| 37 | + "messages": [ |
| 38 | + { |
| 39 | + "role": "user", |
| 40 | + "content": "What is Cloudflare?" |
| 41 | + } |
| 42 | + ] |
| 43 | + }' |
| 44 | +``` |
| 45 | + |
| 46 | +### Use OpenAI SDK with JavaScript |
| 47 | + |
| 48 | +```js title="JavaScript" |
| 49 | +import OpenAI from "openai"; |
| 50 | + |
| 51 | +const apiKey = "{baseten_api_token}"; |
| 52 | +const accountId = "{account_id}"; |
| 53 | +const gatewayId = "{gateway_id}"; |
| 54 | +const baseURL = `https://gateway.ai.cloudflare.com/v1/${accountId}/${gatewayId}/baseten`; |
| 55 | + |
| 56 | +const openai = new OpenAI({ |
| 57 | + apiKey, |
| 58 | + baseURL, |
| 59 | +}); |
| 60 | + |
| 61 | +const model = "openai/gpt-oss-120b"; |
| 62 | +const messages = [{ role: "user", content: "What is Cloudflare?" }]; |
| 63 | + |
| 64 | +const chatCompletion = await openai.chat.completions.create({ |
| 65 | + model, |
| 66 | + messages, |
| 67 | +}); |
| 68 | + |
| 69 | +console.log(chatCompletion); |
| 70 | +``` |
| 71 | + |
| 72 | +<Render |
| 73 | + file="chat-completions-providers" |
| 74 | + product="ai-gateway" |
| 75 | + params={{ |
| 76 | + name: "Baseten", |
| 77 | + jsonexample: ` |
| 78 | +{ |
| 79 | + "model": "baseten/{model}" |
| 80 | +}` |
| 81 | + }} |
| 82 | +/> |
| 83 | + |
| 84 | +## Model-specific endpoints |
| 85 | + |
| 86 | +For models that don't use the OpenAI-compatible API, you can access them through their specific model endpoints. |
| 87 | + |
| 88 | +### cURL |
| 89 | + |
| 90 | +```bash title="Example cURL request" |
| 91 | +curl https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/baseten/model/{model_id} \ |
| 92 | + --header 'Authorization: Bearer {baseten_api_token}' \ |
| 93 | + --header 'Content-Type: application/json' \ |
| 94 | + --data '{ |
| 95 | + "prompt": "What is Cloudflare?", |
| 96 | + "max_tokens": 100 |
| 97 | + }' |
| 98 | +``` |
| 99 | + |
| 100 | +### Use fetch with JavaScript |
| 101 | + |
| 102 | +```js title="JavaScript" |
| 103 | +const accountId = "{account_id}"; |
| 104 | +const gatewayId = "{gateway_id}"; |
| 105 | +const basetenApiToken = "{baseten_api_token}"; |
| 106 | +const modelId = "{model_id}"; |
| 107 | +const baseURL = `https://gateway.ai.cloudflare.com/v1/${accountId}/${gatewayId}/baseten`; |
| 108 | + |
| 109 | +const response = await fetch(`${baseURL}/model/${modelId}`, { |
| 110 | + method: "POST", |
| 111 | + headers: { |
| 112 | + "Authorization": `Bearer ${basetenApiToken}`, |
| 113 | + "Content-Type": "application/json", |
| 114 | + }, |
| 115 | + body: JSON.stringify({ |
| 116 | + prompt: "What is Cloudflare?", |
| 117 | + max_tokens: 100, |
| 118 | + }), |
| 119 | +}); |
| 120 | + |
| 121 | +const result = await response.json(); |
| 122 | +console.log(result); |
| 123 | +``` |