Skip to content
This repository was archived by the owner on Apr 11, 2025. It is now read-only.

Commit 9bc4ef4

Browse files
authored
Merge pull request #1 from xsharov/main
Handle HEAD "/" requests by returning StatusOK
2 parents f140fdd + 17d8f31 commit 9bc4ef4

File tree

3 files changed

+81
-6
lines changed

3 files changed

+81
-6
lines changed

main.go

Lines changed: 70 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,46 @@
11
package main
22

33
import (
4+
"bufio"
45
"encoding/json"
56
"errors"
67
"io"
78
"log/slog"
89
"net/http"
910
"os"
11+
"strings"
1012
"time"
1113

1214
"github.com/gin-gonic/gin"
1315
openai "github.com/sashabaranov/go-openai"
1416
)
1517

18+
var modelFilter map[string]struct{}
19+
20+
func loadModelFilter(path string) (map[string]struct{}, error) {
21+
file, err := os.Open(path)
22+
if err != nil {
23+
return nil, err
24+
}
25+
defer file.Close()
26+
27+
scanner := bufio.NewScanner(file)
28+
filter := make(map[string]struct{})
29+
30+
for scanner.Scan() {
31+
line := strings.TrimSpace(scanner.Text())
32+
if line != "" {
33+
filter[line] = struct{}{}
34+
}
35+
}
36+
37+
if err := scanner.Err(); err != nil {
38+
return nil, err
39+
}
40+
41+
return filter, nil
42+
}
43+
1644
func main() {
1745
r := gin.Default()
1846
// Load the API key from environment variables or command-line arguments.
@@ -28,9 +56,29 @@ func main() {
2856

2957
provider := NewOpenrouterProvider(apiKey)
3058

59+
filter, err := loadModelFilter("models-filter")
60+
if err != nil {
61+
if os.IsNotExist(err) {
62+
slog.Info("models-filter file not found. Skipping model filtering.")
63+
modelFilter = make(map[string]struct{})
64+
} else {
65+
slog.Error("Error loading models filter", "Error", err)
66+
return
67+
}
68+
} else {
69+
modelFilter = filter
70+
slog.Info("Loaded models from filter:")
71+
for model := range modelFilter {
72+
slog.Info(" - " + model)
73+
}
74+
}
75+
3176
r.GET("/", func(c *gin.Context) {
3277
c.String(http.StatusOK, "Ollama is running")
3378
})
79+
r.HEAD("/", func(c *gin.Context) {
80+
c.String(http.StatusOK, "")
81+
})
3482

3583
r.GET("/api/tags", func(c *gin.Context) {
3684
models, err := provider.GetModels()
@@ -39,8 +87,27 @@ func main() {
3987
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
4088
return
4189
}
42-
// Respond with the list of models
43-
c.JSON(http.StatusOK, gin.H{"models": models})
90+
filter := modelFilter
91+
// Construct a new array of model objects with extra fields
92+
newModels := make([]map[string]interface{}, 0, len(models))
93+
for _, m := range models {
94+
// If the filter is empty, skip the check and include all models
95+
if len(filter) > 0 {
96+
if _, ok := filter[m.Model]; !ok {
97+
continue
98+
}
99+
}
100+
newModels = append(newModels, map[string]interface{}{
101+
"name": m.Name,
102+
"model": m.Model,
103+
"modified_at": m.ModifiedAt,
104+
"size": 270898672,
105+
"digest": "9077fe9d2ae1a4a41a868836b56b8163731a8fe16621397028c2c76f838c6907",
106+
"details": m.Details,
107+
})
108+
}
109+
110+
c.JSON(http.StatusOK, gin.H{"models": newModels})
44111
})
45112

46113
r.POST("/api/show", func(c *gin.Context) {
@@ -161,5 +228,5 @@ func main() {
161228
}
162229
})
163230

164-
r.Run(":8080")
231+
r.Run(":11434")
165232
}

models-filter sample

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
gemini-2.5-pro-exp-03-25:free
2+
deepseek-chat-v3-0324:free

readme.md

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,17 @@
1-
# Ollama Proxy for OpenRouter
1+
# Enchanted Proxy for OpenRouter
2+
This repository is specifically made for use with the [Enchanted project](https://github.com/gluonfield/enchanted/tree/main).
3+
The original author of this proxy is [marknefedov](https://github.com/marknefedov/ollama-openrouter-proxy).
24

35
## Description
46
This repository provides a proxy server that emulates [Ollama's REST API](https://github.com/ollama/ollama) but forwards requests to [OpenRouter](https://openrouter.ai/). It uses the [sashabaranov/go-openai](https://github.com/sashabaranov/go-openai) library under the hood, with minimal code changes to keep the Ollama API calls the same. This allows you to use Ollama-compatible tooling and clients, but run your requests on OpenRouter-managed models.
57
Currently, it is enough for usage with [Jetbrains AI assistant](https://blog.jetbrains.com/ai/2024/11/jetbrains-ai-assistant-2024-3/#more-control-over-your-chat-experience-choose-between-gemini,-openai,-and-local-models).
68

79
## Features
8-
- **Ollama-like API**: The server listens on `8080` and exposes endpoints similar to Ollama (e.g., `/api/chat`, `/api/tags`).
10+
- **Model Filtering**: You can provide a `models-filter` file in the same directory as the proxy. Each line in this file should contain a single model name. The proxy will only show models that match these entries. If the file doesn’t exist or is empty, no filtering is applied.
11+
12+
**Note**: OpenRouter model names may sometimes include a vendor prefix, for example `deepseek/deepseek-chat-v3-0324:free`. To make sure filtering works correctly, remove the vendor part when adding the name to your `models-filter` file, e.g. `deepseek-chat-v3-0324:free`.
13+
14+
- **Ollama-like API**: The server listens on `11434` and exposes endpoints similar to Ollama (e.g., `/api/chat`, `/api/tags`).
915
- **Model Listing**: Fetch a list of available models from OpenRouter.
1016
- **Model Details**: Retrieve metadata about a specific model.
1117
- **Streaming Chat**: Forward streaming responses from OpenRouter in a chunked JSON format that is compatible with Ollama’s expectations.
@@ -22,7 +28,7 @@ You can provide your **OpenRouter** (OpenAI-compatible) API key through an envir
2228

2329
./ollama-proxy "your-openrouter-api-key"
2430

25-
Once running, the proxy listens on port `8080`. You can make requests to `http://localhost:8080` with your Ollama-compatible tooling.
31+
Once running, the proxy listens on port `11434`. You can make requests to `http://localhost:11434` with your Ollama-compatible tooling.
2632

2733
## Installation
2834
1. **Clone the Repository**:

0 commit comments

Comments
 (0)