launch: replace deprecated OPENAI_BASE_URL with config.toml profile for codex (#15041)

Eva H
2026-04-01 08:43:23 -07:00
committed by GitHub
parent cb0033598e
commit a8292dd85f
3 changed files with 305 additions and 23 deletions

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/ollama/ollama/envconfig"
@@ -15,8 +16,10 @@ type Codex struct{}
func (c *Codex) String() string { return "Codex" }
const codexProfileName = "ollama-launch"
func (c *Codex) args(model string, extra []string) []string {
args := []string{"--oss"}
args := []string{"--profile", codexProfileName}
if model != "" {
args = append(args, "-m", model)
}
@@ -29,17 +32,95 @@ func (c *Codex) Run(model string, args []string) error {
return err
}
if err := ensureCodexConfig(); err != nil {
return fmt.Errorf("failed to configure codex: %w", err)
}
cmd := exec.Command("codex", c.args(model, args)...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Env = append(os.Environ(),
"OPENAI_BASE_URL="+envconfig.Host().String()+"/v1/",
"OPENAI_API_KEY=ollama",
)
return cmd.Run()
}
// ensureCodexConfig writes a [profiles.ollama-launch] section to ~/.codex/config.toml
// with openai_base_url pointing to the local Ollama server.
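//
// For illustration only: assuming the default OLLAMA_HOST, the generated
// section is expected to look roughly like
//
//	[profiles.ollama-launch]
//	openai_base_url = "http://127.0.0.1:11434/v1/"
//	forced_login_method = "api"
//	model_provider = "ollama-launch"
//
// The host and port come from envconfig.Host(), so a custom OLLAMA_HOST
// changes the URL accordingly.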
func ensureCodexConfig() error {
home, err := os.UserHomeDir()
if err != nil {
return err
}
codexDir := filepath.Join(home, ".codex")
if err := os.MkdirAll(codexDir, 0o755); err != nil {
return err
}
configPath := filepath.Join(codexDir, "config.toml")
return writeCodexProfile(configPath)
}
// writeCodexProfile ensures ~/.codex/config.toml has the ollama-launch profile
// and model provider sections with the correct base URL.
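//
// Existing [profiles.ollama-launch] and [model_providers.ollama-launch]
// sections are rewritten in place (up to the next section header); any other
// content in the file is left untouched, and missing sections are appended.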
func writeCodexProfile(configPath string) error {
baseURL := envconfig.Host().String() + "/v1/"
sections := []struct {
header string
lines []string
}{
{
header: fmt.Sprintf("[profiles.%s]", codexProfileName),
lines: []string{
fmt.Sprintf("openai_base_url = %q", baseURL),
`forced_login_method = "api"`,
fmt.Sprintf("model_provider = %q", codexProfileName),
},
},
{
header: fmt.Sprintf("[model_providers.%s]", codexProfileName),
lines: []string{
`name = "Ollama"`,
fmt.Sprintf("base_url = %q", baseURL),
},
},
}
content, readErr := os.ReadFile(configPath)
text := ""
if readErr == nil {
text = string(content)
}
for _, s := range sections {
block := strings.Join(append([]string{s.header}, s.lines...), "\n") + "\n"
if idx := strings.Index(text, s.header); idx >= 0 {
// Replace the existing section up to the next section header.
rest := text[idx+len(s.header):]
if endIdx := strings.Index(rest, "\n["); endIdx >= 0 {
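// rest[endIdx+1:] starts at the next "[" header, so everything from
// that header onward is preserved after the rewritten block.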
text = text[:idx] + block + rest[endIdx+1:]
} else {
text = text[:idx] + block
}
} else {
// Append the section.
if text != "" && !strings.HasSuffix(text, "\n") {
text += "\n"
}
if text != "" {
text += "\n"
}
text += block
}
}
return os.WriteFile(configPath, []byte(text), 0o644)
}
func checkCodexVersion() error {
if _, err := exec.LookPath("codex"); err != nil {
return fmt.Errorf("codex is not installed, install with: npm install -g @openai/codex")

View File

@@ -1,7 +1,10 @@
package launch
import (
"os"
"path/filepath"
"slices"
"strings"
"testing"
)
@@ -14,10 +17,10 @@ func TestCodexArgs(t *testing.T) {
args []string
want []string
}{
{"with model", "llama3.2", nil, []string{"--oss", "-m", "llama3.2"}},
{"empty model", "", nil, []string{"--oss"}},
{"with model and profile", "qwen3.5", []string{"-p", "myprofile"}, []string{"--oss", "-m", "qwen3.5", "-p", "myprofile"}},
{"with sandbox flag", "llama3.2", []string{"--sandbox", "workspace-write"}, []string{"--oss", "-m", "llama3.2", "--sandbox", "workspace-write"}},
{"with model", "llama3.2", nil, []string{"--profile", "ollama-launch", "-m", "llama3.2"}},
{"empty model", "", nil, []string{"--profile", "ollama-launch"}},
{"with model and extra args", "qwen3.5", []string{"-p", "myprofile"}, []string{"--profile", "ollama-launch", "-m", "qwen3.5", "-p", "myprofile"}},
{"with sandbox flag", "llama3.2", []string{"--sandbox", "workspace-write"}, []string{"--profile", "ollama-launch", "-m", "llama3.2", "--sandbox", "workspace-write"}},
}
for _, tt := range tests {
@@ -29,3 +32,198 @@ func TestCodexArgs(t *testing.T) {
})
}
}
func TestWriteCodexProfile(t *testing.T) {
t.Run("creates new file when none exists", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, err := os.ReadFile(configPath)
if err != nil {
t.Fatal(err)
}
content := string(data)
if !strings.Contains(content, "[profiles.ollama-launch]") {
t.Error("missing [profiles.ollama-launch] header")
}
if !strings.Contains(content, "openai_base_url") {
t.Error("missing openai_base_url key")
}
if !strings.Contains(content, "/v1/") {
t.Error("missing /v1/ suffix in base URL")
}
if !strings.Contains(content, `forced_login_method = "api"`) {
t.Error("missing forced_login_method key")
}
if !strings.Contains(content, `model_provider = "ollama-launch"`) {
t.Error("missing model_provider key")
}
if !strings.Contains(content, "[model_providers.ollama-launch]") {
t.Error("missing [model_providers.ollama-launch] section")
}
if !strings.Contains(content, `name = "Ollama"`) {
t.Error("missing model provider name")
}
})
t.Run("appends profile to existing file without profile", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
existing := "[some_other_section]\nkey = \"value\"\n"
os.WriteFile(configPath, []byte(existing), 0o644)
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, _ := os.ReadFile(configPath)
content := string(data)
if !strings.Contains(content, "[some_other_section]") {
t.Error("existing section was removed")
}
if !strings.Contains(content, "[profiles.ollama-launch]") {
t.Error("missing [profiles.ollama-launch] header")
}
})
t.Run("replaces existing profile section", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
existing := "[profiles.ollama-launch]\nopenai_base_url = \"http://old:1234/v1/\"\n\n[model_providers.ollama-launch]\nname = \"Ollama\"\nbase_url = \"http://old:1234/v1/\"\n"
os.WriteFile(configPath, []byte(existing), 0o644)
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, _ := os.ReadFile(configPath)
content := string(data)
if strings.Contains(content, "old:1234") {
t.Error("old URL was not replaced")
}
if strings.Count(content, "[profiles.ollama-launch]") != 1 {
t.Errorf("expected exactly one [profiles.ollama-launch] section, got %d", strings.Count(content, "[profiles.ollama-launch]"))
}
if strings.Count(content, "[model_providers.ollama-launch]") != 1 {
t.Errorf("expected exactly one [model_providers.ollama-launch] section, got %d", strings.Count(content, "[model_providers.ollama-launch]"))
}
})
t.Run("replaces profile while preserving following sections", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
existing := "[profiles.ollama-launch]\nopenai_base_url = \"http://old:1234/v1/\"\n[another_section]\nfoo = \"bar\"\n"
os.WriteFile(configPath, []byte(existing), 0o644)
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, _ := os.ReadFile(configPath)
content := string(data)
if strings.Contains(content, "old:1234") {
t.Error("old URL was not replaced")
}
if !strings.Contains(content, "[another_section]") {
t.Error("following section was removed")
}
if !strings.Contains(content, "foo = \"bar\"") {
t.Error("following section content was removed")
}
})
t.Run("appends newline to file not ending with newline", func(t *testing.T) {
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
existing := "[other]\nkey = \"val\""
os.WriteFile(configPath, []byte(existing), 0o644)
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, _ := os.ReadFile(configPath)
content := string(data)
if !strings.Contains(content, "[profiles.ollama-launch]") {
t.Error("missing [profiles.ollama-launch] header")
}
// Should not have double blank lines from missing trailing newline
if strings.Contains(content, "\n\n\n") {
t.Error("unexpected triple newline in output")
}
})
t.Run("uses custom OLLAMA_HOST", func(t *testing.T) {
t.Setenv("OLLAMA_HOST", "http://myhost:9999")
tmpDir := t.TempDir()
configPath := filepath.Join(tmpDir, "config.toml")
if err := writeCodexProfile(configPath); err != nil {
t.Fatal(err)
}
data, _ := os.ReadFile(configPath)
content := string(data)
if !strings.Contains(content, "myhost:9999/v1/") {
t.Errorf("expected custom host in URL, got:\n%s", content)
}
})
}
func TestEnsureCodexConfig(t *testing.T) {
t.Run("creates .codex dir and config.toml", func(t *testing.T) {
tmpDir := t.TempDir()
setTestHome(t, tmpDir)
if err := ensureCodexConfig(); err != nil {
t.Fatal(err)
}
configPath := filepath.Join(tmpDir, ".codex", "config.toml")
data, err := os.ReadFile(configPath)
if err != nil {
t.Fatalf("config.toml not created: %v", err)
}
content := string(data)
if !strings.Contains(content, "[profiles.ollama-launch]") {
t.Error("missing [profiles.ollama-launch] header")
}
if !strings.Contains(content, "openai_base_url") {
t.Error("missing openai_base_url key")
}
})
t.Run("is idempotent", func(t *testing.T) {
tmpDir := t.TempDir()
setTestHome(t, tmpDir)
if err := ensureCodexConfig(); err != nil {
t.Fatal(err)
}
if err := ensureCodexConfig(); err != nil {
t.Fatal(err)
}
configPath := filepath.Join(tmpDir, ".codex", "config.toml")
data, _ := os.ReadFile(configPath)
content := string(data)
if strings.Count(content, "[profiles.ollama-launch]") != 1 {
t.Errorf("expected exactly one [profiles.ollama-launch] section after two calls, got %d", strings.Count(content, "[profiles.ollama-launch]"))
}
if strings.Count(content, "[model_providers.ollama-launch]") != 1 {
t.Errorf("expected exactly one [model_providers.ollama-launch] section after two calls, got %d", strings.Count(content, "[model_providers.ollama-launch]"))
}
})
}

View File

@@ -35,36 +35,39 @@ To use `codex` with Ollama, use the `--oss` flag:
codex --oss
```
### Changing Models
By default, codex will use the local `gpt-oss:20b` model. However, you can specify a different model with the `-m` flag:
To use a specific model, pass the `-m` flag:
```
codex --oss -m gpt-oss:120b
```
### Cloud Models
To use a cloud model:
```
codex --oss -m gpt-oss:120b-cloud
```
### Profile-based setup
## Connecting to ollama.com
Create an [API key](https://ollama.com/settings/keys) from ollama.com and export it as `OLLAMA_API_KEY`.
To use ollama.com directly, edit your `~/.codex/config.toml` file to point to ollama.com.
For a persistent configuration, add an Ollama provider and profiles to `~/.codex/config.toml`:
```toml
model = "gpt-oss:120b"
model_provider = "ollama"
[model_providers.ollama]
[model_providers.ollama-launch]
name = "Ollama"
base_url = "https://ollama.com/v1"
env_key = "OLLAMA_API_KEY"
base_url = "http://localhost:11434/v1"
[profiles.ollama-launch]
model = "gpt-oss:120b"
model_provider = "ollama-launch"
[profiles.ollama-cloud]
model = "gpt-oss:120b-cloud"
model_provider = "ollama-launch"
```
Run `codex` in a new terminal to load the new settings.
Then run:
```
codex --profile ollama-launch
codex --profile ollama-cloud
```