# This adds support for easily running Codex backed by a local Ollama instance
# running our new open source models. See https://github.com/openai/gpt-oss for
# details. If you pass in `--oss` you'll be prompted to install/launch Ollama,
# and it will automatically download the 20b model and attempt to use it. We'll
# likely want to expand this with some options later to make the experience
# smoother for users who can't run the 20b or who want to run the 120b.
#
# Co-authored-by: Michael Bolin <mbolin@openai.com>
[workspace]
# Every crate in this repository; kept one-per-line and alphabetized so
# diffs stay minimal when crates are added or removed.
members = [
    "ansi-escape",
    "apply-patch",
    "arg0",
    "cli",
    "common",
    "core",
    "exec",
    "execpolicy",
    "file-search",
    "linux-sandbox",
    "login",
    "mcp-client",
    "mcp-server",
    "mcp-types",
    "ollama",
    "tui",
]
resolver = "2"
|
[workspace.package]
version = "0.0.0"
# Track the edition for all workspace crates in one place. Individual
# crates can still override this value, but keeping it here means new
# crates created with `cargo new -w ...` automatically inherit the 2024
# edition.
edition = "2024"
|
[workspace.lints]
# No rustc lint overrides yet; the empty table keeps the section in place
# so crates can opt in with `lints.workspace = true`.
rust = {}

[workspace.lints.clippy]
expect_used = "deny"
unwrap_used = "deny"
|
[profile.release]
lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
# remove everything to make the binary as small as possible.
strip = "symbols"

# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1
|
[patch.crates-io]
# Local-development override; uncomment to build against a sibling checkout.
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }