Phase 2: Rust Workspace Transformation (Part 1)

- Renamed directory: codex-backend-openapi-models -> llmx-backend-openapi-models
- Updated all Cargo.toml files:
  - Package names: codex-* -> llmx-*
  - Library names: codex_* -> llmx_*
  - Workspace dependencies updated
- Renamed Rust source files:
  - codex*.rs -> llmx*.rs (all modules)
  - codex_conversation -> llmx_conversation
  - codex_delegate -> llmx_delegate
  - codex_message_processor -> llmx_message_processor
  - codex_tool_* -> llmx_tool_*
- Updated all Rust imports:
  - use codex_* -> use llmx_*
  - mod codex* -> mod llmx*
- Updated environment variables in code (see the sketch after this list):
  - CODEX_HOME -> LLMX_HOME
  - .codex -> .llmx paths
- Updated protocol crate lib name for proper linking
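
As a rough illustration of the environment-variable change, the sketch below shows how a home-directory lookup moves from CODEX_HOME / ~/.codex to LLMX_HOME / ~/.llmx. The helper name llmx_home and its exact shape are hypothetical; the real resolution logic lives in the renamed crates and may differ.

use std::path::PathBuf;

/// Hypothetical sketch of the renamed home-directory lookup: the override
/// variable is now LLMX_HOME (was CODEX_HOME) and the default dotfolder is
/// ~/.llmx (was ~/.codex). The actual implementation in llmx-core may differ.
fn llmx_home() -> Option<PathBuf> {
    // Explicit override via the renamed environment variable.
    if let Some(dir) = std::env::var_os("LLMX_HOME") {
        return Some(PathBuf::from(dir));
    }
    // Fall back to the renamed dotfolder under the user's home directory
    // (HOME is assumed here; Windows would need a different lookup).
    std::env::var_os("HOME").map(|home| PathBuf::from(home).join(".llmx"))
}

Under this sketch, a path previously written as ~/.codex/config.toml would now resolve to ~/.llmx/config.toml.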

Note: Some compilation errors remain (type inference issues), but all
renaming is complete. Compilation will be fixed in the next phase.

🤖 Generated with Claude Code
Author: Sebastian Krüger
Date: 2025-11-11 14:29:57 +01:00
Parent: f237fe560d
Commit: cb8d941adf
346 changed files with 3256 additions and 3199 deletions


@@ -1,10 +1,10 @@
 [package]
 edition = "2024"
-name = "codex-ollama"
+name = "llmx-ollama"
 version = { workspace = true }

 [lib]
-name = "codex_ollama"
+name = "llmx_ollama"
 path = "src/lib.rs"

 [lints]
@@ -13,7 +13,7 @@ workspace = true
 [dependencies]
 async-stream = { workspace = true }
 bytes = { workspace = true }
-codex-core = { workspace = true }
+llmx-core = { workspace = true }
 futures = { workspace = true }
 reqwest = { workspace = true, features = ["json", "stream"] }
 serde_json = { workspace = true }


@@ -10,10 +10,10 @@ use crate::pull::PullEvent;
 use crate::pull::PullProgressReporter;
 use crate::url::base_url_to_host_root;
 use crate::url::is_openai_compatible_base_url;
-use codex_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
-use codex_core::ModelProviderInfo;
-use codex_core::WireApi;
-use codex_core::config::Config;
+use llmx_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
+use llmx_core::ModelProviderInfo;
+use llmx_core::WireApi;
+use llmx_core::config::Config;

 const OLLAMA_CONNECTION_ERROR: &str = "No running Ollama server detected. Start it with: `ollama serve` (after installing). Install instructions: https://github.com/ollama/ollama?tab=readme-ov-file#ollama";
@@ -47,7 +47,7 @@ impl OllamaClient {
     #[cfg(test)]
     async fn try_from_provider_with_base_url(base_url: &str) -> io::Result<Self> {
-        let provider = codex_core::create_oss_provider_with_base_url(base_url);
+        let provider = llmx_core::create_oss_provider_with_base_url(base_url);
         Self::try_from_provider(&provider).await
     }
@@ -239,10 +239,10 @@ mod tests {
     // Happy-path tests using a mock HTTP server; skip if sandbox network is disabled.
     #[tokio::test]
     async fn test_fetch_models_happy_path() {
-        if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
+        if std::env::var(llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
             tracing::info!(
                 "{} is set; skipping test_fetch_models_happy_path",
-                codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
+                llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
             );
             return;
         }
@@ -270,10 +270,10 @@ mod tests {
     #[tokio::test]
     async fn test_probe_server_happy_path_openai_compat_and_native() {
-        if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
+        if std::env::var(llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
             tracing::info!(
                 "{} set; skipping test_probe_server_happy_path_openai_compat_and_native",
-                codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
+                llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
             );
             return;
         }
@@ -307,10 +307,10 @@ mod tests {
     #[tokio::test]
     async fn test_try_from_oss_provider_ok_when_server_running() {
-        if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
+        if std::env::var(llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
             tracing::info!(
                 "{} set; skipping test_try_from_oss_provider_ok_when_server_running",
-                codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
+                llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
             );
             return;
         }
@@ -331,10 +331,10 @@ mod tests {
     #[tokio::test]
     async fn test_try_from_oss_provider_err_when_server_missing() {
-        if std::env::var(codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
+        if std::env::var(llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
             tracing::info!(
                 "{} set; skipping test_try_from_oss_provider_err_when_server_missing",
-                codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
+                llmx_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
             );
             return;
         }


@@ -4,7 +4,7 @@ mod pull;
 mod url;

 pub use client::OllamaClient;
-use codex_core::config::Config;
+use llmx_core::config::Config;
 pub use pull::CliProgressReporter;
 pub use pull::PullEvent;
 pub use pull::PullProgressReporter;