Phase 2: Rust Workspace Transformation (Part 1)

- Renamed directory: codex-backend-openapi-models -> llmx-backend-openapi-models
- Updated all Cargo.toml files:
  - Package names: codex-* -> llmx-*
  - Library names: codex_* -> llmx_*
  - Workspace dependencies updated
- Renamed Rust source files:
  - codex*.rs -> llmx*.rs (all modules)
  - codex_conversation -> llmx_conversation
  - codex_delegate -> llmx_delegate
  - codex_message_processor -> llmx_message_processor
  - codex_tool_* -> llmx_tool_*
- Updated all Rust imports:
  - use codex_* -> use llmx_*
  - mod codex* -> mod llmx*
- Updated environment variables in code:
  - CODEX_HOME -> LLMX_HOME
  - .codex -> .llmx paths
- Updated protocol crate lib name for proper linking

Note: Some compilation errors remain (type inference issues) but all
renaming is complete. Will fix compilation in next phase.

🤖 Generated with Claude Code
This commit is contained in:
Sebastian Krüger
2025-11-11 14:29:57 +01:00
parent f237fe560d
commit cb8d941adf
346 changed files with 3256 additions and 3199 deletions

View File

@@ -1,11 +1,11 @@
 [package]
 edition = "2024"
-name = "codex-otel"
+name = "llmx-otel"
 version = { workspace = true }
 
 [lib]
 doctest = false
-name = "codex_otel"
+name = "llmx_otel"
 path = "src/lib.rs"
 
 [lints]
@@ -19,8 +19,8 @@ otel = ["opentelemetry", "opentelemetry_sdk", "opentelemetry-otlp", "tonic"]
 
 [dependencies]
 chrono = { workspace = true }
-codex-app-server-protocol = { workspace = true }
-codex-protocol = { workspace = true }
+llmx-app-server-protocol = { workspace = true }
+llmx-protocol = { workspace = true }
 eventsource-stream = { workspace = true }
 opentelemetry = { workspace = true, features = ["logs"], optional = true }
 opentelemetry-otlp = { workspace = true, features = [

View File

@@ -1,15 +1,15 @@
 use chrono::SecondsFormat;
 use chrono::Utc;
-use codex_app_server_protocol::AuthMode;
-use codex_protocol::ConversationId;
-use codex_protocol::config_types::ReasoningEffort;
-use codex_protocol::config_types::ReasoningSummary;
-use codex_protocol::models::ResponseItem;
-use codex_protocol::protocol::AskForApproval;
-use codex_protocol::protocol::ReviewDecision;
-use codex_protocol::protocol::SandboxPolicy;
-use codex_protocol::protocol::SandboxRiskLevel;
-use codex_protocol::user_input::UserInput;
+use llmx_app_server_protocol::AuthMode;
+use llmx_protocol::ConversationId;
+use llmx_protocol::config_types::ReasoningEffort;
+use llmx_protocol::config_types::ReasoningSummary;
+use llmx_protocol::models::ResponseItem;
+use llmx_protocol::protocol::AskForApproval;
+use llmx_protocol::protocol::ReviewDecision;
+use llmx_protocol::protocol::SandboxPolicy;
+use llmx_protocol::protocol::SandboxRiskLevel;
+use llmx_protocol::user_input::UserInput;
 use eventsource_stream::Event as StreamEvent;
 use eventsource_stream::EventStreamError as StreamError;
 use reqwest::Error;