Compare commits
4 Commits
rust-v0.1.6 ... rust-v0.1.7
| Author | SHA1 | Date |
|---|---|---|
| | 7237627ac7 | |
| | 75dda1c285 | |
| | 8f79e89db2 | |
| | c0775ad8a3 | |
llmx-rs/Cargo.lock (generated, 82 changes)
@@ -178,7 +178,7 @@ checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"

 [[package]]
 name = "app_test_support"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -952,7 +952,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"

 [[package]]
 name = "core_test_support"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -2828,7 +2828,7 @@ checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"

 [[package]]
 name = "llmx-ansi-escape"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "ansi-to-tui",
  "ratatui",
@@ -2837,7 +2837,7 @@ dependencies = [

 [[package]]
 name = "llmx-app-server"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "app_test_support",
@@ -2872,7 +2872,7 @@ dependencies = [

 [[package]]
 name = "llmx-app-server-protocol"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "clap",
@@ -2890,7 +2890,7 @@ dependencies = [

 [[package]]
 name = "llmx-apply-patch"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -2905,7 +2905,7 @@ dependencies = [

 [[package]]
 name = "llmx-arg0"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "dotenvy",
@@ -2918,7 +2918,7 @@ dependencies = [

 [[package]]
 name = "llmx-async-utils"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "async-trait",
  "pretty_assertions",
@@ -2942,7 +2942,7 @@ dependencies = [

 [[package]]
 name = "llmx-backend-openapi-models"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "serde",
  "serde_json",
@@ -2951,7 +2951,7 @@ dependencies = [

 [[package]]
 name = "llmx-chatgpt"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "clap",
@@ -2966,7 +2966,7 @@ dependencies = [

 [[package]]
 name = "llmx-cli"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -3006,7 +3006,7 @@ dependencies = [

 [[package]]
 name = "llmx-cloud-tasks"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -3032,7 +3032,7 @@ dependencies = [

 [[package]]
 name = "llmx-cloud-tasks-client"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -3047,7 +3047,7 @@ dependencies = [

 [[package]]
 name = "llmx-common"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "clap",
  "llmx-app-server-protocol",
@@ -3059,7 +3059,7 @@ dependencies = [

 [[package]]
 name = "llmx-core"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "askama",
@@ -3140,7 +3140,7 @@ dependencies = [

 [[package]]
 name = "llmx-exec"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -3173,7 +3173,7 @@ dependencies = [

 [[package]]
 name = "llmx-execpolicy"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "allocative",
  "anyhow",
@@ -3193,7 +3193,7 @@ dependencies = [

 [[package]]
 name = "llmx-feedback"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "llmx-protocol",
@@ -3204,7 +3204,7 @@ dependencies = [

 [[package]]
 name = "llmx-file-search"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "clap",
@@ -3217,7 +3217,7 @@ dependencies = [

 [[package]]
 name = "llmx-git"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "assert_matches",
  "once_cell",
@@ -3233,7 +3233,7 @@ dependencies = [

 [[package]]
 name = "llmx-keyring-store"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "keyring",
  "tracing",
@@ -3241,7 +3241,7 @@ dependencies = [

 [[package]]
 name = "llmx-linux-sandbox"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "clap",
  "landlock",
@@ -3254,7 +3254,7 @@ dependencies = [

 [[package]]
 name = "llmx-login"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "base64",
@@ -3278,7 +3278,7 @@ dependencies = [

 [[package]]
 name = "llmx-mcp-server"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -3305,7 +3305,7 @@ dependencies = [

 [[package]]
 name = "llmx-ollama"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "assert_matches",
  "async-stream",
@@ -3321,7 +3321,7 @@ dependencies = [

 [[package]]
 name = "llmx-otel"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "chrono",
  "eventsource-stream",
@@ -3342,14 +3342,14 @@ dependencies = [

 [[package]]
 name = "llmx-process-hardening"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "libc",
 ]

 [[package]]
 name = "llmx-protocol"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "base64",
@@ -3375,7 +3375,7 @@ dependencies = [

 [[package]]
 name = "llmx-responses-api-proxy"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "clap",
@@ -3391,7 +3391,7 @@ dependencies = [

 [[package]]
 name = "llmx-rmcp-client"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "axum",
@@ -3420,7 +3420,7 @@ dependencies = [

 [[package]]
 name = "llmx-stdio-to-uds"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -3431,7 +3431,7 @@ dependencies = [

 [[package]]
 name = "llmx-tui"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "arboard",
@@ -3496,7 +3496,7 @@ dependencies = [

 [[package]]
 name = "llmx-utils-cache"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "lru",
  "sha1",
@@ -3505,7 +3505,7 @@ dependencies = [

 [[package]]
 name = "llmx-utils-image"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "base64",
  "image",
@@ -3517,7 +3517,7 @@ dependencies = [

 [[package]]
 name = "llmx-utils-json-to-toml"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "pretty_assertions",
  "serde_json",
@@ -3526,7 +3526,7 @@ dependencies = [

 [[package]]
 name = "llmx-utils-pty"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "portable-pty",
@@ -3535,7 +3535,7 @@ dependencies = [

 [[package]]
 name = "llmx-utils-readiness"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "assert_matches",
  "async-trait",
@@ -3546,11 +3546,11 @@ dependencies = [

 [[package]]
 name = "llmx-utils-string"
-version = "0.1.6"
+version = "0.1.7"

 [[package]]
 name = "llmx-utils-tokenizer"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "pretty_assertions",
@@ -3660,7 +3660,7 @@ checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"

 [[package]]
 name = "mcp-types"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "schemars 0.8.22",
  "serde",
@@ -3670,7 +3670,7 @@ dependencies = [

 [[package]]
 name = "mcp_test_support"
-version = "0.1.6"
+version = "0.1.7"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -43,7 +43,7 @@ members = [
 resolver = "2"

 [workspace.package]
-version = "0.1.6"
+version = "0.1.7"
 # Track the edition for all workspace crates in one place. Individual
 # crates can still override this value, but keeping it here means new
 # crates created with `cargo new -w ...` automatically inherit the 2024
@@ -138,7 +138,7 @@ impl McpProcess {
             client_info: ClientInfo {
                 name: "llmx-app-server-tests".to_string(),
                 title: None,
-                version: "0.1.6".to_string(),
+                version: "0.1.7".to_string(),
             },
         })?);
         let req_id = self.send_request("initialize", params).await?;
@@ -26,7 +26,7 @@ async fn get_user_agent_returns_current_llmx_user_agent() -> Result<()> {

     let os_info = os_info::get();
     let user_agent = format!(
-        "llmx_cli_rs/0.1.6 ({} {}; {}) {} (llmx-app-server-tests; 0.1.6)",
+        "llmx_cli_rs/0.1.7 ({} {}; {}) {} (llmx-app-server-tests; 0.1.7)",
         os_info.os_type(),
         os_info.version(),
         os_info.architecture().unwrap_or("unknown"),
@@ -443,10 +443,12 @@ pub(crate) async fn stream_chat_completions(
     });

     // Add max_tokens - required by Anthropic Messages API
-    // Use a sensible default of 8192 if not configured
+    // Use provider config value or default to 8192
+    let max_tokens = provider.max_tokens.unwrap_or(8192);
     if let Some(obj) = payload.as_object_mut() {
-        obj.insert("max_tokens".to_string(), json!(8192));
+        obj.insert("max_tokens".to_string(), json!(max_tokens));
     }
+    debug!("Using max_tokens: {}", max_tokens);

     debug!(
         "POST to {}: {}",
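For context on the hunk above: the Chat Completions payload previously always received a hard-coded `max_tokens` of 8192; after this change the provider's configured value wins and 8192 is only the fallback. Below is a minimal, self-contained sketch of that behavior. The `Provider` struct is a hypothetical stand-in for the real provider type (which has many more fields); only the fallback logic mirrors the diff.

```rust
use serde_json::json;

// Hypothetical, pared-down stand-in for the provider type in the hunk above;
// only the field this change reads is modeled here.
struct Provider {
    max_tokens: Option<i64>,
}

// Mirrors the fallback in the diff: use the configured value, else 8192.
fn apply_max_tokens(provider: &Provider, payload: &mut serde_json::Value) {
    let max_tokens = provider.max_tokens.unwrap_or(8192);
    if let Some(obj) = payload.as_object_mut() {
        obj.insert("max_tokens".to_string(), json!(max_tokens));
    }
}

fn main() {
    // A provider that configures a limit gets that limit in the payload.
    let mut configured = json!({ "model": "example", "messages": [] });
    apply_max_tokens(&Provider { max_tokens: Some(4096) }, &mut configured);
    assert_eq!(configured["max_tokens"], 4096);

    // A provider that leaves it unset falls back to the 8192 default.
    let mut defaulted = json!({ "model": "example", "messages": [] });
    apply_max_tokens(&Provider { max_tokens: None }, &mut defaulted);
    assert_eq!(defaulted["max_tokens"], 8192);
}
```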
@@ -1123,6 +1123,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1187,6 +1188,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1224,6 +1226,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1263,6 +1266,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1298,6 +1302,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1333,6 +1338,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1437,6 +1443,7 @@ mod tests {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(1000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -2809,6 +2809,7 @@ model_verbosity = "high"
         request_max_retries: Some(4),
         stream_max_retries: Some(10),
         stream_idle_timeout_ms: Some(300_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };
     let model_provider_map = {
@@ -87,6 +87,10 @@ pub struct ModelProviderInfo {
     /// the connection as lost.
     pub stream_idle_timeout_ms: Option<u64>,

+    /// Maximum number of tokens to generate in the response. If not specified, defaults to 8192.
+    /// This is required by some providers (e.g., Anthropic via LiteLLM).
+    pub max_tokens: Option<i64>,
+
     /// Does this provider require an OpenAI API Key or ChatGPT login token? If true,
     /// user is presented with login screen on first run, and login preference and token/key
     /// are stored in auth.json. If false (which is the default), login screen is skipped,
@@ -290,6 +294,7 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
                 request_max_retries: None,
                 stream_max_retries: None,
                 stream_idle_timeout_ms: None,
+                max_tokens: None,
                 requires_openai_auth: false,
             },
         ),
@@ -330,6 +335,7 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
                 request_max_retries: None,
                 stream_max_retries: None,
                 stream_idle_timeout_ms: None,
+                max_tokens: None,
                 requires_openai_auth: true,
             },
         ),
@@ -375,6 +381,7 @@ pub fn create_oss_provider_with_base_url(base_url: &str) -> ModelProviderInfo {
         request_max_retries: None,
         stream_max_retries: None,
         stream_idle_timeout_ms: None,
+        max_tokens: None,
         requires_openai_auth: false,
     }
 }
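The new `max_tokens` field is optional on the provider definition, so existing provider entries that never mention it continue to deserialize unchanged and simply get `None` (which the request-building code shown earlier turns into the 8192 default). A minimal sketch of that round-trip, assuming an ordinary serde `Deserialize` derive; the `ProviderConfig` struct here is a hypothetical stand-in, not the real `ModelProviderInfo`:

```rust
use serde::Deserialize;
use serde_json::json;

// Pared-down, hypothetical stand-in for the provider struct in the hunk above;
// only the newly added field is modeled.
#[derive(Deserialize, Debug)]
struct ProviderConfig {
    #[serde(default)]
    max_tokens: Option<i64>,
}

fn main() {
    // A provider entry that sets the new key.
    let with_limit: ProviderConfig =
        serde_json::from_value(json!({ "max_tokens": 4096 })).unwrap();
    assert_eq!(with_limit.max_tokens, Some(4096));

    // A provider entry that omits it: the field deserializes to None, and the
    // request-building code falls back to 8192 downstream.
    let without_limit: ProviderConfig = serde_json::from_value(json!({})).unwrap();
    assert_eq!(without_limit.max_tokens, None);
}
```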
@@ -58,6 +58,7 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(5_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -58,6 +58,7 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(5_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -47,6 +47,7 @@ async fn responses_stream_includes_subagent_header_on_review() {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(5_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -135,6 +136,7 @@ async fn responses_stream_includes_subagent_header_on_other() {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(5_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -712,6 +712,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
         request_max_retries: Some(0),
         stream_max_retries: Some(0),
         stream_idle_timeout_ms: Some(5_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1195,6 +1196,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
         request_max_retries: None,
         stream_max_retries: None,
         stream_idle_timeout_ms: None,
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -1272,6 +1274,7 @@ async fn env_var_overrides_loaded_auth() {
         request_max_retries: None,
         stream_max_retries: None,
         stream_idle_timeout_ms: None,
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -72,6 +72,7 @@ async fn continue_after_stream_error() {
         request_max_retries: Some(1),
         stream_max_retries: Some(1),
         stream_idle_timeout_ms: Some(2_000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -80,6 +80,7 @@ async fn retries_on_early_close() {
         request_max_retries: Some(0),
         stream_max_retries: Some(1),
         stream_idle_timeout_ms: Some(2000),
+        max_tokens: None,
         requires_openai_auth: false,
     };

@@ -144,7 +144,7 @@ impl McpProcess {
         let initialized = self.read_jsonrpc_message().await?;
         let os_info = os_info::get();
         let user_agent = format!(
-            "llmx_cli_rs/0.1.6 ({} {}; {}) {} (elicitation test; 0.0.0)",
+            "llmx_cli_rs/0.1.7 ({} {}; {}) {} (elicitation test; 0.0.0)",
             os_info.os_type(),
             os_info.version(),
             os_info.architecture().unwrap_or("unknown"),
@@ -163,7 +163,7 @@ impl McpProcess {
             "serverInfo": {
                 "name": "llmx-mcp-server",
                 "title": "LLMX",
-                "version": "0.1.6",
+                "version": "0.1.7",
                 "user_agent": user_agent
             },
             "protocolVersion": mcp_types::MCP_SCHEMA_VERSION
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭───────────────────────────────────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/usage for up-to-date │
 │ information on rate limits and credits │
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭─────────────────────────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/usage for up-to-date │
 │ information on rate limits and credits │
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭──────────────────────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/usage for up-to-date │
 │ information on rate limits and credits │
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭──────────────────────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/usage for up-to-date │
 │ information on rate limits and credits │
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭───────────────────────────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/usage for up-to-date │
 │ information on rate limits and credits │
@@ -5,7 +5,7 @@ expression: sanitized
 /status

 ╭────────────────────────────────────────────╮
-│ >_ LLMX (v0.1.6) │
+│ >_ LLMX (v0.1.7) │
 │ │
 │ Visit https://chatgpt.com/llmx/settings/ │
 │ usage for up-to-date │
llmx-rs/tui/tests/fixtures/binary-size-log.jsonl (vendored, 10 changes)
File diff suppressed because one or more lines are too long