fix: resume lookup for gitignored CODEX_HOME (#5311)

Have the resume lookup walk the sessions tree without applying gitignore
rules, so a gitignored CODEX_HOME directory can still resume sessions. Add
regression tests covering a .gitignore'd CODEX_HOME and a granular
sessions/.gitignore.

Fixes #5247
Fixes #5412

---------

Co-authored-by: Owen Lin <owen@openai.com>
Author: Thibault Sottiaux
Date: 2025-10-23 10:04:40 -07:00
Committed by: GitHub
Parent: 0b4527146e
Commit: 3059373e06

5 changed files with 51 additions and 6 deletions
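The resume lookup goes through the shared file-search walker, which is built on the `ignore` crate and honors `.gitignore` rules, including rules read from parent directories. When CODEX_HOME sits inside a repository whose `.gitignore` covers it (for example a `.codex/**` rule), every rollout file under `sessions/` is filtered out and the lookup finds nothing. The sketch below is not part of this commit; `visible_files` is a hypothetical helper, and the builder switches mirror the ones the diff flips off when `respect_gitignore` is false.

// Minimal sketch (not part of this commit) of the walker behavior the fix
// targets, using the `ignore` crate directly.
use std::path::{Path, PathBuf};

use ignore::WalkBuilder;

fn visible_files(root: &Path, respect_gitignore: bool) -> Vec<PathBuf> {
    let mut builder = WalkBuilder::new(root);
    builder
        // Keep hidden entries such as `.codex` visible.
        .hidden(false)
        // Apply git-related ignore rules even when git is not installed,
        // matching the existing file-search configuration.
        .require_git(false);
    if !respect_gitignore {
        // The same switches the patch turns off for resume lookups:
        // skip .gitignore, the global/exclude files, .ignore files, and
        // any ignore files discovered in parent directories.
        builder
            .git_ignore(false)
            .git_global(false)
            .git_exclude(false)
            .ignore(false)
            .parents(false);
    }
    builder
        .build()
        .filter_map(Result::ok)
        .filter(|entry| entry.file_type().is_some_and(|ft| ft.is_file()))
        .map(|entry| entry.into_path())
        .collect()
}

With `respect_gitignore` left at true, the parent `.codex/**` rule hides every rollout file, which is why resume came back empty. The resume path now passes false, while the interactive fuzzy-search call sites keep true so project searches still respect ignore files.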


@@ -46,6 +46,7 @@ pub(crate) async fn run_fuzzy_file_search(
         threads,
         cancel_flag,
         COMPUTE_INDICES,
+        true,
     ) {
         Ok(res) => Ok((root, res)),
         Err(err) => Err((root, err)),


@@ -1,12 +1,11 @@
 use std::cmp::Reverse;
 use std::io::{self};
+use std::num::NonZero;
 use std::path::Path;
 use std::path::PathBuf;
-use codex_file_search as file_search;
-use std::num::NonZero;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
 use time::OffsetDateTime;
 use time::PrimitiveDateTime;
 use time::format_description::FormatItem;
@@ -15,6 +14,7 @@ use uuid::Uuid;
 use super::SESSIONS_SUBDIR;
 use crate::protocol::EventMsg;
+use codex_file_search as file_search;
 use codex_protocol::protocol::RolloutItem;
 use codex_protocol::protocol::RolloutLine;
 use codex_protocol::protocol::SessionSource;
@@ -515,6 +515,7 @@ pub async fn find_conversation_path_by_id_str(
         threads,
         cancel,
         compute_indices,
+        false,
     )
     .map_err(|e| io::Error::other(format!("file search failed: {e}")))?;


@@ -1,5 +1,6 @@
 #![allow(clippy::unwrap_used, clippy::expect_used)]
 use std::io::Write;
+use std::path::Path;
 use std::path::PathBuf;
 use codex_core::find_conversation_path_by_id_str;
@@ -8,8 +9,8 @@ use uuid::Uuid;
 /// Create sessions/YYYY/MM/DD and write a minimal rollout file containing the
 /// provided conversation id in the SessionMeta line. Returns the absolute path.
-fn write_minimal_rollout_with_id(codex_home: &TempDir, id: Uuid) -> PathBuf {
-    let sessions = codex_home.path().join("sessions/2024/01/01");
+fn write_minimal_rollout_with_id(codex_home: &Path, id: Uuid) -> PathBuf {
+    let sessions = codex_home.join("sessions/2024/01/01");
     std::fs::create_dir_all(&sessions).unwrap();
     let file = sessions.join(format!("rollout-2024-01-01T00-00-00-{id}.jsonl"));
@@ -40,7 +41,7 @@ fn write_minimal_rollout_with_id(codex_home: &TempDir, id: Uuid) -> PathBuf {
 async fn find_locates_rollout_file_by_id() {
     let home = TempDir::new().unwrap();
     let id = Uuid::new_v4();
-    let expected = write_minimal_rollout_with_id(&home, id);
+    let expected = write_minimal_rollout_with_id(home.path(), id);
     let found = find_conversation_path_by_id_str(home.path(), &id.to_string())
         .await
@@ -48,3 +49,33 @@ async fn find_locates_rollout_file_by_id() {
     assert_eq!(found.unwrap(), expected);
 }
+
+#[tokio::test]
+async fn find_handles_gitignore_covering_codex_home_directory() {
+    let repo = TempDir::new().unwrap();
+    let codex_home = repo.path().join(".codex");
+    std::fs::create_dir_all(&codex_home).unwrap();
+    std::fs::write(repo.path().join(".gitignore"), ".codex/**\n").unwrap();
+
+    let id = Uuid::new_v4();
+    let expected = write_minimal_rollout_with_id(&codex_home, id);
+
+    let found = find_conversation_path_by_id_str(&codex_home, &id.to_string())
+        .await
+        .unwrap();
+    assert_eq!(found, Some(expected));
+}
+
+#[tokio::test]
+async fn find_ignores_granular_gitignore_rules() {
+    let home = TempDir::new().unwrap();
+    let id = Uuid::new_v4();
+    let expected = write_minimal_rollout_with_id(home.path(), id);
+    std::fs::write(home.path().join("sessions/.gitignore"), "*.jsonl\n").unwrap();
+
+    let found = find_conversation_path_by_id_str(home.path(), &id.to_string())
+        .await
+        .unwrap();
+    assert_eq!(found, Some(expected));
+}


@@ -105,6 +105,7 @@ pub async fn run_main<T: Reporter>(
         threads,
         cancel_flag,
         compute_indices,
+        true,
     )?;
     let match_count = matches.len();
     let matches_truncated = total_match_count > match_count;
@@ -121,6 +122,7 @@
 /// The worker threads will periodically check `cancel_flag` to see if they
 /// should stop processing files.
+#[allow(clippy::too_many_arguments)]
 pub fn run(
     pattern_text: &str,
     limit: NonZero<usize>,
@@ -129,6 +131,7 @@ pub fn run(
     threads: NonZero<usize>,
     cancel_flag: Arc<AtomicBool>,
     compute_indices: bool,
+    respect_gitignore: bool,
 ) -> anyhow::Result<FileSearchResults> {
     let pattern = create_pattern(pattern_text);
     // Create one BestMatchesList per worker thread so that each worker can
@@ -157,6 +160,14 @@ pub fn run(
         .hidden(false)
         // Don't require git to be present to apply to apply git-related ignore rules.
         .require_git(false);
+    if !respect_gitignore {
+        walk_builder
+            .git_ignore(false)
+            .git_global(false)
+            .git_exclude(false)
+            .ignore(false)
+            .parents(false);
+    }
     if !exclude.is_empty() {
         let mut override_builder = OverrideBuilder::new(search_directory);


@@ -172,6 +172,7 @@ impl FileSearchManager {
             NUM_FILE_SEARCH_THREADS,
             cancellation_token.clone(),
             compute_indices,
+            true,
         )
         .map(|res| res.matches)
         .unwrap_or_default();