fix: run apply_patch calls through the sandbox (#1705)
Building on the work of https://github.com/openai/codex/pull/1702, this changes how a shell call to `apply_patch` is handled. Previously, a shell call to `apply_patch` was always handled in-process, never leveraging a sandbox. To determine whether the `apply_patch` operation could be auto-approved, the `is_write_patch_constrained_to_writable_paths()` function would check whether all of the paths listed in the patch were writable. If so, the agent would apply the changes listed in the patch.

Unfortunately, this approach afforded a loophole: symlinks!

* For a soft link, we could fix this issue by tracing the link and checking whether the target is in the set of writable paths, however...
* ...for a hard link, things are not as simple. We can run `stat FILE` to see whether the number of links is greater than 1, but then we would have to do something potentially expensive like `find . -inum <inode_number>` to find the other paths for `FILE`. Further, even if this worked, the approach runs the risk of a [TOCTOU](https://en.wikipedia.org/wiki/Time-of-check_to_time-of-use) race condition, so it is not robust.

The solution, implemented in this PR, is to turn the virtual execution of the `apply_patch` CLI into an _actual_ execution using `codex --codex-run-as-apply-patch PATCH`, which we can run under the sandbox the user specified, just like any other `shell` call. This, of course, assumes that the sandbox prevents writing through symlinks as a mechanism to write to folders that are not in the writable set configured by the sandbox. I verified this by testing the following on both Mac and Linux:

```shell
#!/usr/bin/env bash
set -euo pipefail

# Can running a command in SANDBOX_DIR write a file in EXPLOIT_DIR?
# Codex is run in SANDBOX_DIR, so writes should be constrained to this directory.
SANDBOX_DIR=$(mktemp -d -p "$HOME" sandboxtesttemp.XXXXXX)

# EXPLOIT_DIR is outside of SANDBOX_DIR, so let's see if we can write to it.
EXPLOIT_DIR=$(mktemp -d -p "$HOME" sandboxtesttemp.XXXXXX)

echo "SANDBOX_DIR: $SANDBOX_DIR"
echo "EXPLOIT_DIR: $EXPLOIT_DIR"

cleanup() {
  # Only remove if it looks sane and still exists
  [[ -n "${SANDBOX_DIR:-}" && -d "$SANDBOX_DIR" ]] && rm -rf -- "$SANDBOX_DIR"
  [[ -n "${EXPLOIT_DIR:-}" && -d "$EXPLOIT_DIR" ]] && rm -rf -- "$EXPLOIT_DIR"
}
trap cleanup EXIT

echo "I am the original content" > "${EXPLOIT_DIR}/original.txt"

# Drop the -s to test hard links.
ln -s "${EXPLOIT_DIR}/original.txt" "${SANDBOX_DIR}/link-to-original.txt"
cat "${SANDBOX_DIR}/link-to-original.txt"

if [[ "$(uname)" == "Linux" ]]; then
  SANDBOX_SUBCOMMAND=landlock
else
  SANDBOX_SUBCOMMAND=seatbelt
fi

# Attempt the exploit
cd "${SANDBOX_DIR}"
codex debug "${SANDBOX_SUBCOMMAND}" bash -lc "echo pwned > ./link-to-original.txt" || true

cat "${EXPLOIT_DIR}/original.txt"
```

Admittedly, this change merits a proper integration test, but I think I will have to do that in a follow-up PR.
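To make the loophole described above concrete, here is a minimal, illustrative Rust sketch of why a purely lexical writable-path check (in the spirit of `is_write_patch_constrained_to_writable_paths()`, though this is not the actual codex code) approves a write that really lands outside the sandbox. The function name and paths below are hypothetical.

```rust
use std::path::{Path, PathBuf};

/// Hypothetical stand-in for a lexical writability check: a path counts as
/// writable if it sits under one of the configured writable roots. It never
/// consults the filesystem, so it cannot see where a symlink points.
fn is_path_lexically_writable(path: &Path, writable_roots: &[PathBuf]) -> bool {
    writable_roots.iter().any(|root| path.starts_with(root))
}

fn main() {
    // Assume `/tmp/sandbox` is the writable root and that, as in the script
    // above, someone has run:
    //   ln -s /tmp/outside/original.txt /tmp/sandbox/link-to-original.txt
    let writable_roots = vec![PathBuf::from("/tmp/sandbox")];
    let patch_target = Path::new("/tmp/sandbox/link-to-original.txt");

    // The lexical check approves the write because the path is under the
    // writable root...
    assert!(is_path_lexically_writable(patch_target, &writable_roots));

    // ...but resolving the link shows where the bytes would actually land.
    // Even a canonicalize-based check is still exposed to the TOCTOU race
    // noted above, which is why the fix delegates enforcement to the sandbox.
    if let Ok(real_target) = std::fs::canonicalize(patch_target) {
        println!("write would actually land at {}", real_target.display());
    }
}
```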
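And here is a rough sketch of the shape of the change itself: instead of applying hunks in-process, the patch is handed to a real `codex --codex-run-as-apply-patch` invocation that the existing sandbox machinery (Seatbelt on macOS, Landlock on Linux) can constrain like any other exec. Only the exec step is shown; the sandbox wrapping and the actual wiring inside codex are omitted, and every name except the `--codex-run-as-apply-patch` flag mentioned above is a placeholder.

```rust
use std::path::Path;
use std::process::Command;

/// Illustrative only: launch `codex` so that it re-executes as the
/// `apply_patch` implementation for the given patch body. Because this is a
/// real child process, the same sandbox policy used for other `shell` calls
/// can be applied to it; that wrapping is not shown here.
fn run_apply_patch_as_exec(
    codex_binary: &str,
    patch: &str,
    cwd: &Path,
) -> std::io::Result<std::process::ExitStatus> {
    Command::new(codex_binary)
        .arg("--codex-run-as-apply-patch")
        .arg(patch)
        .current_dir(cwd)
        .status()
}

fn main() {
    let patch = "*** Begin Patch\n*** Add File: hello.txt\n+hello\n*** End Patch";
    match run_apply_patch_as_exec("codex", patch, Path::new(".")) {
        Ok(status) => println!("apply_patch exited with {status}"),
        Err(err) => eprintln!("could not launch codex: {err}"),
    }
}
```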
```diff
@@ -58,16 +58,24 @@ impl PartialEq for IoError {
 #[derive(Debug, PartialEq)]
 pub enum MaybeApplyPatch {
-    Body(Vec<Hunk>),
+    Body(ApplyPatchArgs),
     ShellParseError(ExtractHeredocError),
     PatchParseError(ParseError),
     NotApplyPatch,
 }
 
+/// Both the raw PATCH argument to `apply_patch` as well as the PATCH argument
+/// parsed into hunks.
+#[derive(Debug, PartialEq)]
+pub struct ApplyPatchArgs {
+    pub patch: String,
+    pub hunks: Vec<Hunk>,
+}
+
 pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch {
     match argv {
         [cmd, body] if cmd == "apply_patch" => match parse_patch(body) {
-            Ok(hunks) => MaybeApplyPatch::Body(hunks),
+            Ok(source) => MaybeApplyPatch::Body(source),
            Err(e) => MaybeApplyPatch::PatchParseError(e),
         },
         [bash, flag, script]
@@ -77,7 +85,7 @@ pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch {
         {
             match extract_heredoc_body_from_apply_patch_command(script) {
                 Ok(body) => match parse_patch(&body) {
-                    Ok(hunks) => MaybeApplyPatch::Body(hunks),
+                    Ok(source) => MaybeApplyPatch::Body(source),
                     Err(e) => MaybeApplyPatch::PatchParseError(e),
                 },
                 Err(e) => MaybeApplyPatch::ShellParseError(e),
@@ -116,11 +124,19 @@ pub enum MaybeApplyPatchVerified {
     NotApplyPatch,
 }
 
-#[derive(Debug, PartialEq)]
+/// ApplyPatchAction is the result of parsing an `apply_patch` command. By
+/// construction, all paths should be absolute paths.
+#[derive(Debug, PartialEq)]
 pub struct ApplyPatchAction {
     changes: HashMap<PathBuf, ApplyPatchFileChange>,
+
+    /// The raw patch argument that can be used with `apply_patch` as an exec
+    /// call. i.e., if the original arg was parsed in "lenient" mode with a
+    /// heredoc, this should be the value without the heredoc wrapper.
+    pub patch: String,
+
+    /// The working directory that was used to resolve relative paths in the patch.
+    pub cwd: PathBuf,
 }
 
 impl ApplyPatchAction {
@@ -140,8 +156,28 @@ impl ApplyPatchAction {
             panic!("path must be absolute");
         }
 
+        #[allow(clippy::expect_used)]
+        let filename = path
+            .file_name()
+            .expect("path should not be empty")
+            .to_string_lossy();
+        let patch = format!(
+            r#"*** Begin Patch
+*** Update File: {filename}
+@@
++ {content}
+*** End Patch"#,
+        );
         let changes = HashMap::from([(path.to_path_buf(), ApplyPatchFileChange::Add { content })]);
-        Self { changes }
+        #[allow(clippy::expect_used)]
+        Self {
+            changes,
+            cwd: path
+                .parent()
+                .expect("path should have parent")
+                .to_path_buf(),
+            patch,
+        }
     }
 }
 
@@ -149,7 +185,7 @@ impl ApplyPatchAction {
 /// patch.
 pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApplyPatchVerified {
     match maybe_parse_apply_patch(argv) {
-        MaybeApplyPatch::Body(hunks) => {
+        MaybeApplyPatch::Body(ApplyPatchArgs { patch, hunks }) => {
             let mut changes = HashMap::new();
             for hunk in hunks {
                 let path = hunk.resolve_path(cwd);
@@ -183,7 +219,11 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
                     }
                 }
             }
-            MaybeApplyPatchVerified::Body(ApplyPatchAction { changes })
+            MaybeApplyPatchVerified::Body(ApplyPatchAction {
+                changes,
+                patch,
+                cwd: cwd.to_path_buf(),
+            })
         }
         MaybeApplyPatch::ShellParseError(e) => MaybeApplyPatchVerified::ShellParseError(e),
         MaybeApplyPatch::PatchParseError(e) => MaybeApplyPatchVerified::CorrectnessError(e.into()),
@@ -264,7 +304,7 @@ pub fn apply_patch(
     stderr: &mut impl std::io::Write,
 ) -> Result<(), ApplyPatchError> {
     let hunks = match parse_patch(patch) {
-        Ok(hunks) => hunks,
+        Ok(source) => source.hunks,
         Err(e) => {
             match &e {
                 InvalidPatchError(message) => {
@@ -652,7 +692,7 @@ mod tests {
        ]);
 
         match maybe_parse_apply_patch(&args) {
-            MaybeApplyPatch::Body(hunks) => {
+            MaybeApplyPatch::Body(ApplyPatchArgs { hunks, patch: _ }) => {
                 assert_eq!(
                     hunks,
                     vec![Hunk::AddFile {
@@ -679,7 +719,7 @@ PATCH"#,
         ]);
 
         match maybe_parse_apply_patch(&args) {
-            MaybeApplyPatch::Body(hunks) => {
+            MaybeApplyPatch::Body(ApplyPatchArgs { hunks, patch: _ }) => {
                 assert_eq!(
                     hunks,
                     vec![Hunk::AddFile {
@@ -954,7 +994,7 @@ PATCH"#,
         ));
         let patch = parse_patch(&patch).unwrap();
 
-        let update_file_chunks = match patch.as_slice() {
+        let update_file_chunks = match patch.hunks.as_slice() {
            [Hunk::UpdateFile { chunks, .. }] => chunks,
            _ => panic!("Expected a single UpdateFile hunk"),
        };
@@ -992,7 +1032,7 @@ PATCH"#,
         ));
 
         let patch = parse_patch(&patch).unwrap();
-        let chunks = match patch.as_slice() {
+        let chunks = match patch.hunks.as_slice() {
            [Hunk::UpdateFile { chunks, .. }] => chunks,
            _ => panic!("Expected a single UpdateFile hunk"),
        };
@@ -1029,7 +1069,7 @@ PATCH"#,
         ));
 
         let patch = parse_patch(&patch).unwrap();
-        let chunks = match patch.as_slice() {
+        let chunks = match patch.hunks.as_slice() {
            [Hunk::UpdateFile { chunks, .. }] => chunks,
            _ => panic!("Expected a single UpdateFile hunk"),
        };
@@ -1064,7 +1104,7 @@ PATCH"#,
         ));
 
         let patch = parse_patch(&patch).unwrap();
-        let chunks = match patch.as_slice() {
+        let chunks = match patch.hunks.as_slice() {
            [Hunk::UpdateFile { chunks, .. }] => chunks,
            _ => panic!("Expected a single UpdateFile hunk"),
        };
@@ -1110,7 +1150,7 @@ PATCH"#,
 
         // Extract chunks then build the unified diff.
         let parsed = parse_patch(&patch).unwrap();
-        let chunks = match parsed.as_slice() {
+        let chunks = match parsed.hunks.as_slice() {
            [Hunk::UpdateFile { chunks, .. }] => chunks,
            _ => panic!("Expected a single UpdateFile hunk"),
        };
@@ -1193,6 +1233,8 @@ g
                     new_content: "updated session directory content\n".to_string(),
                 },
             )]),
+            patch: argv[1].clone(),
+            cwd: session_dir.path().to_path_buf(),
         })
     );
 }
```

```diff
@@ -22,6 +22,7 @@
 //!
 //! The parser below is a little more lenient than the explicit spec and allows for
 //! leading/trailing whitespace around patch markers.
+use crate::ApplyPatchArgs;
 use std::path::Path;
 use std::path::PathBuf;
 
@@ -102,7 +103,7 @@ pub struct UpdateFileChunk {
     pub is_end_of_file: bool,
 }
 
-pub fn parse_patch(patch: &str) -> Result<Vec<Hunk>, ParseError> {
+pub fn parse_patch(patch: &str) -> Result<ApplyPatchArgs, ParseError> {
     let mode = if PARSE_IN_STRICT_MODE {
         ParseMode::Strict
     } else {
@@ -150,7 +151,7 @@ enum ParseMode {
     Lenient,
 }
 
-fn parse_patch_text(patch: &str, mode: ParseMode) -> Result<Vec<Hunk>, ParseError> {
+fn parse_patch_text(patch: &str, mode: ParseMode) -> Result<ApplyPatchArgs, ParseError> {
     let lines: Vec<&str> = patch.trim().lines().collect();
     let lines: &[&str] = match check_patch_boundaries_strict(&lines) {
         Ok(()) => &lines,
@@ -173,7 +174,8 @@ fn parse_patch_text(patch: &str, mode: ParseMode) -> Result<Vec<Hunk>, ParseErro
         line_number += hunk_lines;
         remaining_lines = &remaining_lines[hunk_lines..]
     }
-    Ok(hunks)
+    let patch = lines.join("\n");
+    Ok(ApplyPatchArgs { hunks, patch })
 }
 
 /// Checks the start and end lines of the patch text for `apply_patch`,
@@ -425,6 +427,7 @@ fn parse_update_file_chunk(
 }
 
 #[test]
+#[allow(clippy::unwrap_used)]
 fn test_parse_patch() {
     assert_eq!(
         parse_patch_text("bad", ParseMode::Strict),
@@ -455,8 +458,10 @@ fn test_parse_patch() {
            "*** Begin Patch\n\
            *** End Patch",
            ParseMode::Strict
-        ),
-        Ok(Vec::new())
+        )
+        .unwrap()
+        .hunks,
+        Vec::new()
     );
     assert_eq!(
         parse_patch_text(
@@ -472,8 +477,10 @@ fn test_parse_patch() {
            + return 123\n\
            *** End Patch",
            ParseMode::Strict
-        ),
-        Ok(vec![
+        )
+        .unwrap()
+        .hunks,
+        vec![
            AddFile {
                path: PathBuf::from("path/add.py"),
                contents: "abc\ndef\n".to_string()
@@ -491,7 +498,7 @@ fn test_parse_patch() {
                    is_end_of_file: false
                }]
            }
-        ])
+        ]
     );
     // Update hunk followed by another hunk (Add File).
     assert_eq!(
@@ -504,8 +511,10 @@ fn test_parse_patch() {
            +content\n\
            *** End Patch",
            ParseMode::Strict
-        ),
-        Ok(vec![
+        )
+        .unwrap()
+        .hunks,
+        vec![
            UpdateFile {
                path: PathBuf::from("file.py"),
                move_path: None,
@@ -520,7 +529,7 @@ fn test_parse_patch() {
                path: PathBuf::from("other.py"),
                contents: "content\n".to_string()
            }
-        ])
+        ]
     );
 
     // Update hunk without an explicit @@ header for the first chunk should parse.
@@ -533,8 +542,10 @@ fn test_parse_patch() {
            +bar
            *** End Patch"#,
            ParseMode::Strict
-        ),
-        Ok(vec![UpdateFile {
+        )
+        .unwrap()
+        .hunks,
+        vec![UpdateFile {
            path: PathBuf::from("file2.py"),
            move_path: None,
            chunks: vec![UpdateFileChunk {
@@ -543,7 +554,7 @@ fn test_parse_patch() {
                new_lines: vec!["import foo".to_string(), "bar".to_string()],
                is_end_of_file: false,
            }],
-        }])
+        }]
     );
 }
 
@@ -574,7 +585,10 @@ fn test_parse_patch_lenient() {
     );
     assert_eq!(
         parse_patch_text(&patch_text_in_heredoc, ParseMode::Lenient),
-        Ok(expected_patch.clone())
+        Ok(ApplyPatchArgs {
+            hunks: expected_patch.clone(),
+            patch: patch_text.to_string()
+        })
     );
 
     let patch_text_in_single_quoted_heredoc = format!("<<'EOF'\n{patch_text}\nEOF\n");
@@ -584,7 +598,10 @@ fn test_parse_patch_lenient() {
     );
     assert_eq!(
         parse_patch_text(&patch_text_in_single_quoted_heredoc, ParseMode::Lenient),
-        Ok(expected_patch.clone())
+        Ok(ApplyPatchArgs {
+            hunks: expected_patch.clone(),
+            patch: patch_text.to_string()
+        })
     );
 
     let patch_text_in_double_quoted_heredoc = format!("<<\"EOF\"\n{patch_text}\nEOF\n");
@@ -594,7 +611,10 @@ fn test_parse_patch_lenient() {
     );
     assert_eq!(
         parse_patch_text(&patch_text_in_double_quoted_heredoc, ParseMode::Lenient),
-        Ok(expected_patch.clone())
+        Ok(ApplyPatchArgs {
+            hunks: expected_patch.clone(),
+            patch: patch_text.to_string()
+        })
     );
 
     let patch_text_in_mismatched_quotes_heredoc = format!("<<\"EOF'\n{patch_text}\nEOF\n");
```