include fractional portion of chunk that exceeds stdout/stderr limit (#497)
I saw cases where the first chunk of output from `ls -R` was large
enough to exceed `MAX_OUTPUT_BYTES` or `MAX_OUTPUT_LINES`. In that case,
the loop in `createTruncatingCollector()` exited early without appending
anything to the `chunks` array, so the reported `stdout` of `ls -R` was
empty.
I asked Codex to add logic to handle this edge case and write a unit
test. I used this as my test:
```
./codex-cli/dist/cli.js -q 'what is the output of `ls -R`'
```
Now the output includes the actual directory listing, whereas before
this change I saw:
```
{"type":"function_call_output","call_id":"call_a2QhVt7HRJYKjb3dIc8w1aBB","output":"{\"output\":\"\\n\\n[Output truncated: too many lines or bytes]\",\"metadata\":{\"exit_code\":0,\"duration_seconds\":0.5}}"}
```
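
For context, the fix amounts to keeping the prefix of an oversized chunk that still fits under both budgets, rather than dropping the chunk entirely. Here is a minimal sketch of that idea, not the actual implementation in `create-truncating-collector.ts` (the `takeFittingSlice`, `remainingBytes`, and `remainingLines` names are illustrative):
```
// Sketch only: returns the prefix of `chunk` that still fits within the
// remaining byte and line budgets, instead of dropping the whole chunk.
function takeFittingSlice(
  chunk: Buffer,
  remainingBytes: number,
  remainingLines: number,
): { slice: Buffer; hitLimit: boolean } {
  // Cap by bytes first.
  let end = Math.min(chunk.length, Math.max(remainingBytes, 0));
  // Then cap by lines: keep at most `remainingLines` newline-terminated lines.
  if (remainingLines <= 0) {
    end = 0;
  } else {
    let lines = 0;
    for (let i = 0; i < end; i++) {
      if (chunk[i] === 0x0a /* "\n" */) {
        lines += 1;
        if (lines === remainingLines) {
          end = i + 1; // keep the newline that ends the last allowed line
          break;
        }
      }
    }
  }
  return { slice: chunk.subarray(0, end), hitLimit: end < chunk.length };
}
```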
This commit adds a new test file,
`codex-cli/tests/create-truncating-collector.test.ts` (55 lines):
```
import { PassThrough } from "stream";
import { once } from "events";
import { describe, it, expect } from "vitest";
import { createTruncatingCollector } from "../src/utils/agent/sandbox/create-truncating-collector.js";

describe("createTruncatingCollector", () => {
  it("collects data under limits without truncation", async () => {
    const stream = new PassThrough();
    const collector = createTruncatingCollector(stream, 100, 10);
    const data = "line1\nline2\n";
    stream.end(Buffer.from(data));
    await once(stream, "end");
    expect(collector.getString()).toBe(data);
    expect(collector.hit).toBe(false);
  });

  it("truncates data over byte limit", async () => {
    const stream = new PassThrough();
    const collector = createTruncatingCollector(stream, 5, 100);
    stream.end(Buffer.from("hello world"));
    await once(stream, "end");
    expect(collector.getString()).toBe("hello");
    expect(collector.hit).toBe(true);
  });

  it("truncates data over line limit", async () => {
    const stream = new PassThrough();
    const collector = createTruncatingCollector(stream, 1000, 2);
    const data = "a\nb\nc\nd\n";
    stream.end(Buffer.from(data));
    await once(stream, "end");
    expect(collector.getString()).toBe("a\nb\n");
    expect(collector.hit).toBe(true);
  });

  it("stops collecting after limit is hit across multiple writes", async () => {
    const stream = new PassThrough();
    const collector = createTruncatingCollector(stream, 10, 2);
    stream.write(Buffer.from("1\n"));
    stream.write(Buffer.from("2\n3\n4\n"));
    stream.end();
    await once(stream, "end");
    expect(collector.getString()).toBe("1\n2\n");
    expect(collector.hit).toBe(true);
  });

  it("handles zero limits", async () => {
    const stream = new PassThrough();
    const collector = createTruncatingCollector(stream, 0, 0);
    stream.end(Buffer.from("anything\n"));
    await once(stream, "end");
    expect(collector.getString()).toBe("");
    expect(collector.hit).toBe(true);
  });
});
```
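Since the file imports from `vitest`, it should run with the package's usual vitest setup, e.g. `npx vitest run tests/create-truncating-collector.test.ts` from inside `codex-cli/` (assuming vitest is already a dev dependency there).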