feat: image resizing (#5446)

Add image resizing on the client side to reduce load on the API
This commit is contained in:
jif-oai
2025-10-27 16:58:10 +00:00
committed by GitHub
parent 775fbba6e0
commit aea7610c76
11 changed files with 684 additions and 28 deletions

23
codex-rs/Cargo.lock generated
View File

@@ -1080,6 +1080,7 @@ dependencies = [
"eventsource-stream",
"futures",
"http",
"image",
"indexmap 2.10.0",
"landlock",
"libc",
@@ -1335,6 +1336,7 @@ dependencies = [
"anyhow",
"base64",
"codex-git-tooling",
"codex-utils-image",
"icu_decimal",
"icu_locale_core",
"mcp-types",
@@ -1479,6 +1481,27 @@ dependencies = [
"vt100",
]
[[package]]
name = "codex-utils-cache"
version = "0.0.0"
dependencies = [
"lru",
"sha1",
"tokio",
]
[[package]]
name = "codex-utils-image"
version = "0.0.0"
dependencies = [
"base64",
"codex-utils-cache",
"image",
"tempfile",
"thiserror 2.0.16",
"tokio",
]
[[package]]
name = "codex-utils-json-to-toml"
version = "0.0.0"

View File

@@ -32,9 +32,11 @@ members = [
"otel",
"tui",
"git-apply",
"utils/cache",
"utils/image",
"utils/json-to-toml",
"utils/readiness",
"utils/pty",
"utils/readiness",
"utils/string",
"utils/tokenizer",
]
@@ -77,6 +79,8 @@ codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-utils-cache = { path = "utils/cache" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
@@ -129,6 +133,7 @@ landlock = "0.4.1"
lazy_static = "1"
libc = "0.2.175"
log = "0.4"
lru = "0.12.5"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"

View File

@@ -97,6 +97,7 @@ assert_cmd = { workspace = true }
assert_matches = { workspace = true }
core_test_support = { workspace = true }
escargot = { workspace = true }
image = { workspace = true, features = ["jpeg", "png"] }
maplit = { workspace = true }
predicates = { workspace = true }
pretty_assertions = { workspace = true }

View File

@@ -19,6 +19,10 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use image::GenericImageView;
use image::ImageBuffer;
use image::Rgba;
use image::load_from_memory;
use serde_json::Value;
use wiremock::matchers::any;
@@ -49,6 +53,88 @@ fn extract_output_text(item: &Value) -> Option<&str> {
})
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
// End-to-end check that a local image attached to a user turn is resized
// client-side before upload: a 4096x1024 PNG must arrive at the mock API as a
// base64 PNG data URL no larger than 2048x768.
async fn user_turn_with_local_image_attaches_image() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = start_mock_server().await;
    let TestCodex {
        codex,
        cwd,
        session_configured,
        ..
    } = test_codex().build(&server).await?;
    // Write a deliberately oversized solid-color PNG into the session cwd.
    let rel_path = "user-turn/example.png";
    let abs_path = cwd.path().join(rel_path);
    if let Some(parent) = abs_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    let image = ImageBuffer::from_pixel(4096, 1024, Rgba([20u8, 40, 60, 255]));
    image.save(&abs_path)?;
    // Canned SSE response so the turn completes without a real model.
    let response = sse(vec![
        ev_response_created("resp-1"),
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-1"),
    ]);
    let mock = responses::mount_sse_once_match(&server, any(), response).await;
    let session_model = session_configured.model.clone();
    codex
        .submit(Op::UserTurn {
            items: vec![UserInput::LocalImage {
                path: abs_path.clone(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;
    wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;
    // Inspect the captured request body and pull out the input_image span.
    let body = mock.single_request().body_json();
    let image_message =
        find_image_message(&body).expect("pending input image message not included in request");
    let image_url = image_message
        .get("content")
        .and_then(Value::as_array)
        .and_then(|content| {
            content.iter().find_map(|span| {
                if span.get("type").and_then(Value::as_str) == Some("input_image") {
                    span.get("image_url").and_then(Value::as_str)
                } else {
                    None
                }
            })
        })
        .expect("image_url present");
    // Split "data:image/png;base64,<payload>" and decode the payload.
    let (prefix, encoded) = image_url
        .split_once(',')
        .expect("image url contains data prefix");
    assert_eq!(prefix, "data:image/png;base64");
    let decoded = BASE64_STANDARD
        .decode(encoded)
        .expect("image data decodes from base64 for request");
    let resized = load_from_memory(&decoded).expect("load resized image");
    let (width, height) = resized.dimensions();
    // Within the resize bounds, and strictly smaller than the original.
    assert!(width <= 2048);
    assert!(height <= 768);
    assert!(width < 4096);
    assert!(height < 1024);
    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
@@ -67,8 +153,8 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
if let Some(parent) = abs_path.parent() {
std::fs::create_dir_all(parent)?;
}
let image_bytes = b"fake_png_bytes".to_vec();
std::fs::write(&abs_path, &image_bytes)?;
let image = ImageBuffer::from_pixel(4096, 1024, Rgba([255u8, 0, 0, 255]));
image.save(&abs_path)?;
let call_id = "view-image-call";
let arguments = serde_json::json!({ "path": rel_path }).to_string();
@@ -143,11 +229,20 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
})
.expect("image_url present");
let expected_image_url = format!(
"data:image/png;base64,{}",
BASE64_STANDARD.encode(&image_bytes)
);
assert_eq!(image_url, expected_image_url);
let (prefix, encoded) = image_url
.split_once(',')
.expect("image url contains data prefix");
assert_eq!(prefix, "data:image/png;base64");
let decoded = BASE64_STANDARD
.decode(encoded)
.expect("image data decodes from base64 for request");
let resized = load_from_memory(&decoded).expect("load resized image");
let (resized_width, resized_height) = resized.dimensions();
assert!(resized_width <= 2048);
assert!(resized_height <= 768);
assert!(resized_width < 4096);
assert!(resized_height < 1024);
Ok(())
}

View File

@@ -14,6 +14,7 @@ workspace = true
codex-git-tooling = { workspace = true }
base64 = { workspace = true }
codex-utils-image = { workspace = true }
icu_decimal = { workspace = true }
icu_locale_core = { workspace = true }
mcp-types = { workspace = true }

View File

@@ -1,6 +1,7 @@
use std::collections::HashMap;
use base64::Engine;
use codex_utils_image::load_and_resize_to_fit;
use mcp_types::CallToolResult;
use serde::Deserialize;
use serde::Deserializer;
@@ -10,6 +11,7 @@ use ts_rs::TS;
use crate::user_input::UserInput;
use codex_git_tooling::GhostCommit;
use codex_utils_image::error::ImageProcessingError;
use schemars::JsonSchema;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, JsonSchema, TS)]
@@ -134,6 +136,19 @@ fn should_serialize_reasoning_content(content: &Option<Vec<ReasoningItemContent>
}
}
/// Builds a plain-text content item telling the model that a local image at
/// `path` could not be read, so the attachment failure is visible in the
/// conversation instead of being silently dropped.
fn local_image_error_placeholder(
    path: &std::path::Path,
    error: impl std::fmt::Display,
) -> ContentItem {
    ContentItem::InputText {
        text: format!(
            "Codex could not read the local image at `{}`: {}",
            path.display(),
            error
        ),
    }
}
impl From<ResponseInputItem> for ResponseItem {
fn from(item: ResponseInputItem) -> Self {
match item {
@@ -217,27 +232,40 @@ impl From<Vec<UserInput>> for ResponseInputItem {
role: "user".to_string(),
content: items
.into_iter()
.filter_map(|c| match c {
UserInput::Text { text } => Some(ContentItem::InputText { text }),
UserInput::Image { image_url } => Some(ContentItem::InputImage { image_url }),
UserInput::LocalImage { path } => match std::fs::read(&path) {
Ok(bytes) => {
let mime = mime_guess::from_path(&path)
.first()
.map(|m| m.essence_str().to_owned())
.unwrap_or_else(|| "image".to_string());
let encoded = base64::engine::general_purpose::STANDARD.encode(bytes);
Some(ContentItem::InputImage {
image_url: format!("data:{mime};base64,{encoded}"),
})
}
.map(|c| match c {
UserInput::Text { text } => ContentItem::InputText { text },
UserInput::Image { image_url } => ContentItem::InputImage { image_url },
UserInput::LocalImage { path } => match load_and_resize_to_fit(&path) {
Ok(image) => ContentItem::InputImage {
image_url: image.into_data_url(),
},
Err(err) => {
tracing::warn!(
"Skipping image {} could not read file: {}",
path.display(),
err
);
None
tracing::warn!("Failed to resize image {}: {}", path.display(), err);
if matches!(&err, ImageProcessingError::Read { .. }) {
local_image_error_placeholder(&path, &err)
} else {
match std::fs::read(&path) {
Ok(bytes) => {
let mime = mime_guess::from_path(&path)
.first()
.map(|m| m.essence_str().to_owned())
.unwrap_or_else(|| "image".to_string());
let encoded =
base64::engine::general_purpose::STANDARD.encode(bytes);
ContentItem::InputImage {
image_url: format!("data:{mime};base64,{encoded}"),
}
}
Err(read_err) => {
tracing::warn!(
"Skipping image {} could not read file: {}",
path.display(),
read_err
);
local_image_error_placeholder(&path, &read_err)
}
}
}
}
},
})
@@ -326,6 +354,7 @@ impl std::ops::Deref for FunctionCallOutputPayload {
mod tests {
use super::*;
use anyhow::Result;
use tempfile::tempdir;
#[test]
fn serializes_success_as_plain_string() -> Result<()> {
@@ -383,4 +412,37 @@ mod tests {
);
Ok(())
}
#[test]
// A LocalImage pointing at a missing file must not be dropped: conversion
// should produce a single InputText placeholder naming the path and the
// read failure.
fn local_image_read_error_adds_placeholder() -> Result<()> {
    let dir = tempdir()?;
    // Path is never created, so the read is guaranteed to fail.
    let missing_path = dir.path().join("missing-image.png");
    let item = ResponseInputItem::from(vec![UserInput::LocalImage {
        path: missing_path.clone(),
    }]);
    match item {
        ResponseInputItem::Message { content, .. } => {
            assert_eq!(content.len(), 1);
            match &content[0] {
                ContentItem::InputText { text } => {
                    let display_path = missing_path.display().to_string();
                    assert!(
                        text.contains(&display_path),
                        "placeholder should mention missing path: {text}"
                    );
                    assert!(
                        text.contains("could not read"),
                        "placeholder should mention read issue: {text}"
                    );
                }
                other => panic!("expected placeholder text but found {other:?}"),
            }
        }
        other => panic!("expected message response but got {other:?}"),
    }
    Ok(())
}
}

15
codex-rs/utils/cache/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,15 @@
[package]
name = "codex-utils-cache"
version.workspace = true
edition.workspace = true
[lints]
workspace = true
[dependencies]
lru = { workspace = true }
sha1 = { workspace = true }
tokio = { workspace = true, features = ["sync", "rt"] }
[dev-dependencies]
tokio = { workspace = true, features = ["macros", "rt", "rt-multi-thread"] }

159
codex-rs/utils/cache/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,159 @@
use std::borrow::Borrow;
use std::hash::Hash;
use std::num::NonZeroUsize;
use lru::LruCache;
use sha1::Digest;
use sha1::Sha1;
use tokio::sync::Mutex;
use tokio::sync::MutexGuard;
/// A minimal LRU cache protected by a Tokio mutex.
///
/// All methods lock synchronously (see `lock_blocking`), so this type is
/// intended for callers that need cache access from both sync and async
/// code paths without an async API.
pub struct BlockingLruCache<K, V> {
    // Tokio mutex so the guard type can be exposed via `blocking_lock`.
    inner: Mutex<LruCache<K, V>>,
}
impl<K, V> BlockingLruCache<K, V>
where
    K: Eq + Hash,
{
    /// Creates a cache with the provided non-zero capacity.
    #[must_use]
    pub fn new(capacity: NonZeroUsize) -> Self {
        Self {
            inner: Mutex::new(LruCache::new(capacity)),
        }
    }
    /// Returns a clone of the cached value for `key`, or computes and inserts it.
    ///
    /// NOTE: the `value` factory runs while the cache mutex is held, so other
    /// cache users block for the duration of the computation; keep factories
    /// cheap and never re-enter this cache from inside one (deadlock).
    pub fn get_or_insert_with(&self, key: K, value: impl FnOnce() -> V) -> V
    where
        V: Clone,
    {
        let mut guard = lock_blocking(&self.inner);
        // `get` also promotes the entry to most-recently-used.
        if let Some(v) = guard.get(&key) {
            return v.clone();
        }
        let v = value();
        // Insert and return a clone to keep ownership in the cache.
        guard.put(key, v.clone());
        v
    }
    /// Like `get_or_insert_with`, but the value factory may fail.
    ///
    /// On `Err` nothing is cached; the same lock-holding caveat applies.
    pub fn get_or_try_insert_with<E>(
        &self,
        key: K,
        value: impl FnOnce() -> Result<V, E>,
    ) -> Result<V, E>
    where
        V: Clone,
    {
        let mut guard = lock_blocking(&self.inner);
        if let Some(v) = guard.get(&key) {
            return Ok(v.clone());
        }
        let v = value()?;
        guard.put(key, v.clone());
        Ok(v)
    }
    /// Builds a cache if `capacity` is non-zero, returning `None` otherwise.
    #[must_use]
    pub fn try_with_capacity(capacity: usize) -> Option<Self> {
        NonZeroUsize::new(capacity).map(Self::new)
    }
    /// Returns a clone of the cached value corresponding to `key`, if present.
    /// Also promotes the entry to most-recently-used.
    pub fn get<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
        V: Clone,
    {
        lock_blocking(&self.inner).get(key).cloned()
    }
    /// Inserts `value` for `key`, returning the previous entry if it existed.
    /// May evict the least-recently-used entry when at capacity.
    pub fn insert(&self, key: K, value: V) -> Option<V> {
        lock_blocking(&self.inner).put(key, value)
    }
    /// Removes the entry for `key` if it exists, returning it.
    pub fn remove<Q>(&self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq + ?Sized,
    {
        lock_blocking(&self.inner).pop(key)
    }
    /// Clears all entries from the cache.
    pub fn clear(&self) {
        lock_blocking(&self.inner).clear();
    }
    /// Executes `callback` with a mutable reference to the underlying cache.
    /// The mutex is held for the whole callback.
    pub fn with_mut<R>(&self, callback: impl FnOnce(&mut LruCache<K, V>) -> R) -> R {
        let mut guard = lock_blocking(&self.inner);
        callback(&mut guard)
    }
    /// Provides direct access to the cache guard for advanced use cases.
    /// The caller is responsible for not holding the guard across `.await`.
    pub fn blocking_lock(&self) -> MutexGuard<'_, LruCache<K, V>> {
        lock_blocking(&self.inner)
    }
}
// Acquires the mutex synchronously whether or not we are on a Tokio runtime.
// Inside a runtime, `blocking_lock` alone would panic, so it is wrapped in
// `block_in_place` to tell the scheduler this thread will block.
//
// NOTE(review): `block_in_place` itself panics on a current-thread runtime
// (tokio only supports it on the multi-thread flavor) — confirm all callers
// run on multi-thread runtimes or add a `runtime_flavor()` guard.
fn lock_blocking<K, V>(m: &Mutex<LruCache<K, V>>) -> MutexGuard<'_, LruCache<K, V>>
where
    K: Eq + Hash,
{
    match tokio::runtime::Handle::try_current() {
        Ok(_) => tokio::task::block_in_place(|| m.blocking_lock()),
        Err(_) => m.blocking_lock(),
    }
}
/// Computes the SHA-1 digest of `bytes`.
///
/// Useful for content-based cache keys when you want to avoid staleness
/// caused by path-only keys.
#[must_use]
pub fn sha1_digest(bytes: &[u8]) -> [u8; 20] {
    // One-shot digest; the output array conversion is provided by the
    // digest crate's fixed-size output type.
    Sha1::digest(bytes).into()
}
#[cfg(test)]
mod tests {
    use super::BlockingLruCache;
    use std::num::NonZeroUsize;
    #[tokio::test(flavor = "multi_thread")]
    // Basic round-trip: miss before insert, hit after.
    async fn stores_and_retrieves_values() {
        let cache = BlockingLruCache::new(NonZeroUsize::new(2).expect("capacity"));
        assert!(cache.get(&"first").is_none());
        cache.insert("first", 1);
        assert_eq!(cache.get(&"first"), Some(1));
    }
    #[tokio::test(flavor = "multi_thread")]
    // With capacity 2, touching "a" makes "b" the LRU entry, so inserting
    // "c" must evict "b" and keep "a" and "c".
    async fn evicts_least_recently_used() {
        let cache = BlockingLruCache::new(NonZeroUsize::new(2).expect("capacity"));
        cache.insert("a", 1);
        cache.insert("b", 2);
        assert_eq!(cache.get(&"a"), Some(1));
        cache.insert("c", 3);
        assert!(cache.get(&"b").is_none());
        assert_eq!(cache.get(&"a"), Some(1));
        assert_eq!(cache.get(&"c"), Some(3));
    }
}

View File

@@ -0,0 +1,18 @@
[package]
name = "codex-utils-image"
version.workspace = true
edition.workspace = true
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
image = { workspace = true, features = ["jpeg", "png"] }
codex-utils-cache = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs", "rt", "rt-multi-thread", "macros"] }
[dev-dependencies]
image = { workspace = true, features = ["jpeg", "png"] }
tempfile = { workspace = true }

View File

@@ -0,0 +1,25 @@
use image::ImageFormat;
use std::path::PathBuf;
use thiserror::Error;
/// Errors produced while loading, decoding, or re-encoding an image for upload.
#[derive(Debug, Error)]
pub enum ImageProcessingError {
    /// Reading the image file from disk failed (missing file, permissions, ...).
    #[error("failed to read image at {path}: {source}")]
    Read {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    /// The bytes were read but could not be decoded as an image.
    #[error("failed to decode image at {path}: {source}")]
    Decode {
        path: PathBuf,
        #[source]
        source: image::ImageError,
    },
    /// Re-encoding the (possibly resized) image to `format` failed.
    #[error("failed to encode image as {format:?}: {source}")]
    Encode {
        format: ImageFormat,
        #[source]
        source: image::ImageError,
    },
}

View File

@@ -0,0 +1,252 @@
use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::LazyLock;
use crate::error::ImageProcessingError;
use base64::Engine;
use base64::engine::general_purpose::STANDARD as BASE64_STANDARD;
use codex_utils_cache::BlockingLruCache;
use codex_utils_cache::sha1_digest;
use image::ColorType;
use image::DynamicImage;
use image::GenericImageView;
use image::ImageEncoder;
use image::ImageFormat;
use image::codecs::jpeg::JpegEncoder;
use image::codecs::png::PngEncoder;
use image::imageops::FilterType;
/// Maximum width used when resizing images before uploading.
pub const MAX_WIDTH: u32 = 2048;
/// Maximum height used when resizing images before uploading.
pub const MAX_HEIGHT: u32 = 768;
pub mod error;
/// An image already encoded as PNG or JPEG bytes, ready to be embedded in a
/// request as a data URL.
#[derive(Debug, Clone)]
pub struct EncodedImage {
    // Encoded file bytes (PNG or JPEG, per `mime`).
    pub bytes: Vec<u8>,
    // MIME type matching `bytes`, e.g. "image/png".
    pub mime: String,
    // Pixel dimensions of the encoded image (post-resize if it was resized).
    pub width: u32,
    pub height: u32,
}
impl EncodedImage {
    /// Consumes the image and renders it as a `data:<mime>;base64,<payload>` URL.
    pub fn into_data_url(self) -> String {
        let payload = BASE64_STANDARD.encode(&self.bytes);
        let mut url = String::with_capacity("data:;base64,".len() + self.mime.len() + payload.len());
        url.push_str("data:");
        url.push_str(&self.mime);
        url.push_str(";base64,");
        url.push_str(&payload);
        url
    }
}
// Process-wide LRU of processed images, keyed by SHA-1 of the raw file bytes
// so edits to a file are picked up even when the path is unchanged. Capacity
// is the constant 32; the `unwrap_or(MIN)` fallback can never fire.
static IMAGE_CACHE: LazyLock<BlockingLruCache<[u8; 20], EncodedImage>> =
    LazyLock::new(|| BlockingLruCache::new(NonZeroUsize::new(32).unwrap_or(NonZeroUsize::MIN)));
/// Loads the image at `path`, downscaling it to fit within
/// `MAX_WIDTH` x `MAX_HEIGHT` (aspect ratio preserved) before encoding.
///
/// Results are cached by content hash, so repeated calls for unchanged files
/// return the cached encoding. PNG and JPEG inputs keep their format; any
/// other decodable format is re-encoded as PNG.
///
/// # Errors
/// Returns `ImageProcessingError::{Read, Decode, Encode}` for the respective
/// failure stage.
pub fn load_and_resize_to_fit(path: &Path) -> Result<EncodedImage, ImageProcessingError> {
    let path_buf = path.to_path_buf();
    let file_bytes = read_file_bytes(path, &path_buf)?;
    // Content-based key: stale entries are impossible, unlike path keys.
    let key = sha1_digest(&file_bytes);
    IMAGE_CACHE.get_or_try_insert_with(key, move || {
        // Only PNG and JPEG are preserved as-is; everything else re-encodes.
        let format = match image::guess_format(&file_bytes) {
            Ok(ImageFormat::Png) => Some(ImageFormat::Png),
            Ok(ImageFormat::Jpeg) => Some(ImageFormat::Jpeg),
            _ => None,
        };
        let dynamic = image::load_from_memory(&file_bytes).map_err(|source| {
            ImageProcessingError::Decode {
                path: path_buf.clone(),
                source,
            }
        })?;
        let (width, height) = dynamic.dimensions();
        let encoded = if width <= MAX_WIDTH && height <= MAX_HEIGHT {
            if let Some(format) = format {
                // Already small enough and in a supported format: pass the
                // original bytes through untouched.
                let mime = format_to_mime(format);
                EncodedImage {
                    bytes: file_bytes,
                    mime,
                    width,
                    height,
                }
            } else {
                // Small enough but unsupported format: re-encode to PNG at
                // the original dimensions.
                let (bytes, output_format) = encode_image(&dynamic, ImageFormat::Png)?;
                let mime = format_to_mime(output_format);
                EncodedImage {
                    bytes,
                    mime,
                    width,
                    height,
                }
            }
        } else {
            // Too large: `resize` fits within the bounds while preserving
            // aspect ratio, then re-encode in the original format if it was
            // PNG/JPEG, otherwise PNG.
            let resized = dynamic.resize(MAX_WIDTH, MAX_HEIGHT, FilterType::Triangle);
            let target_format = format.unwrap_or(ImageFormat::Png);
            let (bytes, output_format) = encode_image(&resized, target_format)?;
            let mime = format_to_mime(output_format);
            EncodedImage {
                bytes,
                mime,
                width: resized.width(),
                height: resized.height(),
            }
        };
        Ok(encoded)
    })
}
/// Reads the file at `path`, wrapping any I/O failure in
/// `ImageProcessingError::Read` tagged with `path_for_error`.
///
/// Runtime awareness: inside a *multi-thread* Tokio runtime we wrap the
/// blocking read in `block_in_place` so the scheduler can compensate.
/// `block_in_place` panics on the current-thread runtime flavor, so in that
/// case (and outside any runtime) we read directly — a plain blocking read
/// never panics, it merely blocks the calling thread.
fn read_file_bytes(path: &Path, path_for_error: &Path) -> Result<Vec<u8>, ImageProcessingError> {
    let read = || {
        std::fs::read(path).map_err(|source| ImageProcessingError::Read {
            path: path_for_error.to_path_buf(),
            source,
        })
    };
    match tokio::runtime::Handle::try_current() {
        Ok(handle)
            if handle.runtime_flavor() == tokio::runtime::RuntimeFlavor::MultiThread =>
        {
            tokio::task::block_in_place(read)
        }
        // Current-thread runtime or no runtime: read synchronously.
        _ => read(),
    }
}
/// Encodes `image` into an in-memory buffer, honoring `preferred_format`
/// only when it is JPEG; every other request falls back to PNG (RGBA8).
/// Returns the encoded bytes together with the format actually used.
fn encode_image(
    image: &DynamicImage,
    preferred_format: ImageFormat,
) -> Result<(Vec<u8>, ImageFormat), ImageProcessingError> {
    let target_format = if preferred_format == ImageFormat::Jpeg {
        ImageFormat::Jpeg
    } else {
        ImageFormat::Png
    };
    let wrap_encode_err = |source| ImageProcessingError::Encode {
        format: target_format,
        source,
    };
    let mut out = Vec::new();
    if target_format == ImageFormat::Jpeg {
        // Quality 85: good size/fidelity trade-off for upload.
        JpegEncoder::new_with_quality(&mut out, 85)
            .encode_image(image)
            .map_err(wrap_encode_err)?;
    } else {
        // PNG path: encode the raw RGBA8 pixel buffer.
        let rgba = image.to_rgba8();
        PngEncoder::new(&mut out)
            .write_image(
                rgba.as_raw(),
                image.width(),
                image.height(),
                ColorType::Rgba8.into(),
            )
            .map_err(wrap_encode_err)?;
    }
    Ok((out, target_format))
}
/// Maps an encoding format to its MIME type; anything other than JPEG is
/// reported as PNG, which matches the fallback used when encoding.
fn format_to_mime(format: ImageFormat) -> String {
    if matches!(format, ImageFormat::Jpeg) {
        "image/jpeg".to_string()
    } else {
        "image/png".to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use image::GenericImageView;
    use image::ImageBuffer;
    use image::Rgba;
    use tempfile::NamedTempFile;
    #[tokio::test(flavor = "multi_thread")]
    // A PNG already within bounds must pass through byte-for-byte.
    async fn returns_original_image_when_within_bounds() {
        let temp_file = NamedTempFile::new().expect("temp file");
        let image = ImageBuffer::from_pixel(64, 32, Rgba([10u8, 20, 30, 255]));
        image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write png to temp file");
        let original_bytes = std::fs::read(temp_file.path()).expect("read written image");
        let encoded = load_and_resize_to_fit(temp_file.path()).expect("process image");
        assert_eq!(encoded.width, 64);
        assert_eq!(encoded.height, 32);
        assert_eq!(encoded.mime, "image/png");
        assert_eq!(encoded.bytes, original_bytes);
    }
    #[tokio::test(flavor = "multi_thread")]
    // An oversized image must come back within MAX_WIDTH x MAX_HEIGHT and the
    // reported dimensions must match the encoded bytes.
    async fn downscales_large_image() {
        let temp_file = NamedTempFile::new().expect("temp file");
        let image = ImageBuffer::from_pixel(4096, 2048, Rgba([200u8, 10, 10, 255]));
        image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write png to temp file");
        let processed = load_and_resize_to_fit(temp_file.path()).expect("process image");
        assert!(processed.width <= MAX_WIDTH);
        assert!(processed.height <= MAX_HEIGHT);
        let loaded =
            image::load_from_memory(&processed.bytes).expect("read resized bytes back into image");
        assert_eq!(loaded.dimensions(), (processed.width, processed.height));
    }
    #[tokio::test(flavor = "multi_thread")]
    // Non-image bytes must surface as a Decode error, not a panic.
    async fn fails_cleanly_for_invalid_images() {
        let temp_file = NamedTempFile::new().expect("temp file");
        std::fs::write(temp_file.path(), b"not an image").expect("write bytes");
        let err = load_and_resize_to_fit(temp_file.path()).expect_err("invalid image should fail");
        match err {
            ImageProcessingError::Decode { .. } => {}
            _ => panic!("unexpected error variant"),
        }
    }
    #[tokio::test(flavor = "multi_thread")]
    // Because the cache key is a content hash (not the path), rewriting the
    // same file with different contents must yield a fresh result.
    async fn reprocesses_updated_file_contents() {
        {
            // Shared global cache: clear to isolate from other tests.
            IMAGE_CACHE.clear();
        }
        let temp_file = NamedTempFile::new().expect("temp file");
        let first_image = ImageBuffer::from_pixel(32, 16, Rgba([20u8, 120, 220, 255]));
        first_image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write initial image");
        let first = load_and_resize_to_fit(temp_file.path()).expect("process first image");
        let second_image = ImageBuffer::from_pixel(96, 48, Rgba([50u8, 60, 70, 255]));
        second_image
            .save_with_format(temp_file.path(), ImageFormat::Png)
            .expect("write updated image");
        let second = load_and_resize_to_fit(temp_file.path()).expect("process updated image");
        assert_eq!(first.width, 32);
        assert_eq!(first.height, 16);
        assert_eq!(second.width, 96);
        assert_eq!(second.height, 48);
        assert_ne!(second.bytes, first.bytes);
    }
}
}