eframe: capture a screenshot using `Frame::request_screenshot`

Co-authored-by: Emil Ernerfeldt <emil.ernerfeldt@gmail.com>
This commit is contained in:
amfaber 2023-03-29 16:34:22 +02:00 committed by GitHub
parent 74d43bfa17
commit 870264b005
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 384 additions and 73 deletions

1
Cargo.lock generated
View File

@ -3179,6 +3179,7 @@ name = "screenshot"
version = "0.1.0"
dependencies = [
"eframe",
"image",
"itertools",
]

View File

@ -5,6 +5,7 @@ NOTE: [`egui-winit`](../egui-winit/CHANGELOG.md), [`egui_glium`](../egui_glium/C
## Unreleased
* Add `Frame::request_screenshot` and `Frame::screenshot` to communicate to the backend that a screenshot of the current frame should be exposed by `Frame` during `App::post_rendering` ([#2676](https://github.com/emilk/egui/pull/2676))
## 0.21.3 - 2023-02-15

View File

@ -197,7 +197,7 @@ pub trait App {
/// Called each time after the rendering the UI.
///
/// Can be used to access pixel data with `get_pixels`
/// Can be used to access pixel data with [`Frame::screenshot`]
fn post_rendering(&mut self, _window_size_px: [u32; 2], _frame: &Frame) {}
}
@ -674,6 +674,11 @@ pub struct Frame {
/// Can be used to manage GPU resources for custom rendering with WGPU using [`egui::PaintCallback`]s.
#[cfg(feature = "wgpu")]
pub(crate) wgpu_render_state: Option<egui_wgpu::RenderState>,
/// If [`Frame::request_screenshot`] was called during a frame, this field will store the screenshot
/// such that it can be retrieved during [`App::post_rendering`] with [`Frame::screenshot`]
#[cfg(not(target_arch = "wasm32"))]
pub(crate) screenshot: std::cell::Cell<Option<egui::ColorImage>>,
}
impl Frame {
@ -695,6 +700,66 @@ impl Frame {
self.storage.as_deref()
}
/// Request the current frame's pixel data. Needs to be retrieved by calling [`Frame::screenshot`]
/// during [`App::post_rendering`].
///
/// The rendering backend reads and clears this flag after painting the frame,
/// storing the captured image for [`Frame::screenshot`] to pick up.
#[cfg(not(target_arch = "wasm32"))]
pub fn request_screenshot(&mut self) {
self.output.screenshot_requested = true;
}
/// Cancel a request made with [`Frame::request_screenshot`].
///
/// Simply clears the `screenshot_requested` flag; has no effect if no request is pending.
#[cfg(not(target_arch = "wasm32"))]
pub fn cancel_screenshot_request(&mut self) {
self.output.screenshot_requested = false;
}
/// During [`App::post_rendering`], use this to retrieve the pixel data that was requested during
/// [`App::update`] via [`Frame::request_screenshot`].
///
/// Returns None if:
/// * Called in [`App::update`]
/// * [`Frame::request_screenshot`] wasn't called on this frame during [`App::update`]
/// * The rendering backend doesn't support this feature (yet). Currently implemented for wgpu and glow, but not with wasm as target.
/// * Retrieving the data was unsuccessful in some way.
///
/// Note that this *takes* the screenshot out of the [`Frame`] (the backing `Cell` is emptied),
/// so a second call during the same [`App::post_rendering`] will return `None`.
///
/// See also [`egui::ColorImage::region`]
///
/// ## Example generating a capture of everything within a square of 100 pixels located at the top left of the app and saving it with the [`image`](https://crates.io/crates/image) crate:
/// ```
/// struct MyApp;
///
/// impl eframe::App for MyApp {
/// fn update(&mut self, ctx: &egui::Context, frame: &mut eframe::Frame) {
/// // In real code the app would render something here
/// frame.request_screenshot();
/// // Things that are added to the frame after the call to
/// // request_screenshot() will still be included.
/// }
///
/// fn post_rendering(&mut self, _window_size: [u32; 2], frame: &eframe::Frame) {
/// if let Some(screenshot) = frame.screenshot() {
/// let pixels_per_point = frame.info().native_pixels_per_point;
/// let region = egui::Rect::from_two_pos(
/// egui::Pos2::ZERO,
/// egui::Pos2{ x: 100., y: 100. },
/// );
/// let top_left_corner = screenshot.region(&region, pixels_per_point);
/// image::save_buffer(
/// "top_left.png",
/// top_left_corner.as_raw(),
/// top_left_corner.width() as u32,
/// top_left_corner.height() as u32,
/// image::ColorType::Rgba8,
/// ).unwrap();
/// }
/// }
/// }
/// ```
#[cfg(not(target_arch = "wasm32"))]
pub fn screenshot(&self) -> Option<egui::ColorImage> {
self.screenshot.take()
}
/// A place where you can store custom data in a way that persists when you restart the app.
pub fn storage_mut(&mut self) -> Option<&mut (dyn Storage + 'static)> {
self.storage.as_deref_mut()
@ -1061,5 +1126,8 @@ pub(crate) mod backend {
/// Set to some bool to maximize or unmaximize window.
#[cfg(not(target_arch = "wasm32"))]
pub maximized: Option<bool>,
#[cfg(not(target_arch = "wasm32"))]
pub screenshot_requested: bool,
}
}

View File

@ -128,7 +128,7 @@ pub fn window_builder<E>(
// Restore pos/size from previous session
window_settings.clamp_to_sane_values(largest_monitor_point_size(event_loop));
#[cfg(windows)]
window_settings.clamp_window_to_sane_position(&event_loop);
window_settings.clamp_window_to_sane_position(event_loop);
window_builder = window_settings.initialize_window(window_builder);
window_settings.inner_size_points()
} else {
@ -228,6 +228,7 @@ pub fn handle_app_output(
window_pos,
visible: _, // handled in post_present
always_on_top,
screenshot_requested: _, // handled by the rendering backend
minimized,
maximized,
} = app_output;
@ -349,6 +350,7 @@ impl EpiIntegration {
gl,
#[cfg(feature = "wgpu")]
wgpu_render_state,
screenshot: std::cell::Cell::new(None),
};
let mut egui_winit = egui_winit::State::new(event_loop);
@ -467,6 +469,7 @@ impl EpiIntegration {
tracing::debug!("App::on_close_event returned {}", self.close);
}
self.frame.output.visible = app_output.visible; // this is handled by post_present
self.frame.output.screenshot_requested = app_output.screenshot_requested;
handle_app_output(
window,
self.egui_ctx.pixels_per_point(),

View File

@ -803,6 +803,14 @@ mod glow_integration {
&textures_delta,
);
let screenshot_requested = &mut integration.frame.output.screenshot_requested;
if *screenshot_requested {
*screenshot_requested = false;
let screenshot = painter.read_screen_rgba(screen_size_in_pixels);
integration.frame.screenshot.set(Some(screenshot));
}
integration.post_rendering(app.as_mut(), window);
{
@ -820,11 +828,15 @@ mod glow_integration {
path.ends_with(".png"),
"Expected EFRAME_SCREENSHOT_TO to end with '.png', got {path:?}"
);
let [w, h] = screen_size_in_pixels;
let pixels = painter.read_screen_rgba(screen_size_in_pixels);
let image = image::RgbaImage::from_vec(w, h, pixels).unwrap();
let image = image::imageops::flip_vertical(&image);
image.save(&path).unwrap_or_else(|err| {
let screenshot = painter.read_screen_rgba(screen_size_in_pixels);
image::save_buffer(
&path,
screenshot.as_raw(),
screenshot.width() as u32,
screenshot.height() as u32,
image::ColorType::Rgba8,
)
.unwrap_or_else(|err| {
panic!("Failed to save screenshot to {path:?}: {err}");
});
eprintln!("Screenshot saved to {path:?}.");
@ -1229,12 +1241,17 @@ mod wgpu_integration {
integration.egui_ctx.tessellate(shapes)
};
painter.paint_and_update_textures(
let screenshot_requested = &mut integration.frame.output.screenshot_requested;
let screenshot = painter.paint_and_update_textures(
integration.egui_ctx.pixels_per_point(),
app.clear_color(&integration.egui_ctx.style().visuals),
&clipped_primitives,
&textures_delta,
*screenshot_requested,
);
*screenshot_requested = false;
integration.frame.screenshot.set(screenshot);
integration.post_rendering(app.as_mut(), window);
integration.post_present(window);

View File

@ -3,6 +3,7 @@ All notable changes to the `egui-wgpu` integration will be noted in this file.
## Unreleased
* Add `read_screen_rgba` to the egui-wgpu `Painter`, to allow for capturing the current frame when using wgpu. Used in conjunction with `Frame::request_screenshot`. ([#2676](https://github.com/emilk/egui/pull/2676))
## 0.21.0 - 2023-02-08
@ -12,7 +13,6 @@ All notable changes to the `egui-wgpu` integration will be noted in this file.
* `egui-wgpu` now only depends on `epaint` instead of the entire `egui` ([#2438](https://github.com/emilk/egui/pull/2438)).
* `winit::Painter` now supports transparent backbuffer ([#2684](https://github.com/emilk/egui/pull/2684)).
## 0.20.0 - 2022-12-08 - web support
* Renamed `RenderPass` to `Renderer`.
* Renamed `RenderPass::execute` to `RenderPass::render`.

View File

@ -1,6 +1,6 @@
use std::sync::Arc;
use epaint::mutex::RwLock;
use epaint::{self, mutex::RwLock};
use tracing::error;
@ -13,6 +13,65 @@ struct SurfaceState {
height: u32,
}
/// A texture and a buffer for reading the rendered frame back to the cpu.
/// The texture is required since [`wgpu::TextureUsages::COPY_DST`] is not an allowed
/// flag for the surface texture on all platforms. This means that anytime we want to
/// capture the frame, we first render it to this texture, and then we can copy it to
/// both the surface texture and the buffer, from where we can pull it back to the cpu.
struct CaptureState {
// Render target mirroring the surface texture's size/format, but with `COPY_SRC` usage.
texture: wgpu::Texture,
// CPU-mappable (`MAP_READ`) buffer the texture is copied into.
buffer: wgpu::Buffer,
// Row-padding bookkeeping required by wgpu's texture-to-buffer copy alignment rules.
padding: BufferPadding,
}
impl CaptureState {
// Allocate a capture texture and a readback buffer matching `surface_texture`'s
// size, format, mip levels and sample count.
fn new(device: &Arc<wgpu::Device>, surface_texture: &wgpu::Texture) -> Self {
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("egui_screen_capture_texture"),
size: surface_texture.size(),
mip_level_count: surface_texture.mip_level_count(),
sample_count: surface_texture.sample_count(),
dimension: surface_texture.dimension(),
format: surface_texture.format(),
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
view_formats: &[],
});
let padding = BufferPadding::new(surface_texture.width());
// One padded row in the buffer per texture row.
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("egui_screen_capture_buffer"),
size: (padding.padded_bytes_per_row * texture.height()) as u64,
usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
mapped_at_creation: false,
});
Self {
texture,
buffer,
padding,
}
}
}
/// Bytes-per-row bookkeeping for texture-to-buffer copies:
/// wgpu requires each copied row to be aligned to [`wgpu::COPY_BYTES_PER_ROW_ALIGNMENT`],
/// so the buffer rows may be wider than the actual pixel data.
struct BufferPadding {
// `width * 4`: the bytes of each row that actually hold pixel data.
unpadded_bytes_per_row: u32,
// `unpadded_bytes_per_row` rounded up to the copy alignment.
padded_bytes_per_row: u32,
}
impl BufferPadding {
fn new(width: u32) -> Self {
// 4 bytes per pixel (RGBA8/BGRA8 surface formats).
let bytes_per_pixel = std::mem::size_of::<u32>() as u32;
let unpadded_bytes_per_row = width * bytes_per_pixel;
let padded_bytes_per_row =
wgpu::util::align_to(unpadded_bytes_per_row, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT);
Self {
unpadded_bytes_per_row,
padded_bytes_per_row,
}
}
}
/// Everything you need to paint egui with [`wgpu`] on [`winit`].
///
/// Alternatively you can use [`crate::renderer`] directly.
@ -22,6 +81,7 @@ pub struct Painter {
support_transparent_backbuffer: bool,
depth_format: Option<wgpu::TextureFormat>,
depth_texture_view: Option<wgpu::TextureView>,
screen_capture_state: Option<CaptureState>,
instance: wgpu::Instance,
adapter: Option<wgpu::Adapter>,
@ -59,6 +119,7 @@ impl Painter {
support_transparent_backbuffer,
depth_format: (depth_bits > 0).then_some(wgpu::TextureFormat::Depth32Float),
depth_texture_view: None,
screen_capture_state: None,
instance,
adapter: None,
@ -136,7 +197,7 @@ impl Painter {
surface_state.surface.configure(
&render_state.device,
&wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_DST,
format: render_state.target_format,
width: surface_state.width,
height: surface_state.height,
@ -274,23 +335,118 @@ impl Painter {
}
}
/// Lazily (re)creates the [`CaptureState`]: it only needs to change when there is
/// no state yet, or when the surface texture has been resized since the last capture.
fn update_capture_state(
    screen_capture_state: &mut Option<CaptureState>,
    surface_texture: &wgpu::SurfaceTexture,
    render_state: &RenderState,
) {
    let surface_texture = &surface_texture.texture;
    // Recreate when absent or when the cached texture's size no longer matches the surface.
    let needs_recreate = screen_capture_state
        .as_ref()
        .map_or(true, |capture| capture.texture.size() != surface_texture.size());
    if needs_recreate {
        *screen_capture_state = Some(CaptureState::new(&render_state.device, surface_texture));
    }
}
// Handles copying from the CaptureState texture to the surface texture and the cpu.
//
// The frame was rendered into the capture texture, so this (1) copies it into the
// readback buffer (rows padded to wgpu's alignment), (2) blits it into the real
// surface texture so the frame is still presented, then (3) maps the buffer,
// blocks until the GPU work is done, and converts the bytes into an
// `epaint::ColorImage`.
//
// Returns `None` if the padded row width is zero (zero-width surface), if mapping
// the buffer fails, or if the surface format is neither `Rgba8Unorm` nor `Bgra8Unorm`.
fn read_screen_rgba(
screen_capture_state: &CaptureState,
render_state: &RenderState,
output_frame: &wgpu::SurfaceTexture,
) -> Option<epaint::ColorImage> {
let CaptureState {
texture: tex,
buffer,
padding,
} = screen_capture_state;
let device = &render_state.device;
let queue = &render_state.queue;
let tex_extent = tex.size();
let mut encoder = device.create_command_encoder(&Default::default());
// (1) Queue the copy into the CPU-mappable buffer; `?` bails if the row width is 0.
encoder.copy_texture_to_buffer(
tex.as_image_copy(),
wgpu::ImageCopyBuffer {
buffer,
layout: wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(std::num::NonZeroU32::new(padding.padded_bytes_per_row)?),
rows_per_image: None,
},
},
tex_extent,
);
// (2) Also copy into the surface texture, so the frame still reaches the screen.
encoder.copy_texture_to_texture(
tex.as_image_copy(),
output_frame.texture.as_image_copy(),
tex.size(),
);
let id = queue.submit(Some(encoder.finish()));
let buffer_slice = buffer.slice(..);
// (3) Map asynchronously, then wait on this exact submission so the map callback fires.
let (sender, receiver) = std::sync::mpsc::channel();
buffer_slice.map_async(wgpu::MapMode::Read, move |v| {
drop(sender.send(v));
});
device.poll(wgpu::Maintain::WaitForSubmissionIndex(id));
// Bail if the channel was dropped or the mapping itself failed.
receiver.recv().ok()?.ok()?;
// Index map from buffer byte order to RGBA, depending on the surface format.
let to_rgba = match tex.format() {
wgpu::TextureFormat::Rgba8Unorm => [0, 1, 2, 3],
wgpu::TextureFormat::Bgra8Unorm => [2, 1, 0, 3],
_ => {
tracing::error!("Screen can't be captured unless the surface format is Rgba8Unorm or Bgra8Unorm. Current surface format is {:?}", tex.format());
return None;
}
};
let mut pixels = Vec::with_capacity((tex.width() * tex.height()) as usize);
// Strip the per-row alignment padding, then convert each 4-byte pixel.
// NOTE(review): the framebuffer bytes are treated as premultiplied alpha.
for padded_row in buffer_slice
.get_mapped_range()
.chunks(padding.padded_bytes_per_row as usize)
{
let row = &padded_row[..padding.unpadded_bytes_per_row as usize];
for color in row.chunks(4) {
pixels.push(epaint::Color32::from_rgba_premultiplied(
color[to_rgba[0]],
color[to_rgba[1]],
color[to_rgba[2]],
color[to_rgba[3]],
));
}
}
// Unmap so the buffer can be reused for the next capture.
buffer.unmap();
Some(epaint::ColorImage {
size: [tex.width() as usize, tex.height() as usize],
pixels,
})
}
// Returns a vector with the frame's pixel data if it was requested.
pub fn paint_and_update_textures(
&mut self,
pixels_per_point: f32,
clear_color: [f32; 4],
clipped_primitives: &[epaint::ClippedPrimitive],
textures_delta: &epaint::textures::TexturesDelta,
) {
capture: bool,
) -> Option<epaint::ColorImage> {
crate::profile_function!();
let render_state = match self.render_state.as_mut() {
Some(rs) => rs,
None => return,
};
let surface_state = match self.surface_state.as_ref() {
Some(rs) => rs,
None => return,
};
let render_state = self.render_state.as_mut()?;
let surface_state = self.surface_state.as_ref()?;
let output_frame = {
crate::profile_scope!("get_current_texture");
@ -308,10 +464,10 @@ impl Painter {
render_state,
self.configuration.present_mode,
);
return;
return None;
}
SurfaceErrorAction::SkipFrame => {
return;
return None;
}
},
};
@ -351,9 +507,21 @@ impl Painter {
{
let renderer = render_state.renderer.read();
let frame_view = output_frame
.texture
.create_view(&wgpu::TextureViewDescriptor::default());
let frame_view = if capture {
Self::update_capture_state(
&mut self.screen_capture_state,
&output_frame,
render_state,
);
self.screen_capture_state
.as_ref()?
.texture
.create_view(&wgpu::TextureViewDescriptor::default())
} else {
output_frame
.texture
.create_view(&wgpu::TextureViewDescriptor::default())
};
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &frame_view,
@ -404,11 +572,18 @@ impl Painter {
.submit(user_cmd_bufs.into_iter().chain(std::iter::once(encoded)));
};
let screenshot = if capture {
let screen_capture_state = self.screen_capture_state.as_ref()?;
Self::read_screen_rgba(screen_capture_state, render_state, &output_frame)
} else {
None
};
// Redraw egui
{
crate::profile_scope!("present");
output_frame.present();
}
screenshot
}
#[allow(clippy::unused_self)]

View File

@ -622,7 +622,7 @@ impl Painter {
}
}
pub fn read_screen_rgba(&self, [w, h]: [u32; 2]) -> Vec<u8> {
pub fn read_screen_rgba(&self, [w, h]: [u32; 2]) -> egui::ColorImage {
let mut pixels = vec![0_u8; (w * h * 4) as usize];
unsafe {
self.gl.read_pixels(
@ -635,7 +635,14 @@ impl Painter {
glow::PixelPackData::Slice(&mut pixels),
);
}
pixels
let mut flipped = Vec::with_capacity((w * h * 4) as usize);
for row in pixels.chunks_exact((w * 4) as usize).rev() {
flipped.extend_from_slice(bytemuck::cast_slice(row));
}
egui::ColorImage {
size: [w as usize, h as usize],
pixels: flipped,
}
}
pub fn read_screen_rgb(&self, [w, h]: [u32; 2]) -> Vec<u8> {

View File

@ -101,6 +101,54 @@ impl ColorImage {
Self { size, pixels }
}
/// Create a [`ColorImage`] from flat premultiplied RGBA data (4 bytes per pixel).
///
/// Panics unless `rgba.len()` is exactly `size[0] * size[1] * 4`.
pub fn from_rgba_premultiplied(size: [usize; 2], rgba: &[u8]) -> Self {
    assert_eq!(size[0] * size[1] * 4, rgba.len());
    let mut pixels = Vec::with_capacity(size[0] * size[1]);
    for px in rgba.chunks_exact(4) {
        pixels.push(Color32::from_rgba_premultiplied(px[0], px[1], px[2], px[3]));
    }
    Self { size, pixels }
}
/// A view of the underlying data as `&[u8]`
///
/// Four bytes per pixel; relies on `Color32` being byte-castable via `bytemuck`.
pub fn as_raw(&self) -> &[u8] {
bytemuck::cast_slice(&self.pixels)
}
/// A view of the underlying data as `&mut [u8]`
///
/// Four bytes per pixel; relies on `Color32` being byte-castable via `bytemuck`.
pub fn as_raw_mut(&mut self) -> &mut [u8] {
bytemuck::cast_slice_mut(&mut self.pixels)
}
/// Create a new image from a rectangular patch of the current image. This method is especially
/// convenient for screenshotting a part of the app, since `region` can be interpreted as screen
/// coordinates of the entire screenshot if `pixels_per_point` is provided for the native application.
///
/// The floats of [`emath::Rect`] are cast to `usize`, rounding them down in order to interpret
/// them as indices into the image data.
///
/// # Panics
/// Panics if `region.min.x > region.max.x || region.min.y > region.max.y`, or if the region
/// extends past the bounds of the image.
pub fn region(&self, region: &emath::Rect, pixels_per_point: Option<f32>) -> Self {
    let pixels_per_point = pixels_per_point.unwrap_or(1.0);
    let min_x = (region.min.x * pixels_per_point) as usize;
    let max_x = (region.max.x * pixels_per_point) as usize;
    let min_y = (region.min.y * pixels_per_point) as usize;
    let max_y = (region.max.y * pixels_per_point) as usize;
    assert!(min_x <= max_x, "negative-width region");
    assert!(min_y <= max_y, "negative-height region");
    // Without this check an oversized x-range would not necessarily panic: rows are stored
    // contiguously, so slicing past `size[0]` silently reads pixels from the following row.
    assert!(
        max_x <= self.size[0] && max_y <= self.size[1],
        "region extends past the image bounds"
    );
    let width = max_x - min_x;
    let height = max_y - min_y;
    let row_stride = self.size[0];
    let mut output = Vec::with_capacity(width * height);
    for row in min_y..max_y {
        output.extend_from_slice(&self.pixels[row * row_stride + min_x..row * row_stride + max_x]);
    }
    Self {
        size: [width, height],
        pixels: output,
    }
}
/// Create a [`ColorImage`] from flat RGB data.
///
/// This is what you want to use after having loaded an image file (and if

View File

@ -1,7 +1,10 @@
[package]
name = "screenshot"
version = "0.1.0"
authors = ["René Rössler <rene@freshx.de>"]
authors = [
"René Rössler <rene@freshx.de>",
"Andreas Faber <andreas.mfaber@gmail.com>",
]
license = "MIT OR Apache-2.0"
edition = "2021"
rust-version = "1.65"
@ -11,5 +14,7 @@ publish = false
[dependencies]
eframe = { path = "../../crates/eframe", features = [
"__screenshot", # __screenshot is so we can dump a screenshot using EFRAME_SCREENSHOT_TO
"wgpu",
] }
itertools = "0.10.3"
image = { version = "0.24", default-features = false, features = ["png"] }

View File

@ -1,13 +1,12 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
use eframe::{
egui::{self, ColorImage},
glow::{self, HasContext},
};
use itertools::Itertools as _;
use eframe::egui::{self, ColorImage};
fn main() -> Result<(), eframe::Error> {
let options = eframe::NativeOptions::default();
let options = eframe::NativeOptions {
renderer: eframe::Renderer::Wgpu,
..Default::default()
};
eframe::run_native(
"Take screenshots and display with eframe/egui",
options,
@ -18,13 +17,13 @@ fn main() -> Result<(), eframe::Error> {
#[derive(Default)]
struct MyApp {
continuously_take_screenshots: bool,
take_screenshot: bool,
texture: Option<egui::TextureHandle>,
screenshot: Option<ColorImage>,
save_to_file: bool,
}
impl eframe::App for MyApp {
fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
fn update(&mut self, ctx: &egui::Context, frame: &mut eframe::Frame) {
egui::CentralPanel::default().show(ctx, |ui| {
if let Some(screenshot) = self.screenshot.take() {
self.texture = Some(ui.ctx().load_texture(
@ -40,6 +39,11 @@ impl eframe::App for MyApp {
"continuously take screenshots",
);
if ui.button("save to 'top_left.png'").clicked() {
self.save_to_file = true;
frame.request_screenshot();
}
ui.with_layout(egui::Layout::top_down(egui::Align::RIGHT), |ui| {
if self.continuously_take_screenshots {
if ui
@ -50,8 +54,9 @@ impl eframe::App for MyApp {
} else {
ctx.set_visuals(egui::Visuals::light());
};
frame.request_screenshot();
} else if ui.button("take screenshot!").clicked() {
self.take_screenshot = true;
frame.request_screenshot();
}
});
});
@ -66,43 +71,24 @@ impl eframe::App for MyApp {
});
}
#[allow(unsafe_code)]
fn post_rendering(&mut self, screen_size_px: [u32; 2], frame: &eframe::Frame) {
if !self.take_screenshot && !self.continuously_take_screenshots {
return;
}
self.take_screenshot = false;
if let Some(gl) = frame.gl() {
let [w, h] = screen_size_px;
let mut buf = vec![0u8; w as usize * h as usize * 4];
let pixels = glow::PixelPackData::Slice(&mut buf[..]);
unsafe {
gl.read_pixels(
0,
0,
w as i32,
h as i32,
glow::RGBA,
glow::UNSIGNED_BYTE,
pixels,
);
fn post_rendering(&mut self, _window_size: [u32; 2], frame: &eframe::Frame) {
if let Some(screenshot) = frame.screenshot() {
if self.save_to_file {
let pixels_per_point = frame.info().native_pixels_per_point;
let region =
egui::Rect::from_two_pos(egui::Pos2::ZERO, egui::Pos2 { x: 100., y: 100. });
let top_left_corner = screenshot.region(&region, pixels_per_point);
image::save_buffer(
"top_left.png",
top_left_corner.as_raw(),
top_left_corner.width() as u32,
top_left_corner.height() as u32,
image::ColorType::Rgba8,
)
.unwrap();
self.save_to_file = false;
}
// Flip vertically:
let mut rows: Vec<Vec<u8>> = buf
.into_iter()
.chunks(w as usize * 4)
.into_iter()
.map(|chunk| chunk.collect())
.collect();
rows.reverse();
let buf: Vec<u8> = rows.into_iter().flatten().collect();
self.screenshot = Some(ColorImage::from_rgba_unmultiplied(
[screen_size_px[0] as usize, screen_size_px[1] as usize],
&buf[..],
));
self.screenshot = Some(screenshot);
}
}
}