Copy/paste images into editors (Mac only) (#15782)

For future reference: there is a WIP branch for copy/pasting a mixture of images and
text at https://github.com/zed-industries/zed/tree/copy-paste-images -
we'll come back to that one after landing this PR.

Release Notes:

- You can now paste images into the Assistant Panel to include them as
context. Currently this works only on Mac and only with Anthropic models.
Support for more models, operating systems, and image clipboard operations
is planned.

---------

Co-authored-by: Antonio <antonio@zed.dev>
Co-authored-by: Mikayla <mikayla@zed.dev>
Co-authored-by: Jason <jason@zed.dev>
Co-authored-by: Kyle <kylek@zed.dev>
Committed by Richard Feldman on 2024-08-13 13:18:25 -04:00 (via GitHub)
parent e3b0de5dda, commit b1a581e81b
58 changed files with 2983 additions and 1708 deletions

View file

@ -11,9 +11,12 @@ use std::{
use anyhow::{anyhow, Result};
use derive_more::{Deref, DerefMut};
use futures::{channel::oneshot, future::LocalBoxFuture, Future};
use futures::{
channel::oneshot,
future::{LocalBoxFuture, Shared},
Future, FutureExt,
};
use slotmap::SlotMap;
use smol::future::FutureExt;
pub use async_context::*;
use collections::{FxHashMap, FxHashSet, VecDeque};
@ -25,8 +28,8 @@ pub use test_context::*;
use util::ResultExt;
use crate::{
current_platform, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
AssetCache, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
current_platform, hash, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
Asset, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
Entity, EventEmitter, ForegroundExecutor, Global, KeyBinding, Keymap, Keystroke, LayoutId,
Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, Point,
PromptBuilder, PromptHandle, PromptLevel, Render, RenderablePromptHandle, Reservation,
@ -220,7 +223,6 @@ pub struct AppContext {
pub(crate) background_executor: BackgroundExecutor,
pub(crate) foreground_executor: ForegroundExecutor,
pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box<dyn Any>>,
pub(crate) asset_cache: AssetCache,
asset_source: Arc<dyn AssetSource>,
pub(crate) svg_renderer: SvgRenderer,
http_client: Arc<dyn HttpClient>,
@ -276,7 +278,6 @@ impl AppContext {
background_executor: executor,
foreground_executor,
svg_renderer: SvgRenderer::new(asset_source.clone()),
asset_cache: AssetCache::new(),
loading_assets: Default::default(),
asset_source,
http_client,
@ -1267,6 +1268,40 @@ impl AppContext {
) {
self.prompt_builder = Some(PromptBuilder::Custom(Box::new(renderer)))
}
/// Remove an asset from GPUI's cache
pub fn remove_cached_asset<A: Asset + 'static>(&mut self, source: &A::Source) {
let asset_id = (TypeId::of::<A>(), hash(source));
self.loading_assets.remove(&asset_id);
}
/// Asynchronously load an asset. The returned task resolves once the asset has loaded, and the
/// boolean is true if this call was the first to request it.
///
/// Note that multiple calls to this method will result in at most one `Asset::load` call at a
/// time, and the result of that call will be cached.
pub fn fetch_asset<A: Asset + 'static>(
&mut self,
source: &A::Source,
) -> (Shared<Task<A::Output>>, bool) {
let asset_id = (TypeId::of::<A>(), hash(source));
let mut is_first = false;
let task = self
.loading_assets
.remove(&asset_id)
.map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
.unwrap_or_else(|| {
is_first = true;
let future = A::load(source.clone(), self);
let task = self.background_executor().spawn(future).shared();
task
});
self.loading_assets.insert(asset_id, Box::new(task.clone()));
(task, is_first)
}
}
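
As an aside (not part of the diff): here is a minimal sketch of how the reworked asset API might be used now that `Asset::load` takes `&mut AppContext` and `fetch_asset` returns a shared task plus a first-call flag. The `Uppercased` asset and the `fetch_uppercased` function are hypothetical names invented for illustration.

use std::future::Future;
use gpui::{AppContext, Asset, SharedString};

// Hypothetical asset: "loads" a string by upper-casing it on the background executor.
enum Uppercased {}

impl Asset for Uppercased {
    type Source = SharedString;
    type Output = SharedString;

    fn load(
        source: Self::Source,
        _cx: &mut AppContext,
    ) -> impl Future<Output = Self::Output> + Send + 'static {
        async move { SharedString::from(source.to_uppercase()) }
    }
}

fn fetch_uppercased(cx: &mut AppContext, source: SharedString) {
    // The same source shares one in-flight `Asset::load`; `is_first` tells us
    // whether this call is the one that kicked it off.
    let (task, is_first) = cx.fetch_asset::<Uppercased>(&source);
    if is_first {
        cx.background_executor()
            .spawn(async move {
                let _upper = task.await; // e.g. "HELLO" for the source "hello"
            })
            .detach();
    }
}
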
impl Context for AppContext {

View file

@ -1,17 +1,14 @@
use crate::{SharedString, SharedUri, WindowContext};
use collections::FxHashMap;
use crate::{AppContext, SharedString, SharedUri};
use futures::Future;
use parking_lot::Mutex;
use std::any::TypeId;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use std::sync::Arc;
use std::{any::Any, path::PathBuf};
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub(crate) enum UriOrPath {
Uri(SharedUri),
Path(Arc<PathBuf>),
Asset(SharedString),
Embedded(SharedString),
}
impl From<SharedUri> for UriOrPath {
@ -37,7 +34,7 @@ pub trait Asset {
/// Load the asset asynchronously
fn load(
source: Self::Source,
cx: &mut WindowContext,
cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static;
}
@ -47,42 +44,3 @@ pub fn hash<T: Hash>(data: &T) -> u64 {
data.hash(&mut hasher);
hasher.finish()
}
/// A cache for assets.
#[derive(Clone)]
pub struct AssetCache {
assets: Arc<Mutex<FxHashMap<(TypeId, u64), Box<dyn Any + Send>>>>,
}
impl AssetCache {
pub(crate) fn new() -> Self {
Self {
assets: Default::default(),
}
}
/// Get the asset from the cache, if it exists.
pub fn get<A: Asset + 'static>(&self, source: &A::Source) -> Option<A::Output> {
self.assets
.lock()
.get(&(TypeId::of::<A>(), hash(&source)))
.and_then(|task| task.downcast_ref::<A::Output>())
.cloned()
}
/// Insert the asset into the cache.
pub fn insert<A: Asset + 'static>(&mut self, source: A::Source, output: A::Output) {
self.assets
.lock()
.insert((TypeId::of::<A>(), hash(&source)), Box::new(output));
}
/// Remove an entry from the asset cache
pub fn remove<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
self.assets
.lock()
.remove(&(TypeId::of::<A>(), hash(&source)))
.and_then(|any| any.downcast::<A::Output>().ok())
.map(|boxed| *boxed)
}
}

View file

@ -38,14 +38,22 @@ pub(crate) struct RenderImageParams {
pub(crate) frame_index: usize,
}
/// A cached and processed image.
pub struct ImageData {
/// A cached and processed image, in BGRA format
pub struct RenderImage {
/// The ID associated with this image
pub id: ImageId,
data: SmallVec<[Frame; 1]>,
}
impl ImageData {
impl PartialEq for RenderImage {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for RenderImage {}
impl RenderImage {
/// Create a new image from the given data.
pub fn new(data: impl Into<SmallVec<[Frame; 1]>>) -> Self {
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
@ -57,8 +65,10 @@ impl ImageData {
}
/// Get this image's raw bytes for the given frame index, if that frame exists.
pub fn as_bytes(&self, frame_index: usize) -> &[u8] {
&self.data[frame_index].buffer()
pub fn as_bytes(&self, frame_index: usize) -> Option<&[u8]> {
self.data
.get(frame_index)
.map(|frame| frame.buffer().as_raw().as_slice())
}
/// Get the size of this image, in pixels.
@ -78,7 +88,7 @@ impl ImageData {
}
}
impl fmt::Debug for ImageData {
impl fmt::Debug for RenderImage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ImageData")
.field("id", &self.id)
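
Since `as_bytes` now returns an `Option` instead of indexing directly, callers must handle an out-of-range frame index. A small sketch (the 1x1 buffer and the function name are made up for illustration):

use gpui::RenderImage;
use image::{Frame, RgbaImage};
use smallvec::SmallVec;

fn single_frame_bytes() {
    // One 1x1 frame; GPUI expects the bytes to already be in BGRA order.
    let buffer = RgbaImage::from_raw(1, 1, vec![0, 0, 255, 255]).unwrap();
    let image = RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1));

    // Frame 0 exists, frame 1 does not, so a bad index no longer panics.
    assert!(image.as_bytes(0).is_some());
    assert!(image.as_bytes(1).is_none());
}
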

View file

@ -1,16 +1,14 @@
use crate::{
point, px, size, AbsoluteLength, Asset, Bounds, DefiniteLength, DevicePixels, Element,
ElementId, GlobalElementId, Hitbox, ImageData, InteractiveElement, Interactivity, IntoElement,
LayoutId, Length, Pixels, SharedString, SharedUri, Size, StyleRefinement, Styled, SvgSize,
UriOrPath, WindowContext,
px, AbsoluteLength, AppContext, Asset, Bounds, DefiniteLength, Element, ElementId,
GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId,
Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, Size, StyleRefinement, Styled,
SvgSize, UriOrPath, WindowContext,
};
use futures::{AsyncReadExt, Future};
use http_client;
use image::{
codecs::gif::GifDecoder, AnimationDecoder, Frame, ImageBuffer, ImageError, ImageFormat,
};
#[cfg(target_os = "macos")]
use media::core_video::CVImageBuffer;
use smallvec::SmallVec;
use std::{
fs,
@ -23,20 +21,18 @@ use thiserror::Error;
use util::ResultExt;
/// A source of image content.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ImageSource {
/// Image content will be loaded from provided URI at render time.
Uri(SharedUri),
/// Image content will be loaded from the provided file at render time.
File(Arc<PathBuf>),
/// Cached image data
Data(Arc<ImageData>),
Render(Arc<RenderImage>),
/// Raw image data (e.g. from the clipboard) that will be decoded at render time
Image(Arc<Image>),
/// Image content will be loaded from Asset at render time.
Asset(SharedString),
// TODO: move surface definitions into mac platform module
/// A CoreVideo image buffer
#[cfg(target_os = "macos")]
Surface(CVImageBuffer),
Embedded(SharedString),
}
fn is_uri(uri: &str) -> bool {
@ -54,7 +50,7 @@ impl From<&'static str> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
Self::Asset(s.into())
Self::Embedded(s.into())
}
}
}
@ -64,7 +60,7 @@ impl From<String> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
Self::Asset(s.into())
Self::Embedded(s.into())
}
}
}
@ -74,7 +70,7 @@ impl From<SharedString> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
Self::Asset(s)
Self::Embedded(s)
}
}
}
@ -91,16 +87,9 @@ impl From<PathBuf> for ImageSource {
}
}
impl From<Arc<ImageData>> for ImageSource {
fn from(value: Arc<ImageData>) -> Self {
Self::Data(value)
}
}
#[cfg(target_os = "macos")]
impl From<CVImageBuffer> for ImageSource {
fn from(value: CVImageBuffer) -> Self {
Self::Surface(value)
impl From<Arc<RenderImage>> for ImageSource {
fn from(value: Arc<RenderImage>) -> Self {
Self::Render(value)
}
}
@ -122,121 +111,6 @@ pub fn img(source: impl Into<ImageSource>) -> Img {
}
}
/// How to fit the image into the bounds of the element.
pub enum ObjectFit {
/// The image will be stretched to fill the bounds of the element.
Fill,
/// The image will be scaled to fit within the bounds of the element.
Contain,
/// The image will be scaled to cover the bounds of the element.
Cover,
/// The image will be scaled down to fit within the bounds of the element.
ScaleDown,
/// The image will maintain its original size.
None,
}
impl ObjectFit {
/// Get the bounds of the image within the given bounds.
pub fn get_bounds(
&self,
bounds: Bounds<Pixels>,
image_size: Size<DevicePixels>,
) -> Bounds<Pixels> {
let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
let image_ratio = image_size.width / image_size.height;
let bounds_ratio = bounds.size.width / bounds.size.height;
let result_bounds = match self {
ObjectFit::Fill => bounds,
ObjectFit::Contain => {
let new_size = if bounds_ratio > image_ratio {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
} else {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
}
ObjectFit::ScaleDown => {
// Check if the image is larger than the bounds in either dimension.
if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
// If the image is larger, use the same logic as Contain to scale it down.
let new_size = if bounds_ratio > image_ratio {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
} else {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
} else {
// If the image is smaller than or equal to the container, display it at its original size,
// centered within the container.
let original_size = size(image_size.width, image_size.height);
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
),
size: original_size,
}
}
}
ObjectFit::Cover => {
let new_size = if bounds_ratio > image_ratio {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
} else {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
}
ObjectFit::None => Bounds {
origin: bounds.origin,
size: image_size,
},
};
result_bounds
}
}
impl Img {
/// A list of all format extensions currently supported by this img element
pub fn extensions() -> &'static [&'static str] {
@ -291,7 +165,7 @@ impl Element for Img {
let layout_id = self
.interactivity
.request_layout(global_id, cx, |mut style, cx| {
if let Some(data) = self.source.data(cx) {
if let Some(data) = self.source.use_data(cx) {
if let Some(state) = &mut state {
let frame_count = data.frame_count();
if frame_count > 1 {
@ -363,7 +237,7 @@ impl Element for Img {
.paint(global_id, bounds, hitbox.as_ref(), cx, |style, cx| {
let corner_radii = style.corner_radii.to_pixels(bounds.size, cx.rem_size());
if let Some(data) = source.data(cx) {
if let Some(data) = source.use_data(cx) {
let new_bounds = self.object_fit.get_bounds(bounds, data.size(*frame_index));
cx.paint_image(
new_bounds,
@ -374,17 +248,6 @@ impl Element for Img {
)
.log_err();
}
match source {
#[cfg(target_os = "macos")]
ImageSource::Surface(surface) => {
let size = size(surface.width().into(), surface.height().into());
let new_bounds = self.object_fit.get_bounds(bounds, size);
// TODO: Add support for corner_radii and grayscale.
cx.paint_surface(new_bounds, surface);
}
_ => {}
}
})
}
}
@ -410,39 +273,74 @@ impl InteractiveElement for Img {
}
impl ImageSource {
fn data(&self, cx: &mut WindowContext) -> Option<Arc<ImageData>> {
pub(crate) fn use_data(&self, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
match self {
ImageSource::Uri(_) | ImageSource::Asset(_) | ImageSource::File(_) => {
ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
let uri_or_path: UriOrPath = match self {
ImageSource::Uri(uri) => uri.clone().into(),
ImageSource::File(path) => path.clone().into(),
ImageSource::Asset(path) => UriOrPath::Asset(path.clone()),
ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
_ => unreachable!(),
};
cx.use_cached_asset::<Image>(&uri_or_path)?.log_err()
cx.use_asset::<ImageAsset>(&uri_or_path)?.log_err()
}
ImageSource::Data(data) => Some(data.to_owned()),
#[cfg(target_os = "macos")]
ImageSource::Surface(_) => None,
ImageSource::Render(data) => Some(data.to_owned()),
ImageSource::Image(data) => cx.use_asset::<ImageDecoder>(data)?.log_err(),
}
}
/// Fetch the data associated with this source, using GPUI's asset caching
pub async fn data(&self, cx: &mut AppContext) -> Option<Arc<RenderImage>> {
match self {
ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
let uri_or_path: UriOrPath = match self {
ImageSource::Uri(uri) => uri.clone().into(),
ImageSource::File(path) => path.clone().into(),
ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
_ => unreachable!(),
};
cx.fetch_asset::<ImageAsset>(&uri_or_path).0.await.log_err()
}
ImageSource::Render(data) => Some(data.to_owned()),
ImageSource::Image(data) => cx.fetch_asset::<ImageDecoder>(data).0.await.log_err(),
}
}
}
#[derive(Clone)]
enum Image {}
enum ImageDecoder {}
impl Asset for Image {
type Source = UriOrPath;
type Output = Result<Arc<ImageData>, ImageCacheError>;
impl Asset for ImageDecoder {
type Source = Arc<Image>;
type Output = Result<Arc<RenderImage>, Arc<anyhow::Error>>;
fn load(
source: Self::Source,
cx: &mut WindowContext,
cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static {
let result = source.to_image_data(cx).map_err(Arc::new);
async { result }
}
}
#[derive(Clone)]
enum ImageAsset {}
impl Asset for ImageAsset {
type Source = UriOrPath;
type Output = Result<Arc<RenderImage>, ImageCacheError>;
fn load(
source: Self::Source,
cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static {
let client = cx.http_client();
let scale_factor = cx.scale_factor();
// TODO: Can we make SVGs always rescale?
// let scale_factor = cx.scale_factor();
let svg_renderer = cx.svg_renderer();
let asset_source = cx.asset_source().clone();
async move {
@ -461,7 +359,7 @@ impl Asset for Image {
}
body
}
UriOrPath::Asset(path) => {
UriOrPath::Embedded(path) => {
let data = asset_source.load(&path).ok().flatten();
if let Some(data) = data {
data.to_vec()
@ -503,15 +401,16 @@ impl Asset for Image {
}
};
ImageData::new(data)
RenderImage::new(data)
} else {
let pixmap =
svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(scale_factor))?;
// TODO: Can we make svgs always rescale?
svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?;
let buffer =
ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap();
ImageData::new(SmallVec::from_elem(Frame::new(buffer), 1))
RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1))
};
Ok(Arc::new(data))
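
As an illustrative aside (not part of the diff): with the renamed variants, URI-looking strings still become `ImageSource::Uri`, other strings become `ImageSource::Embedded` (previously `Asset`), and a clipboard `Image` can be wrapped without decoding it up front. This assumes `is_uri` keys off the `://` scheme separator, as above; the function and its inputs are hypothetical.

use std::sync::Arc;
use gpui::{Image, ImageSource};

fn classify_sources(clipboard_image: Arc<Image>) {
    // Strings with a scheme resolve to the Uri variant...
    let remote = ImageSource::from("https://example.com/logo.png");
    assert!(matches!(remote, ImageSource::Uri(_)));

    // ...while bare asset paths now map to Embedded (formerly Asset).
    let bundled = ImageSource::from("images/logo.svg");
    assert!(matches!(bundled, ImageSource::Embedded(_)));

    // Raw clipboard images are decoded lazily by the ImageDecoder asset.
    let pasted = ImageSource::Image(clipboard_image);
    let _ = pasted;
}
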

View file

@ -5,6 +5,7 @@ mod deferred;
mod div;
mod img;
mod list;
mod surface;
mod svg;
mod text;
mod uniform_list;
@ -16,6 +17,7 @@ pub use deferred::*;
pub use div::*;
pub use img::*;
pub use list::*;
pub use surface::*;
pub use svg::*;
pub use text::*;
pub use uniform_list::*;

View file

@ -0,0 +1,111 @@
use crate::{
Bounds, Element, ElementId, GlobalElementId, IntoElement, LayoutId, ObjectFit, Pixels, Style,
StyleRefinement, Styled, WindowContext,
};
#[cfg(target_os = "macos")]
use media::core_video::CVImageBuffer;
use refineable::Refineable;
/// A source of a surface's content.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SurfaceSource {
/// A macOS image buffer from CoreVideo
#[cfg(target_os = "macos")]
Surface(CVImageBuffer),
}
#[cfg(target_os = "macos")]
impl From<CVImageBuffer> for SurfaceSource {
fn from(value: CVImageBuffer) -> Self {
SurfaceSource::Surface(value)
}
}
/// A surface element.
pub struct Surface {
source: SurfaceSource,
object_fit: ObjectFit,
style: StyleRefinement,
}
/// Create a new surface element.
pub fn surface(source: impl Into<SurfaceSource>) -> Surface {
Surface {
source: source.into(),
object_fit: ObjectFit::Contain,
style: Default::default(),
}
}
impl Surface {
/// Set the object fit for the image.
pub fn object_fit(mut self, object_fit: ObjectFit) -> Self {
self.object_fit = object_fit;
self
}
}
impl Element for Surface {
type RequestLayoutState = ();
type PrepaintState = ();
fn id(&self) -> Option<ElementId> {
None
}
fn request_layout(
&mut self,
_global_id: Option<&GlobalElementId>,
cx: &mut WindowContext,
) -> (LayoutId, Self::RequestLayoutState) {
let mut style = Style::default();
style.refine(&self.style);
let layout_id = cx.request_layout(style, []);
(layout_id, ())
}
fn prepaint(
&mut self,
_global_id: Option<&GlobalElementId>,
_bounds: Bounds<Pixels>,
_request_layout: &mut Self::RequestLayoutState,
_cx: &mut WindowContext,
) -> Self::PrepaintState {
()
}
fn paint(
&mut self,
_global_id: Option<&GlobalElementId>,
#[cfg_attr(not(target_os = "macos"), allow(unused_variables))] bounds: Bounds<Pixels>,
_: &mut Self::RequestLayoutState,
_: &mut Self::PrepaintState,
#[cfg_attr(not(target_os = "macos"), allow(unused_variables))] cx: &mut WindowContext,
) {
match &self.source {
#[cfg(target_os = "macos")]
SurfaceSource::Surface(surface) => {
let size = crate::size(surface.width().into(), surface.height().into());
let new_bounds = self.object_fit.get_bounds(bounds, size);
// TODO: Add support for corner_radii
cx.paint_surface(new_bounds, surface.clone());
}
#[allow(unreachable_patterns)]
_ => {}
}
}
}
impl IntoElement for Surface {
type Element = Self;
fn into_element(self) -> Self::Element {
self
}
}
impl Styled for Surface {
fn style(&mut self) -> &mut StyleRefinement {
&mut self.style
}
}
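
A short usage sketch for the new element (macOS only). The `frame` parameter stands in for a CVImageBuffer obtained elsewhere, e.g. from a camera or a video decoder, and it assumes the `media` crate is available; the function name is hypothetical.

#[cfg(target_os = "macos")]
fn video_frame_element(frame: media::core_video::CVImageBuffer) -> impl gpui::IntoElement {
    use gpui::{surface, ObjectFit, Styled};

    // Let the buffer cover the element's bounds; the element's style controls its size.
    surface(frame).object_fit(ObjectFit::Cover).size_full()
}
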

View file

@ -2447,10 +2447,24 @@ impl From<usize> for Pixels {
/// affected by the device's scale factor, `DevicePixels` always correspond to real pixels on the
/// display.
#[derive(
Add, AddAssign, Clone, Copy, Default, Div, Eq, Hash, Ord, PartialEq, PartialOrd, Sub, SubAssign,
Add,
AddAssign,
Clone,
Copy,
Default,
Div,
Eq,
Hash,
Ord,
PartialEq,
PartialOrd,
Sub,
SubAssign,
Serialize,
Deserialize,
)]
#[repr(transparent)]
pub struct DevicePixels(pub(crate) i32);
pub struct DevicePixels(pub i32);
impl DevicePixels {
/// Converts the `DevicePixels` value to the number of bytes needed to represent it in memory.

View file

@ -20,21 +20,25 @@ mod test;
mod windows;
use crate::{
point, Action, AnyWindowHandle, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels,
DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId,
Keymap, LineLayout, Pixels, PlatformInput, Point, RenderGlyphParams, RenderImageParams,
RenderSvgParams, Scene, SharedString, Size, Task, TaskLabel, WindowContext,
DEFAULT_WINDOW_SIZE,
point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds,
DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor,
GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point,
RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size,
SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE,
};
use anyhow::Result;
use async_task::Runnable;
use futures::channel::oneshot;
use image::codecs::gif::GifDecoder;
use image::{AnimationDecoder as _, Frame};
use parking::Unparker;
use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
use seahash::SeaHasher;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use std::io::Cursor;
use std::time::{Duration, Instant};
use std::{
fmt::{self, Debug},
@ -43,6 +47,7 @@ use std::{
rc::Rc,
sync::Arc,
};
use strum::EnumIter;
use uuid::Uuid;
pub use app_menu::*;
@ -969,12 +974,210 @@ impl Default for CursorStyle {
/// A clipboard item that should be copied to the clipboard
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ClipboardItem {
entries: Vec<ClipboardEntry>,
}
/// Either a ClipboardString or an Image
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ClipboardEntry {
/// A string entry
String(ClipboardString),
/// An image entry
Image(Image),
}
impl ClipboardItem {
/// Create a new ClipboardItem::String with no associated metadata
pub fn new_string(text: String) -> Self {
Self {
entries: vec![ClipboardEntry::String(ClipboardString::new(text))],
}
}
/// Create a new ClipboardItem::String with the given text and associated metadata
pub fn new_string_with_metadata(text: String, metadata: String) -> Self {
Self {
entries: vec![ClipboardEntry::String(ClipboardString {
text,
metadata: Some(metadata),
})],
}
}
/// Create a new ClipboardItem::String with the given text and associated metadata
pub fn new_string_with_json_metadata<T: Serialize>(text: String, metadata: T) -> Self {
Self {
entries: vec![ClipboardEntry::String(
ClipboardString::new(text).with_json_metadata(metadata),
)],
}
}
/// Concatenates together all the ClipboardString entries in the item.
/// Returns None if there were no ClipboardString entries.
pub fn text(&self) -> Option<String> {
let mut answer = String::new();
let mut any_entries = false;
for entry in self.entries.iter() {
if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry {
answer.push_str(text);
any_entries = true;
}
}
if any_entries {
Some(answer)
} else {
None
}
}
/// If this item consists of exactly one ClipboardEntry::String, returns its metadata.
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
pub fn metadata(&self) -> Option<&String> {
match self.entries().first() {
Some(ClipboardEntry::String(clipboard_string)) if self.entries.len() == 1 => {
clipboard_string.metadata.as_ref()
}
_ => None,
}
}
/// Get the item's entries
pub fn entries(&self) -> &[ClipboardEntry] {
&self.entries
}
/// Get owned versions of the item's entries
pub fn into_entries(self) -> impl Iterator<Item = ClipboardEntry> {
self.entries.into_iter()
}
}
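
An illustrative snippet (not in the diff) showing how the new constructors interact with `text()` and `metadata()`; the platform backends below rely on `text()` returning `None` when an item has no string entries.

use gpui::ClipboardItem;

fn clipboard_string_round_trip() {
    // A plain string item round-trips through `text()`.
    let item = ClipboardItem::new_string("hello".to_string());
    assert_eq!(item.text().as_deref(), Some("hello"));

    // JSON metadata rides along with the single string entry.
    let tagged = ClipboardItem::new_string_with_json_metadata("x".to_string(), vec![1, 2]);
    assert_eq!(tagged.metadata(), Some(&"[1,2]".to_string()));
}
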
/// One of the editor's supported image formats (e.g. PNG, JPEG) - used when dealing with images in the clipboard
#[derive(Clone, Copy, Debug, Eq, PartialEq, EnumIter, Hash)]
pub enum ImageFormat {
// Sorted from most to least likely to be pasted into an editor,
// which matters when we iterate through them trying to see if
// clipboard content matches them.
/// .png
Png,
/// .jpeg or .jpg
Jpeg,
/// .webp
Webp,
/// .gif
Gif,
/// .svg
Svg,
/// .bmp
Bmp,
/// .tif or .tiff
Tiff,
}
/// An image, along with its format and raw bytes
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Image {
/// The image format the bytes represent (e.g. PNG)
format: ImageFormat,
/// The raw image bytes
bytes: Vec<u8>,
id: u64,
}
impl Hash for Image {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u64(self.id);
}
}
impl Image {
/// Get this image's ID
pub fn id(&self) -> u64 {
self.id
}
/// Use the GPUI `use_asset` API to make this image renderable
pub fn use_render_image(self: Arc<Self>, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
ImageSource::Image(self).use_data(cx)
}
/// Convert the clipboard image to a `RenderImage` object.
pub fn to_image_data(&self, cx: &AppContext) -> Result<Arc<RenderImage>> {
fn frames_for_image(
bytes: &[u8],
format: image::ImageFormat,
) -> Result<SmallVec<[Frame; 1]>> {
let mut data = image::load_from_memory_with_format(bytes, format)?.into_rgba8();
// Convert from RGBA to BGRA.
for pixel in data.chunks_exact_mut(4) {
pixel.swap(0, 2);
}
Ok(SmallVec::from_elem(Frame::new(data), 1))
}
let frames = match self.format {
ImageFormat::Gif => {
let decoder = GifDecoder::new(Cursor::new(&self.bytes))?;
let mut frames = SmallVec::new();
for frame in decoder.into_frames() {
let mut frame = frame?;
// Convert from RGBA to BGRA.
for pixel in frame.buffer_mut().chunks_exact_mut(4) {
pixel.swap(0, 2);
}
frames.push(frame);
}
frames
}
ImageFormat::Png => frames_for_image(&self.bytes, image::ImageFormat::Png)?,
ImageFormat::Jpeg => frames_for_image(&self.bytes, image::ImageFormat::Jpeg)?,
ImageFormat::Webp => frames_for_image(&self.bytes, image::ImageFormat::WebP)?,
ImageFormat::Bmp => frames_for_image(&self.bytes, image::ImageFormat::Bmp)?,
ImageFormat::Tiff => frames_for_image(&self.bytes, image::ImageFormat::Tiff)?,
ImageFormat::Svg => {
// TODO: Fix this
let pixmap = cx
.svg_renderer()
.render_pixmap(&self.bytes, SvgSize::ScaleFactor(1.0))?;
let buffer =
image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take())
.unwrap();
SmallVec::from_elem(Frame::new(buffer), 1)
}
};
Ok(Arc::new(RenderImage::new(frames)))
}
/// Get the format of the clipboard image
pub fn format(&self) -> ImageFormat {
self.format
}
/// Get the raw bytes of the clipboard image
pub fn bytes(&self) -> &[u8] {
self.bytes.as_slice()
}
}
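
A sketch of how a consumer (for example, the assistant panel mentioned in the release notes) might pull pasted images back out of the clipboard; the function name is hypothetical and error handling is omitted.

use std::sync::Arc;
use gpui::{ClipboardEntry, Image, WindowContext};

fn pasted_images(cx: &mut WindowContext) -> Vec<Arc<Image>> {
    let Some(item) = cx.read_from_clipboard() else {
        return Vec::new();
    };
    item.into_entries()
        .filter_map(|entry| match entry {
            // Image entries keep the original encoded bytes plus their format;
            // `Image::use_render_image` can later turn them into a RenderImage.
            ClipboardEntry::Image(image) => Some(Arc::new(image)),
            ClipboardEntry::String(_) => None,
        })
        .collect()
}
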
/// A clipboard item that should be copied to the clipboard
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ClipboardString {
pub(crate) text: String,
pub(crate) metadata: Option<String>,
}
impl ClipboardItem {
/// Create a new clipboard item with the given text
impl ClipboardString {
/// Create a new clipboard string with the given text
pub fn new(text: String) -> Self {
Self {
text,
@ -982,19 +1185,25 @@ impl ClipboardItem {
}
}
/// Create a new clipboard item with the given text and metadata
pub fn with_metadata<T: Serialize>(mut self, metadata: T) -> Self {
/// Return a new clipboard item with the metadata replaced by the given metadata,
/// after serializing it as JSON.
pub fn with_json_metadata<T: Serialize>(mut self, metadata: T) -> Self {
self.metadata = Some(serde_json::to_string(&metadata).unwrap());
self
}
/// Get the text of the clipboard item
/// Get the text of the clipboard string
pub fn text(&self) -> &String {
&self.text
}
/// Get the metadata of the clipboard item
pub fn metadata<T>(&self) -> Option<T>
/// Get the owned text of the clipboard string
pub fn into_text(self) -> String {
self.text
}
/// Get the metadata of the clipboard string, formatted as JSON
pub fn metadata_json<T>(&self) -> Option<T>
where
T: for<'a> Deserialize<'a>,
{

View file

@ -112,14 +112,18 @@ impl Clipboard {
}
pub fn send(&self, _mime_type: String, fd: OwnedFd) {
if let Some(contents) = &self.contents {
self.send_internal(fd, contents.text.as_bytes().to_owned());
if let Some(text) = self.contents.as_ref().and_then(|contents| contents.text()) {
self.send_internal(fd, text.as_bytes().to_owned());
}
}
pub fn send_primary(&self, _mime_type: String, fd: OwnedFd) {
if let Some(primary_contents) = &self.primary_contents {
self.send_internal(fd, primary_contents.text.as_bytes().to_owned());
if let Some(text) = self
.primary_contents
.as_ref()
.and_then(|contents| contents.text())
{
self.send_internal(fd, text.as_bytes().to_owned());
}
}
@ -145,7 +149,7 @@ impl Clipboard {
match unsafe { read_fd(fd) } {
Ok(v) => {
self.cached_read = Some(ClipboardItem::new(v));
self.cached_read = Some(ClipboardItem::new_string(v));
self.cached_read.clone()
}
Err(err) => {
@ -177,7 +181,7 @@ impl Clipboard {
match unsafe { read_fd(fd) } {
Ok(v) => {
self.cached_primary_read = Some(ClipboardItem::new(v.clone()));
self.cached_primary_read = Some(ClipboardItem::new_string(v.clone()));
self.cached_primary_read.clone()
}
Err(err) => {

View file

@ -1259,7 +1259,7 @@ impl LinuxClient for X11Client {
.store(
state.clipboard.setter.atoms.primary,
state.clipboard.setter.atoms.utf8_string,
item.text().as_bytes(),
item.text().unwrap_or_default().as_bytes(),
)
.ok();
}
@ -1271,7 +1271,7 @@ impl LinuxClient for X11Client {
.store(
state.clipboard.setter.atoms.clipboard,
state.clipboard.setter.atoms.utf8_string,
item.text().as_bytes(),
item.text().unwrap_or_default().as_bytes(),
)
.ok();
state.clipboard_item.replace(item);
@ -1287,10 +1287,7 @@ impl LinuxClient for X11Client {
state.clipboard.getter.atoms.property,
Duration::from_secs(3),
)
.map(|text| crate::ClipboardItem {
text: String::from_utf8(text).unwrap(),
metadata: None,
})
.map(|text| crate::ClipboardItem::new_string(String::from_utf8(text).unwrap()))
.ok()
}
@ -1318,10 +1315,7 @@ impl LinuxClient for X11Client {
state.clipboard.getter.atoms.property,
Duration::from_secs(3),
)
.map(|text| crate::ClipboardItem {
text: String::from_utf8(text).unwrap(),
metadata: None,
})
.map(|text| crate::ClipboardItem::new_string(String::from_utf8(text).unwrap()))
.ok()
}

View file

@ -16,6 +16,7 @@ use metal_renderer as renderer;
#[cfg(feature = "macos-blade")]
use crate::platform::blade as renderer;
mod attributed_string;
mod open_type;
mod platform;
mod text_system;

View file

@ -0,0 +1,122 @@
use cocoa::base::id;
use cocoa::foundation::NSRange;
use objc::{class, msg_send, sel, sel_impl};
/// The `cocoa` crate does not define NSAttributedString (and related Cocoa classes),
/// which are needed for copying rich text (that is, text intermingled with images)
/// to the clipboard. This adds access to those APIs.
#[allow(non_snake_case)]
pub trait NSAttributedString: Sized {
unsafe fn alloc(_: Self) -> id {
msg_send![class!(NSAttributedString), alloc]
}
unsafe fn init_attributed_string(self, string: id) -> id;
unsafe fn appendAttributedString_(self, attr_string: id);
unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
unsafe fn string(self) -> id;
}
impl NSAttributedString for id {
unsafe fn init_attributed_string(self, string: id) -> id {
msg_send![self, initWithString: string]
}
unsafe fn appendAttributedString_(self, attr_string: id) {
let _: () = msg_send![self, appendAttributedString: attr_string];
}
unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
msg_send![self, RTFDFromRange: range documentAttributes: attrs]
}
unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
msg_send![self, RTFFromRange: range documentAttributes: attrs]
}
unsafe fn string(self) -> id {
msg_send![self, string]
}
}
pub trait NSMutableAttributedString: NSAttributedString {
unsafe fn alloc(_: Self) -> id {
msg_send![class!(NSMutableAttributedString), alloc]
}
}
impl NSMutableAttributedString for id {}
#[cfg(test)]
mod tests {
use super::*;
use cocoa::appkit::NSImage;
use cocoa::base::nil;
use cocoa::foundation::NSString;
#[test]
#[ignore] // This was SIGSEGV-ing on CI but not locally; need to investigate https://github.com/zed-industries/zed/actions/runs/10362363230/job/28684225486?pr=15782#step:4:1348
fn test_nsattributed_string() {
// TODO move these to parent module once it's actually ready to be used
#[allow(non_snake_case)]
pub trait NSTextAttachment: Sized {
unsafe fn alloc(_: Self) -> id {
msg_send![class!(NSTextAttachment), alloc]
}
}
impl NSTextAttachment for id {}
unsafe {
let image: id = msg_send![class!(NSImage), alloc];
image.initWithContentsOfFile_(
NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
);
let _size = image.size();
let string = NSString::alloc(nil).init_str("Test String");
let attr_string = NSMutableAttributedString::alloc(nil).init_attributed_string(string);
let hello_string = NSString::alloc(nil).init_str("Hello World");
let hello_attr_string =
NSAttributedString::alloc(nil).init_attributed_string(hello_string);
attr_string.appendAttributedString_(hello_attr_string);
let attachment = NSTextAttachment::alloc(nil);
let _: () = msg_send![attachment, setImage: image];
let image_attr_string =
msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment];
attr_string.appendAttributedString_(image_attr_string);
let another_string = NSString::alloc(nil).init_str("Another String");
let another_attr_string =
NSAttributedString::alloc(nil).init_attributed_string(another_string);
attr_string.appendAttributedString_(another_attr_string);
let _len: cocoa::foundation::NSUInteger = msg_send![attr_string, length];
///////////////////////////////////////////////////
// pasteboard.clearContents();
let rtfd_data = attr_string.RTFDFromRange_documentAttributes_(
NSRange::new(0, msg_send![attr_string, length]),
nil,
);
assert_ne!(rtfd_data, nil);
// if rtfd_data != nil {
// pasteboard.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
// }
// let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
// NSRange::new(0, attributed_string.length()),
// nil,
// );
// if rtf_data != nil {
// pasteboard.setData_forType(rtf_data, NSPasteboardTypeRTF);
// }
// let plain_text = attributed_string.string();
// pasteboard.setString_forType(plain_text, NSPasteboardTypeString);
}
}
}

View file

@ -1,8 +1,8 @@
use super::metal_atlas::MetalAtlas;
use crate::{
point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels,
Hsla, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad,
ScaledPixels, Scene, Shadow, Size, Surface, Underline,
Hsla, MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite,
PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline,
};
use anyhow::{anyhow, Result};
use block::ConcreteBlock;
@ -1020,7 +1020,7 @@ impl MetalRenderer {
fn draw_surfaces(
&mut self,
surfaces: &[Surface],
surfaces: &[PaintSurface],
instance_buffer: &mut InstanceBuffer,
instance_offset: &mut usize,
viewport_size: Size<DevicePixels>,

View file

@ -1,8 +1,13 @@
use super::{events::key_to_native, BoolExt};
use super::{
attributed_string::{NSAttributedString, NSMutableAttributedString},
events::key_to_native,
BoolExt,
};
use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor,
Keymap, MacDispatcher, MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions,
Platform, PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
hash, Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem,
ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, Keymap, MacDispatcher,
MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions, Platform,
PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
WindowAppearance, WindowParams,
};
use anyhow::anyhow;
@ -11,16 +16,17 @@ use cocoa::{
appkit::{
NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular,
NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSPasteboard,
NSPasteboardTypeString, NSSavePanel, NSWindow,
NSPasteboardTypePNG, NSPasteboardTypeRTF, NSPasteboardTypeRTFD, NSPasteboardTypeString,
NSPasteboardTypeTIFF, NSSavePanel, NSWindow,
},
base::{id, nil, selector, BOOL, YES},
foundation::{
NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSString,
NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSRange, NSString,
NSUInteger, NSURL,
},
};
use core_foundation::{
base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType as _},
base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType},
boolean::CFBoolean,
data::CFData,
dictionary::{CFDictionary, CFDictionaryRef, CFMutableDictionary},
@ -50,6 +56,7 @@ use std::{
slice, str,
sync::Arc,
};
use strum::IntoEnumIterator;
use super::renderer;
@ -421,7 +428,7 @@ impl Platform for MacPlatform {
pool.drain();
(*app).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
(*app.delegate()).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
(*NSWindow::delegate(app)).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
}
}
@ -749,7 +756,7 @@ impl Platform for MacPlatform {
let app: id = msg_send![APP_CLASS, sharedApplication];
let mut state = self.0.lock();
let actions = &mut state.menu_actions;
app.setMainMenu_(self.create_menu_bar(menus, app.delegate(), actions, keymap));
app.setMainMenu_(self.create_menu_bar(menus, NSWindow::delegate(app), actions, keymap));
}
}
@ -758,7 +765,7 @@ impl Platform for MacPlatform {
let app: id = msg_send![APP_CLASS, sharedApplication];
let mut state = self.0.lock();
let actions = &mut state.menu_actions;
let new = self.create_dock_menu(menu, app.delegate(), actions, keymap);
let new = self.create_dock_menu(menu, NSWindow::delegate(app), actions, keymap);
if let Some(old) = state.dock_menu.replace(new) {
CFRelease(old as _)
}
@ -851,79 +858,115 @@ impl Platform for MacPlatform {
}
fn write_to_clipboard(&self, item: ClipboardItem) {
let state = self.0.lock();
use crate::ClipboardEntry;
unsafe {
state.pasteboard.clearContents();
// We only want to use NSAttributedString if there are multiple entries to write.
if item.entries.len() <= 1 {
match item.entries.first() {
Some(entry) => match entry {
ClipboardEntry::String(string) => {
self.write_plaintext_to_clipboard(string);
}
ClipboardEntry::Image(image) => {
self.write_image_to_clipboard(image);
}
},
None => {
// Writing an empty list of entries just clears the clipboard.
let state = self.0.lock();
state.pasteboard.clearContents();
}
}
} else {
let mut any_images = false;
let attributed_string = {
let mut buf = NSMutableAttributedString::alloc(nil)
// TODO can we skip this? Or at least part of it?
.init_attributed_string(NSString::alloc(nil).init_str(""));
let text_bytes = NSData::dataWithBytes_length_(
nil,
item.text.as_ptr() as *const c_void,
item.text.len() as u64,
);
state
.pasteboard
.setData_forType(text_bytes, NSPasteboardTypeString);
for entry in item.entries {
if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry
{
let to_append = NSAttributedString::alloc(nil)
.init_attributed_string(NSString::alloc(nil).init_str(&text));
if let Some(metadata) = item.metadata.as_ref() {
let hash_bytes = ClipboardItem::text_hash(&item.text).to_be_bytes();
let hash_bytes = NSData::dataWithBytes_length_(
nil,
hash_bytes.as_ptr() as *const c_void,
hash_bytes.len() as u64,
);
buf.appendAttributedString_(to_append);
}
}
buf
};
let state = self.0.lock();
state.pasteboard.clearContents();
// Only set rich text clipboard types if we actually have 1+ images to include.
if any_images {
let rtfd_data = attributed_string.RTFDFromRange_documentAttributes_(
NSRange::new(0, msg_send![attributed_string, length]),
nil,
);
if rtfd_data != nil {
state
.pasteboard
.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
}
let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
NSRange::new(0, attributed_string.length()),
nil,
);
if rtf_data != nil {
state
.pasteboard
.setData_forType(rtf_data, NSPasteboardTypeRTF);
}
}
let plain_text = attributed_string.string();
state
.pasteboard
.setData_forType(hash_bytes, state.text_hash_pasteboard_type);
let metadata_bytes = NSData::dataWithBytes_length_(
nil,
metadata.as_ptr() as *const c_void,
metadata.len() as u64,
);
state
.pasteboard
.setData_forType(metadata_bytes, state.metadata_pasteboard_type);
.setString_forType(plain_text, NSPasteboardTypeString);
}
}
}
fn read_from_clipboard(&self) -> Option<ClipboardItem> {
let state = self.0.lock();
unsafe {
if let Some(text_bytes) =
self.read_from_pasteboard(state.pasteboard, NSPasteboardTypeString)
{
let text = String::from_utf8_lossy(text_bytes).to_string();
let hash_bytes = self
.read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
.and_then(|bytes| bytes.try_into().ok())
.map(u64::from_be_bytes);
let metadata_bytes = self
.read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)
.and_then(|bytes| String::from_utf8(bytes.to_vec()).ok());
let pasteboard = state.pasteboard;
if let Some((hash, metadata)) = hash_bytes.zip(metadata_bytes) {
if hash == ClipboardItem::text_hash(&text) {
Some(ClipboardItem {
text,
metadata: Some(metadata),
})
} else {
Some(ClipboardItem {
text,
metadata: None,
})
}
// First, see if it's a string.
unsafe {
let types: id = pasteboard.types();
let string_type: id = ns_string("public.utf8-plain-text");
if msg_send![types, containsObject: string_type] {
let data = pasteboard.dataForType(string_type);
if data == nil {
return None;
} else if data.bytes().is_null() {
// https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc
// "If the length of the NSData object is 0, this property returns nil."
return Some(self.read_string_from_clipboard(&state, &[]));
} else {
Some(ClipboardItem {
text,
metadata: None,
})
let bytes =
slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize);
return Some(self.read_string_from_clipboard(&state, bytes));
}
}
// If it wasn't a string, try the various supported image types.
for format in ImageFormat::iter() {
if let Some(item) = try_clipboard_image(pasteboard, format) {
return Some(item);
}
} else {
None
}
}
// If it wasn't a string or a supported image type, give up.
None
}
fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task<Result<()>> {
@ -1038,6 +1081,110 @@ impl Platform for MacPlatform {
}
}
impl MacPlatform {
unsafe fn read_string_from_clipboard(
&self,
state: &MacPlatformState,
text_bytes: &[u8],
) -> ClipboardItem {
let text = String::from_utf8_lossy(text_bytes).to_string();
let metadata = self
.read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
.and_then(|hash_bytes| {
let hash_bytes = hash_bytes.try_into().ok()?;
let hash = u64::from_be_bytes(hash_bytes);
let metadata =
self.read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)?;
if hash == ClipboardString::text_hash(&text) {
String::from_utf8(metadata.to_vec()).ok()
} else {
None
}
});
ClipboardItem {
entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })],
}
}
unsafe fn write_plaintext_to_clipboard(&self, string: &ClipboardString) {
let state = self.0.lock();
state.pasteboard.clearContents();
let text_bytes = NSData::dataWithBytes_length_(
nil,
string.text.as_ptr() as *const c_void,
string.text.len() as u64,
);
state
.pasteboard
.setData_forType(text_bytes, NSPasteboardTypeString);
if let Some(metadata) = string.metadata.as_ref() {
let hash_bytes = ClipboardString::text_hash(&string.text).to_be_bytes();
let hash_bytes = NSData::dataWithBytes_length_(
nil,
hash_bytes.as_ptr() as *const c_void,
hash_bytes.len() as u64,
);
state
.pasteboard
.setData_forType(hash_bytes, state.text_hash_pasteboard_type);
let metadata_bytes = NSData::dataWithBytes_length_(
nil,
metadata.as_ptr() as *const c_void,
metadata.len() as u64,
);
state
.pasteboard
.setData_forType(metadata_bytes, state.metadata_pasteboard_type);
}
}
unsafe fn write_image_to_clipboard(&self, image: &Image) {
let state = self.0.lock();
state.pasteboard.clearContents();
let bytes = NSData::dataWithBytes_length_(
nil,
image.bytes.as_ptr() as *const c_void,
image.bytes.len() as u64,
);
state
.pasteboard
.setData_forType(bytes, Into::<UTType>::into(image.format).inner_mut());
}
}
fn try_clipboard_image(pasteboard: id, format: ImageFormat) -> Option<ClipboardItem> {
let mut ut_type: UTType = format.into();
unsafe {
let types: id = pasteboard.types();
if msg_send![types, containsObject: ut_type.inner()] {
let data = pasteboard.dataForType(ut_type.inner_mut());
if data == nil {
None
} else {
let bytes = Vec::from(slice::from_raw_parts(
data.bytes() as *mut u8,
data.length() as usize,
));
let id = hash(&bytes);
Some(ClipboardItem {
entries: vec![ClipboardEntry::Image(Image { format, bytes, id })],
})
}
} else {
None
}
}
}
unsafe fn path_from_objc(path: id) -> PathBuf {
let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
let bytes = path.UTF8String() as *const u8;
@ -1216,6 +1363,68 @@ mod security {
pub const errSecItemNotFound: OSStatus = -25300;
}
impl From<ImageFormat> for UTType {
fn from(value: ImageFormat) -> Self {
match value {
ImageFormat::Png => Self::png(),
ImageFormat::Jpeg => Self::jpeg(),
ImageFormat::Tiff => Self::tiff(),
ImageFormat::Webp => Self::webp(),
ImageFormat::Gif => Self::gif(),
ImageFormat::Bmp => Self::bmp(),
ImageFormat::Svg => Self::svg(),
}
}
}
// See https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/
struct UTType(id);
impl UTType {
pub fn png() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/png
Self(unsafe { NSPasteboardTypePNG }) // This is a rare case where there's a built-in NSPasteboardType
}
pub fn jpeg() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/jpeg
Self(unsafe { ns_string("public.jpeg") })
}
pub fn gif() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/gif
Self(unsafe { ns_string("com.compuserve.gif") })
}
pub fn webp() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/webp
Self(unsafe { ns_string("org.webmproject.webp") })
}
pub fn bmp() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/bmp
Self(unsafe { ns_string("com.microsoft.bmp") })
}
pub fn svg() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/svg
Self(unsafe { ns_string("public.svg-image") })
}
pub fn tiff() -> Self {
// https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff
Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType
}
fn inner(&self) -> *const Object {
self.0
}
fn inner_mut(&mut self) -> *mut Object {
self.0 as *mut _
}
}
#[cfg(test)]
mod tests {
use crate::ClipboardItem;
@ -1227,11 +1436,15 @@ mod tests {
let platform = build_platform();
assert_eq!(platform.read_from_clipboard(), None);
let item = ClipboardItem::new("1".to_string());
let item = ClipboardItem::new_string("1".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
let item = ClipboardItem::new("2".to_string()).with_metadata(vec![3, 4]);
let item = ClipboardItem {
entries: vec![ClipboardEntry::String(
ClipboardString::new("2".to_string()).with_json_metadata(vec![3, 4]),
)],
};
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
@ -1250,7 +1463,7 @@ mod tests {
}
assert_eq!(
platform.read_from_clipboard(),
Some(ClipboardItem::new(text_from_other_app.to_string()))
Some(ClipboardItem::new_string(text_from_other_app.to_string()))
);
}

View file

@ -735,12 +735,17 @@ fn write_to_clipboard_inner(
unsafe {
OpenClipboard(None)?;
EmptyClipboard()?;
let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec();
let encode_wide = item
.text()
.unwrap_or_default()
.encode_utf16()
.chain(Some(0))
.collect_vec();
set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?;
if let Some(ref metadata) = item.metadata {
if let Some((metadata, text)) = item.metadata().zip(item.text()) {
let hash_result = {
let hash = ClipboardItem::text_hash(&item.text);
let hash = ClipboardString::text_hash(&text);
hash.to_ne_bytes()
};
let encode_wide = std::slice::from_raw_parts(hash_result.as_ptr().cast::<u16>(), 4);
@ -778,20 +783,17 @@ fn read_from_clipboard_inner(hash_format: u32, metadata_format: u32) -> Result<C
let text = PCWSTR(handle.0 as *const u16);
String::from_utf16_lossy(text.as_wide())
};
let mut item = ClipboardItem {
text,
metadata: None,
};
let Some(hash) = read_hash_from_clipboard(hash_format) else {
return Ok(item);
return Ok(ClipboardItem::new_string(text));
};
let Some(metadata) = read_metadata_from_clipboard(metadata_format) else {
return Ok(item);
return Ok(ClipboardItem::new_string(text));
};
if hash == ClipboardItem::text_hash(&item.text) {
item.metadata = Some(metadata);
if hash == ClipboardString::text_hash(&text) {
Ok(ClipboardItem::new_string_with_metadata(text, metadata))
} else {
Ok(ClipboardItem::new_string(text))
}
Ok(item)
}
}
@ -826,15 +828,15 @@ mod tests {
#[test]
fn test_clipboard() {
let platform = WindowsPlatform::new();
let item = ClipboardItem::new("你好".to_string());
let item = ClipboardItem::new_string("你好".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
let item = ClipboardItem::new("12345".to_string());
let item = ClipboardItem::new_string("12345".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
let item = ClipboardItem::new("abcdef".to_string()).with_metadata(vec![3, 4]);
let item = ClipboardItem::new_string_with_json_metadata("abcdef".to_string(), vec![3, 4]);
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
}

View file

@ -23,7 +23,7 @@ pub(crate) struct Scene {
pub(crate) underlines: Vec<Underline>,
pub(crate) monochrome_sprites: Vec<MonochromeSprite>,
pub(crate) polychrome_sprites: Vec<PolychromeSprite>,
pub(crate) surfaces: Vec<Surface>,
pub(crate) surfaces: Vec<PaintSurface>,
}
impl Scene {
@ -183,7 +183,7 @@ pub(crate) enum Primitive {
Underline(Underline),
MonochromeSprite(MonochromeSprite),
PolychromeSprite(PolychromeSprite),
Surface(Surface),
Surface(PaintSurface),
}
impl Primitive {
@ -231,9 +231,9 @@ struct BatchIterator<'a> {
polychrome_sprites: &'a [PolychromeSprite],
polychrome_sprites_start: usize,
polychrome_sprites_iter: Peekable<slice::Iter<'a, PolychromeSprite>>,
surfaces: &'a [Surface],
surfaces: &'a [PaintSurface],
surfaces_start: usize,
surfaces_iter: Peekable<slice::Iter<'a, Surface>>,
surfaces_iter: Peekable<slice::Iter<'a, PaintSurface>>,
}
impl<'a> Iterator for BatchIterator<'a> {
@ -411,7 +411,7 @@ pub(crate) enum PrimitiveBatch<'a> {
texture_id: AtlasTextureId,
sprites: &'a [PolychromeSprite],
},
Surfaces(&'a [Surface]),
Surfaces(&'a [PaintSurface]),
}
#[derive(Default, Debug, Clone, Eq, PartialEq)]
@ -673,7 +673,7 @@ impl From<PolychromeSprite> for Primitive {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct Surface {
pub(crate) struct PaintSurface {
pub order: DrawOrder,
pub bounds: Bounds<ScaledPixels>,
pub content_mask: ContentMask<ScaledPixels>,
@ -681,20 +681,20 @@ pub(crate) struct Surface {
pub image_buffer: media::core_video::CVImageBuffer,
}
impl Ord for Surface {
impl Ord for PaintSurface {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.order.cmp(&other.order)
}
}
impl PartialOrd for Surface {
impl PartialOrd for PaintSurface {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl From<Surface> for Primitive {
fn from(surface: Surface) -> Self {
impl From<PaintSurface> for Primitive {
fn from(surface: PaintSurface) -> Self {
Primitive::Surface(surface)
}
}

View file

@ -5,10 +5,10 @@ use std::{
};
use crate::{
black, phi, point, quad, rems, AbsoluteLength, Bounds, ContentMask, Corners, CornersRefinement,
CursorStyle, DefiniteLength, Edges, EdgesRefinement, Font, FontFallbacks, FontFeatures,
FontStyle, FontWeight, Hsla, Length, Pixels, Point, PointRefinement, Rgba, SharedString, Size,
SizeRefinement, Styled, TextRun, WindowContext,
black, phi, point, quad, rems, size, AbsoluteLength, Bounds, ContentMask, Corners,
CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, EdgesRefinement, Font,
FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, Pixels, Point,
PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, WindowContext,
};
use collections::HashSet;
use refineable::Refineable;
@ -27,6 +27,121 @@ pub struct DebugBelow;
#[cfg(debug_assertions)]
impl crate::Global for DebugBelow {}
/// How to fit the image into the bounds of the element.
pub enum ObjectFit {
/// The image will be stretched to fill the bounds of the element.
Fill,
/// The image will be scaled to fit within the bounds of the element.
Contain,
/// The image will be scaled to cover the bounds of the element.
Cover,
/// The image will be scaled down to fit within the bounds of the element.
ScaleDown,
/// The image will maintain its original size.
None,
}
impl ObjectFit {
/// Get the bounds of the image within the given bounds.
pub fn get_bounds(
&self,
bounds: Bounds<Pixels>,
image_size: Size<DevicePixels>,
) -> Bounds<Pixels> {
let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
let image_ratio = image_size.width / image_size.height;
let bounds_ratio = bounds.size.width / bounds.size.height;
let result_bounds = match self {
ObjectFit::Fill => bounds,
ObjectFit::Contain => {
let new_size = if bounds_ratio > image_ratio {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
} else {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
}
ObjectFit::ScaleDown => {
// Check if the image is larger than the bounds in either dimension.
if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
// If the image is larger, use the same logic as Contain to scale it down.
let new_size = if bounds_ratio > image_ratio {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
} else {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
} else {
// If the image is smaller than or equal to the container, display it at its original size,
// centered within the container.
let original_size = size(image_size.width, image_size.height);
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
),
size: original_size,
}
}
}
ObjectFit::Cover => {
let new_size = if bounds_ratio > image_ratio {
size(
bounds.size.width,
image_size.height * (bounds.size.width / image_size.width),
)
} else {
size(
image_size.width * (bounds.size.height / image_size.height),
bounds.size.height,
)
};
Bounds {
origin: point(
bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
),
size: new_size,
}
}
ObjectFit::None => Bounds {
origin: bounds.origin,
size: image_size,
},
};
result_bounds
}
}
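
A worked example of `get_bounds` (illustrative numbers only): a 200x100 image in a 100x100 container with `ObjectFit::Contain` scales to 100x50 and is centered vertically.

use gpui::{point, px, size, Bounds, DevicePixels, ObjectFit};

fn contain_example() {
    let container = Bounds {
        origin: point(px(0.), px(0.)),
        size: size(px(100.), px(100.)),
    };
    let image_size = size(DevicePixels(200), DevicePixels(100));

    let fitted = ObjectFit::Contain.get_bounds(container, image_size);
    // The image keeps its 2:1 aspect ratio and is centered in the leftover height.
    assert_eq!(fitted.size, size(px(100.), px(50.)));
    assert_eq!(fitted.origin, point(px(0.), px(25.)));
}
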
/// The CSS styling that can be applied to an element via the `Styled` trait
#[derive(Clone, Refineable, Debug)]
#[refineable(Debug)]

View file

@ -1,26 +1,25 @@
use crate::{
hash, point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
AnyView, AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Bounds, BoxShadow,
Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener,
DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter,
FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, ImageData,
InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke,
KeystrokeEvent, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers,
ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent,
Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler,
PlatformWindow, Point, PolychromeSprite, PromptLevel, Quad, Render, RenderGlyphParams,
RenderImageParams, RenderSvgParams, Replay, ResizeEdge, ScaledPixels, Scene, Shadow,
SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, TaffyLayoutEngine,
Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, View,
VisualContext, WeakView, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
SUBPIXEL_VARIANTS,
FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, InputHandler,
IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId,
LineLayoutIndex, Model, ModelContext, Modifiers, ModifiersChangedEvent, MonochromeSprite,
MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas,
PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PolychromeSprite,
PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams,
Replay, ResizeEdge, ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style,
SubscriberSet, Subscription, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement,
TransformationMatrix, Underline, UnderlineStyle, View, VisualContext, WeakView,
WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations,
WindowOptions, WindowParams, WindowTextSystem, SUBPIXEL_VARIANTS,
};
use anyhow::{anyhow, Context as _, Result};
use collections::{FxHashMap, FxHashSet};
use derive_more::{Deref, DerefMut};
use futures::channel::oneshot;
use futures::{future::Shared, FutureExt};
use futures::FutureExt;
#[cfg(target_os = "macos")]
use media::core_video::CVImageBuffer;
use parking_lot::RwLock;
@ -1956,36 +1955,6 @@ impl<'a> WindowContext<'a> {
self.window.requested_autoscroll.take()
}
/// Remove an asset from GPUI's cache
pub fn remove_cached_asset<A: Asset + 'static>(
&mut self,
source: &A::Source,
) -> Option<A::Output> {
self.asset_cache.remove::<A>(source)
}
/// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
/// Your view will be re-drawn once the asset has finished loading.
///
/// Note that the multiple calls to this method will only result in one `Asset::load` call.
/// The results of that call will be cached, and returned on subsequent uses of this API.
///
/// Use [Self::remove_cached_asset] to reload your asset.
pub fn use_cached_asset<A: Asset + 'static>(
&mut self,
source: &A::Source,
) -> Option<A::Output> {
self.asset_cache.get::<A>(source).or_else(|| {
if let Some(asset) = self.use_asset::<A>(source) {
self.asset_cache
.insert::<A>(source.to_owned(), asset.clone());
Some(asset)
} else {
None
}
})
}
/// Asynchronously load an asset; if the asset hasn't finished loading, this will return None.
/// Your view will be re-drawn once the asset has finished loading.
///
@ -1994,19 +1963,7 @@ impl<'a> WindowContext<'a> {
///
/// The result is cached by GPUI; use [AppContext::remove_cached_asset] to force a reload.
pub fn use_asset<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
let asset_id = (TypeId::of::<A>(), hash(source));
let mut is_first = false;
let task = self
.loading_assets
.remove(&asset_id)
.map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
.unwrap_or_else(|| {
is_first = true;
let future = A::load(source.clone(), self);
let task = self.background_executor().spawn(future).shared();
task
});
let (task, is_first) = self.fetch_asset::<A>(source);
task.clone().now_or_never().or_else(|| {
if is_first {
let parent_id = self.parent_view_id();
@ -2027,12 +1984,9 @@ impl<'a> WindowContext<'a> {
.detach();
}
self.loading_assets.insert(asset_id, Box::new(task));
None
})
}
/// Obtain the current element offset. This method should only be called during the
/// prepaint phase of element drawing.
pub fn element_offset(&self) -> Point<Pixels> {
@ -2610,13 +2564,14 @@ impl<'a> WindowContext<'a> {
}
/// Paint an image into the scene for the next frame at the current z-index.
/// This method will panic if the frame_index is not valid
///
/// This method should only be called as part of the paint phase of element drawing.
pub fn paint_image(
&mut self,
bounds: Bounds<Pixels>,
corner_radii: Corners<Pixels>,
data: Arc<ImageData>,
data: Arc<RenderImage>,
frame_index: usize,
grayscale: bool,
) -> Result<()> {
@ -2639,7 +2594,10 @@ impl<'a> WindowContext<'a> {
.get_or_insert_with(&params.clone().into(), &mut || {
Ok(Some((
data.size(frame_index),
Cow::Borrowed(data.as_bytes(frame_index)),
Cow::Borrowed(
data.as_bytes(frame_index)
.expect("It's the caller's job to pass a valid frame index"),
),
)))
})?
.expect("Callback above only returns Some");
@ -2665,6 +2623,8 @@ impl<'a> WindowContext<'a> {
/// This method should only be called as part of the paint phase of element drawing.
#[cfg(target_os = "macos")]
pub fn paint_surface(&mut self, bounds: Bounds<Pixels>, image_buffer: CVImageBuffer) {
use crate::PaintSurface;
debug_assert_eq!(
self.window.draw_phase,
DrawPhase::Paint,
@ -2674,15 +2634,12 @@ impl<'a> WindowContext<'a> {
let scale_factor = self.scale_factor();
let bounds = bounds.scale(scale_factor);
let content_mask = self.content_mask().scale(scale_factor);
self.window
.next_frame
.scene
.insert_primitive(crate::Surface {
order: 0,
bounds,
content_mask,
image_buffer,
});
self.window.next_frame.scene.insert_primitive(PaintSurface {
order: 0,
bounds,
content_mask,
image_buffer,
});
}
#[must_use]