Detailed changes
@@ -52,7 +52,7 @@ version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
dependencies = [
- "getrandom 0.2.10",
+ "getrandom 0.2.15",
"once_cell",
"version_check",
]
@@ -65,7 +65,7 @@ checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
"cfg-if",
"const-random",
- "getrandom 0.2.10",
+ "getrandom 0.2.15",
"once_cell",
"version_check",
"zerocopy",
@@ -93,8 +93,8 @@ dependencies = [
"miow",
"parking_lot",
"piper",
- "polling 3.3.2",
- "regex-automata 0.4.5",
+ "polling 3.7.2",
+ "regex-automata 0.4.7",
"rustix-openpty",
"serde",
"signal-hook",
@@ -117,20 +117,19 @@ checksum = "4aa90d7ce82d4be67b64039a3d588d38dbcc6736577de4a847025ce5b0c468d1"
[[package]]
name = "allocator-api2"
-version = "0.2.16"
+version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
[[package]]
name = "alsa"
-version = "0.7.1"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47"
+checksum = "37fe60779335388a88c01ac6c3be40304d1e349de3ada3b15f7808bb90fa9dce"
dependencies = [
"alsa-sys",
- "bitflags 1.3.2",
+ "bitflags 2.6.0",
"libc",
- "nix 0.24.3",
]
[[package]]
@@ -170,20 +169,6 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
-[[package]]
-name = "anstream"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c"
-dependencies = [
- "anstyle",
- "anstyle-parse",
- "anstyle-query",
- "anstyle-wincon 2.1.0",
- "colorchoice",
- "utf8parse",
-]
-
[[package]]
name = "anstream"
version = "0.6.15"
@@ -193,7 +178,7 @@ dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
- "anstyle-wincon 3.0.4",
+ "anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
@@ -201,36 +186,26 @@ dependencies = [
[[package]]
name = "anstyle"
-version = "1.0.7"
+version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
[[package]]
name = "anstyle-parse"
-version = "0.2.1"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
+checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
-dependencies = [
- "windows-sys 0.48.0",
-]
-
-[[package]]
-name = "anstyle-wincon"
-version = "2.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd"
+checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
dependencies = [
- "anstyle",
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -294,14 +269,14 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.59",
+ "syn 2.0.72",
]
[[package]]
name = "arrayref"
-version = "0.3.7"
+version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545"
+checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a"
[[package]]
name = "arrayvec"
@@ -327,7 +302,7 @@ version = "0.38.0+1.3.281"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bb44936d800fea8f016d7f2311c6a4f97aebd5dc86f09906139ec848cf3a46f"
dependencies = [
- "libloading 0.8.0",
+ "libloading",
]
[[package]]
@@ -337,7 +312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52bca67b61cb81e5553babde81b8211f713cb6db79766f80168f3e5f40ea6c82"
dependencies = [
"ash",
- "raw-window-handle 0.6.0",
+ "raw-window-handle",
"raw-window-metal",
]
@@ -347,7 +322,7 @@ version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfe7e0dd0ac5a401dc116ed9f9119cf9decc625600474cb41f0fc0a0050abc9a"
dependencies = [
- "async-fs 2.1.1",
+ "async-fs 2.1.2",
"async-net 2.0.0",
"enumflags2",
"futures-channel",
@@ -422,13 +397,14 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
+ "smallvec",
"smol",
"telemetry_events",
"terminal",
"terminal_view",
"text",
"theme",
- "toml 0.8.16",
+ "toml 0.8.19",
"ui",
"unindent",
"util",
@@ -467,8 +443,8 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20cd0e2e25ea8e5f7e9df04578dc6cf5c83577fd09b1a46aaf5c85e1c33f2a7e"
dependencies = [
- "event-listener 5.1.0",
- "event-listener-strategy 0.5.0",
+ "event-listener 5.3.1",
+ "event-listener-strategy",
"futures-core",
"pin-project-lite",
]
@@ -486,13 +462,12 @@ dependencies = [
[[package]]
name = "async-channel"
-version = "2.2.0"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3"
+checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
dependencies = [
"concurrent-queue",
- "event-listener 5.1.0",
- "event-listener-strategy 0.5.0",
+ "event-listener-strategy",
"futures-core",
"pin-project-lite",
]
@@ -536,15 +511,14 @@ dependencies = [
[[package]]
name = "async-executor"
-version = "1.5.1"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb"
+checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7"
dependencies = [
- "async-lock 2.8.0",
"async-task",
"concurrent-queue",
- "fastrand 1.9.0",
- "futures-lite 1.13.0",
+ "fastrand 2.1.0",
+ "futures-lite 2.3.0",
"slab",
]
@@ -562,27 +536,27 @@ dependencies = [
[[package]]
name = "async-fs"
-version = "2.1.1"
+version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc19683171f287921f2405677dd2ed2549c3b3bda697a563ebc3a121ace2aba1"
+checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a"
dependencies = [
- "async-lock 3.3.0",
+ "async-lock 3.4.0",
"blocking",
- "futures-lite 2.2.0",
+ "futures-lite 2.3.0",
]
[[package]]
name = "async-global-executor"
-version = "2.3.1"
+version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776"
+checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
dependencies = [
- "async-channel 1.9.0",
+ "async-channel 2.3.1",
"async-executor",
- "async-io 1.13.0",
- "async-lock 2.8.0",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
"blocking",
- "futures-lite 1.13.0",
+ "futures-lite 2.3.0",
"once_cell",
]
@@ -602,24 +576,24 @@ dependencies = [
"polling 2.8.0",
"rustix 0.37.27",
"slab",
- "socket2 0.4.9",
+ "socket2 0.4.10",
"waker-fn",
]
[[package]]
name = "async-io"
-version = "2.3.1"
+version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65"
+checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964"
dependencies = [
- "async-lock 3.3.0",
+ "async-lock 3.4.0",
"cfg-if",
"concurrent-queue",
"futures-io",
- "futures-lite 2.2.0",
+ "futures-lite 2.3.0",
"parking",
- "polling 3.3.2",
- "rustix 0.38.32",
+ "polling 3.7.2",
+ "rustix 0.38.34",
"slab",
"tracing",
"windows-sys 0.52.0",
@@ -636,12 +610,12 @@ dependencies = [
[[package]]
name = "async-lock"
-version = "3.3.0"
+version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b"
+checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18"
dependencies = [
- "event-listener 4.0.3",
- "event-listener-strategy 0.4.0",
+ "event-listener 5.3.1",
+ "event-listener-strategy",
"pin-project-lite",
]
@@ -659,12 +633,11 @@ dependencies = [
[[package]]
name = "async-net"
-version = "1.7.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4051e67316bc7eff608fe723df5d32ed639946adcd69e07df41fd42a7b411f1f"
+checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f"
dependencies = [
"async-io 1.13.0",
- "autocfg",
"blocking",
"futures-lite 1.13.0",
]
@@ -675,9 +648,9 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7"
dependencies = [
- "async-io 2.3.1",
+ "async-io 2.3.3",
"blocking",
- "futures-lite 2.2.0",
+ "futures-lite 2.3.0",
]
[[package]]
@@ -691,37 +664,38 @@ dependencies = [
[[package]]
name = "async-process"
-version = "1.7.0"
+version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9"
+checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88"
dependencies = [
"async-io 1.13.0",
"async-lock 2.8.0",
- "autocfg",
+ "async-signal",
"blocking",
"cfg-if",
- "event-listener 2.5.3",
+ "event-listener 3.1.0",
"futures-lite 1.13.0",
- "rustix 0.37.27",
- "signal-hook",
+ "rustix 0.38.34",
"windows-sys 0.48.0",
]
[[package]]
name = "async-process"
-version = "2.1.0"
+version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "451e3cf68011bd56771c79db04a9e333095ab6349f7e47592b788e9b98720cc8"
+checksum = "f7eda79bbd84e29c2b308d1dc099d7de8dcc7035e48f4bf5dc4a531a44ff5e2a"
dependencies = [
- "async-channel 2.2.0",
- "async-io 2.3.1",
- "async-lock 3.3.0",
+ "async-channel 2.3.1",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
"async-signal",
+ "async-task",
"blocking",
"cfg-if",
- "event-listener 5.1.0",
- "futures-lite 2.2.0",
- "rustix 0.38.32",
+ "event-listener 5.3.1",
+ "futures-lite 2.3.0",
+ "rustix 0.38.34",
+ "tracing",
"windows-sys 0.52.0",
]
@@ -738,31 +712,31 @@ dependencies = [
[[package]]
name = "async-recursion"
-version = "1.0.5"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
+checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.59",
+ "syn 2.0.72",
]
[[package]]
name = "async-signal"
-version = "0.2.5"
+version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5"
+checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32"
dependencies = [
- "async-io 2.3.1",
- "async-lock 2.8.0",
+ "async-io 2.3.3",
+ "async-lock 3.4.0",
"atomic-waker",
"cfg-if",
"futures-core",
"futures-io",
- "rustix 0.38.32",
+ "rustix 0.38.34",
"signal-hook-registry",
"slab",
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -776,7 +750,7 @@ dependencies = [
"async-global-executor",
"async-io 1.13.0",
"async-lock 2.8.0",
- "async-process 1.7.0",
+ "async-process 1.8.1",
"crossbeam-utils",
"futures-channel",
"futures-core",
@@ -812,7 +786,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.59",
+ "syn 2.0.72",
]
[[package]]
@@ -864,7 +838,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.59",
+ "syn 2.0.72",
]
[[package]]
@@ -899,7 +873,7 @@ checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52"
dependencies = [
"async-compression",
"crc32fast",
- "futures-lite 2.2.0",
+ "futures-lite 2.3.0",
"pin-project",
"thiserror",
]
@@ -910,7 +884,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233"
dependencies = [
- "bytes 1.5.0",
+ "bytes 1.7.1",
"futures-sink",
"futures-util",
"memchr",
@@ -934,9 +908,9 @@ checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
[[package]]
name = "atomic-waker"
-version = "1.1.1"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
name = "audio"
@@ -980,9 +954,9 @@ dependencies = [
[[package]]
name = "autocfg"
-version = "1.1.0"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]]
name = "av1-grain"
@@ -1009,9 +983,9 @@ dependencies = [
[[package]]
name = "aws-config"
-version = "1.1.5"
+version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7af266887e24cd5f6d2ea7433cacd25dcd4773b7f70e488701968a7cdf51df57"
+checksum = "caf6cfe2881cb1fcbba9ae946fb9a6480d3b7a714ca84c74925014a89ef3387a"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1025,23 +999,24 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.5.0",
- "fastrand 2.0.0",
+ "bytes 1.7.1",
+ "fastrand 2.1.0",
"hex",
- "http 0.2.9",
+ "http 0.2.12",
"hyper",
"ring",
"time",
"tokio",
"tracing",
+ "url",
"zeroize",
]
[[package]]
name = "aws-credential-types"
-version = "1.1.5"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d56f287a9e65e4914bfedb5b22c056b65e4c232fca512d5509a9df36386759f"
+checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9"
dependencies = [
"aws-smithy-async",
"aws-smithy-runtime-api",
@@ -1051,9 +1026,9 @@ dependencies = [
[[package]]
name = "aws-runtime"
-version = "1.1.5"
+version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d6a29eca8ea8982028a4df81883e7001e250a21d323b86418884b5345950a4b"
+checksum = "87c5f920ffd1e0526ec9e70e50bf444db50b204395a0fa7016bbf9e31ea1698f"
dependencies = [
"aws-credential-types",
"aws-sigv4",
@@ -1063,10 +1038,10 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.5.0",
- "fastrand 2.0.0",
- "http 0.2.9",
- "http-body",
+ "bytes 1.7.1",
+ "fastrand 2.1.0",
+ "http 0.2.12",
+ "http-body 0.4.6",
"percent-encoding",
"pin-project-lite",
"tracing",
@@ -1075,10 +1050,11 @@ dependencies = [
[[package]]
name = "aws-sdk-s3"
-version = "1.15.0"
+version = "1.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c977e92277652aefb9a76a0fca652b26757d6845dce0d7bf4426da80f13d85b0"
+checksum = "558bbcec8db82a1a8af1610afcb3b10d00652d25ad366a0558eecdff2400a1d1"
dependencies = [
+ "ahash 0.8.11",
"aws-credential-types",
"aws-runtime",
"aws-sigv4",
@@ -1092,21 +1068,26 @@ dependencies = [
"aws-smithy-types",
"aws-smithy-xml",
"aws-types",
- "bytes 1.5.0",
- "http 0.2.9",
- "http-body",
+ "bytes 1.7.1",
+ "fastrand 2.1.0",
+ "hex",
+ "hmac",
+ "http 0.2.12",
+ "http-body 0.4.6",
+ "lru",
"once_cell",
"percent-encoding",
"regex-lite",
+ "sha2",
"tracing",
"url",
]
[[package]]
name = "aws-sdk-sso"
-version = "1.13.0"
+version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d7f527c7b28af1a641f7d89f9e6a4863e8ec00f39d2b731b056fc5ec5ce829"
+checksum = "6acca681c53374bf1d9af0e317a41d12a44902ca0f2d1e10e5cb5bb98ed74f35"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1117,8 +1098,8 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.5.0",
- "http 0.2.9",
+ "bytes 1.7.1",
+ "http 0.2.12",
"once_cell",
"regex-lite",
"tracing",
@@ -1126,9 +1107,9 @@ dependencies = [
[[package]]
name = "aws-sdk-ssooidc"
-version = "1.13.0"
+version = "1.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d0be3224cd574ee8ab5fd7c32087876f25c134c27ac603fcb38669ed8d346b0"
+checksum = "b79c6bdfe612503a526059c05c9ccccbf6bd9530b003673cb863e547fd7c0c9a"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1139,8 +1120,8 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
- "bytes 1.5.0",
- "http 0.2.9",
+ "bytes 1.7.1",
+ "http 0.2.12",
"once_cell",
"regex-lite",
"tracing",
@@ -1148,9 +1129,9 @@ dependencies = [
[[package]]
name = "aws-sdk-sts"
-version = "1.13.0"
+version = "1.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b3167c60d82a13bbaef569da06041644ff41e85c6377e5dad53fa2526ccfe9d"
+checksum = "32e6ecdb2bd756f3b2383e6f0588dc10a4e65f5d551e70a56e0bfe0c884673ce"
dependencies = [
"aws-credential-types",
"aws-runtime",
@@ -1163,7 +1144,7 @@ dependencies = [
"aws-smithy-types",
"aws-smithy-xml",
"aws-types",
- "http 0.2.9",
+ "http 0.2.12",
"once_cell",
"regex-lite",
"tracing",
@@ -1171,22 +1152,22 @@ dependencies = [
[[package]]
name = "aws-sigv4"
-version = "1.1.5"
+version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54b1cbe0eee57a213039088dbdeca7be9352f24e0d72332d961e8a1cb388f82d"
+checksum = "5df1b0fa6be58efe9d4ccc257df0a53b89cd8909e86591a13ca54817c87517be"
dependencies = [
"aws-credential-types",
"aws-smithy-eventstream",
"aws-smithy-http",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"crypto-bigint 0.5.5",
"form_urlencoded",
"hex",
"hmac",
- "http 0.2.9",
- "http 1.0.0",
+ "http 0.2.12",
+ "http 1.1.0",
"once_cell",
"p256",
"percent-encoding",
@@ -1200,9 +1181,9 @@ dependencies = [
[[package]]
name = "aws-smithy-async"
-version = "1.1.5"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "426a5bc369ca7c8d3686439e46edc727f397a47ab3696b13f3ae8c81b3b36132"
+checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c"
dependencies = [
"futures-util",
"pin-project-lite",
@@ -1211,18 +1192,18 @@ dependencies = [
[[package]]
name = "aws-smithy-checksums"
-version = "0.60.5"
+version = "0.60.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ee554133eca2611b66d23548e48f9b44713befdb025ab76bc00185b878397a1"
+checksum = "48c4134cf3adaeacff34d588dbe814200357b0c466d730cf1c0d8054384a2de4"
dependencies = [
"aws-smithy-http",
"aws-smithy-types",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"crc32c",
"crc32fast",
"hex",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
"md-5",
"pin-project-lite",
"sha1",
@@ -1237,24 +1218,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858"
dependencies = [
"aws-smithy-types",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"crc32fast",
]
[[package]]
name = "aws-smithy-http"
-version = "0.60.5"
+version = "0.60.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85d6a0619f7b67183067fa3b558f94f90753da2df8c04aeb7336d673f804b0b8"
+checksum = "d9cd0ae3d97daa0a2bf377a4d8e8e1362cae590c4a1aad0d40058ebca18eb91e"
dependencies = [
"aws-smithy-eventstream",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"bytes-utils",
"futures-core",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
"once_cell",
"percent-encoding",
"pin-project-lite",
@@ -1264,18 +1245,18 @@ dependencies = [
[[package]]
name = "aws-smithy-json"
-version = "0.60.5"
+version = "0.60.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1c1b5186b6f5c579bf0de1bcca9dd3d946d6d51361ea1d18131f6a0b64e13ae"
+checksum = "4683df9469ef09468dad3473d129960119a0d3593617542b7d52086c8486f2d6"
dependencies = [
"aws-smithy-types",
]
[[package]]
name = "aws-smithy-query"
-version = "0.60.5"
+version = "0.60.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1c0a2ce65882e788d2cf83ff28b9b16918de0460c47bf66c5da4f6c17b4c9694"
+checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb"
dependencies = [
"aws-smithy-types",
"urlencoding",
@@ -1283,19 +1264,21 @@ dependencies = [
[[package]]
name = "aws-smithy-runtime"
-version = "1.1.5"
+version = "1.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4cb6b3afa5fc9825a75675975dcc3e21764b5476bc91dbc63df4ea3d30a576e"
+checksum = "ce87155eba55e11768b8c1afa607f3e864ae82f03caf63258b37455b0ad02537"
dependencies = [
"aws-smithy-async",
"aws-smithy-http",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "bytes 1.5.0",
- "fastrand 2.0.0",
+ "bytes 1.7.1",
+ "fastrand 2.1.0",
"h2",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
+ "http-body 1.0.1",
+ "httparse",
"hyper",
"hyper-rustls",
"once_cell",
@@ -1308,14 +1291,15 @@ dependencies = [
[[package]]
name = "aws-smithy-runtime-api"
-version = "1.1.5"
+version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23165433e80c04e8c09cee66d171292ae7234bae05fa9d5636e33095eae416b2"
+checksum = "30819352ed0a04ecf6a2f3477e344d2d1ba33d43e0f09ad9047c12e0d923616f"
dependencies = [
"aws-smithy-async",
"aws-smithy-types",
- "bytes 1.5.0",
- "http 0.2.9",
+ "bytes 1.7.1",
+ "http 0.2.12",
+ "http 1.1.0",
"pin-project-lite",
"tokio",
"tracing",
@@ -1324,16 +1308,19 @@ dependencies = [
[[package]]
name = "aws-smithy-types"
-version = "1.1.5"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c94a5bec34850b92c9a054dad57b95c1d47f25125f55973e19f6ad788f0381ff"
+checksum = "cfe321a6b21f5d8eabd0ade9c55d3d0335f3c3157fc2b3e87f05f34b539e4df5"
dependencies = [
"base64-simd",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"bytes-utils",
"futures-core",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http 1.1.0",
+ "http-body 0.4.6",
+ "http-body 1.0.1",
+ "http-body-util",
"itoa",
"num-integer",
"pin-project-lite",
@@ -1347,24 +1334,23 @@ dependencies = [
[[package]]
name = "aws-smithy-xml"
-version = "0.60.5"
+version = "0.60.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d16f94c9673412b7a72e3c3efec8de89081c320bf59ea12eed34c417a62ad600"
+checksum = "d123fbc2a4adc3c301652ba8e149bf4bc1d1725affb9784eb20c953ace06bf55"
dependencies = [
"xmlparser",
]
[[package]]
name = "aws-types"
-version = "1.1.5"
+version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ff7e122ee50ca962e9de91f5850cc37e2184b1219611eef6d44aa85929b54f6"
+checksum = "5221b91b3e441e6675310829fd8984801b772cb1546ef6c0e54dec9f1ac13fef"
dependencies = [
"aws-credential-types",
"aws-smithy-async",
"aws-smithy-runtime-api",
"aws-smithy-types",
- "http 0.2.9",
"rustc_version",
"tracing",
]
@@ -1379,11 +1365,11 @@ dependencies = [
"axum-core",
"base64 0.21.7",
"bitflags 1.3.2",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"futures-util",
"headers",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
"hyper",
"itoa",
"matchit",
@@ -1412,10 +1398,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
dependencies = [
"async-trait",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"futures-util",
- "http 0.2.9",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
"mime",
"rustversion",
"tower-layer",
@@ -1429,9 +1415,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9a320103719de37b7b4da4c8eb629d4573f6bcfd3dfe80d3208806895ccf81d"
dependencies = [
"axum",
- "bytes 1.5.0",
+ "bytes 1.7.1",
"futures-util",
- "http 0.2.9",
+ "http 0.2.12",
"mime",
"pin-project-lite",
"serde",
@@ -1520,45 +1506,45 @@ dependencies = [
[[package]]
name = "bindgen"
-version = "0.64.0"
+version = "0.65.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4"
+checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5"
dependencies = [
"bitflags 1.3.2",
"cexpr",
"clang-sys",
"lazy_static",
"lazycell",
+ "log",
"peeking_take_while",
+ "prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
- "syn 1.0.109",
+ "syn 2.0.72",
+ "which 4.4.2",
]
[[package]]
name = "bindgen"
-version = "0.65.1"
+version = "0.69.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5"
+checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0"
dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.6.0",
"cexpr",
"clang-sys",
+ "itertools 0.12.1",
"lazy_static",
"lazycell",
- "log",
- "peeking_take_while",
- "prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
- "syn 2.0.59",
- "which 4.4.2",
+ "syn 2.0.72",
]
[[package]]
@@ -66,6 +66,7 @@ semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
+smallvec.workspace = true
smol.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
@@ -34,12 +34,12 @@ use editor::{
use editor::{display_map::CreaseId, FoldPlaceholder};
use fs::Fs;
use gpui::{
- canvas, div, percentage, point, pulsating_between, Action, Animation, AnimationExt, AnyElement,
- AnyView, AppContext, AsyncWindowContext, ClipboardItem, Context as _, DismissEvent, Empty,
- Entity, EntityId, EventEmitter, FocusHandle, FocusableView, FontWeight, InteractiveElement,
- IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render, SharedString,
- StatefulInteractiveElement, Styled, Subscription, Task, Transformation, UpdateGlobal, View,
- ViewContext, VisualContext, WeakView, WindowContext,
+ canvas, div, img, percentage, point, pulsating_between, size, Action, Animation, AnimationExt,
+ AnyElement, AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem,
+ Context as _, DismissEvent, Empty, Entity, EntityId, EventEmitter, FocusHandle, FocusableView,
+ FontWeight, InteractiveElement, IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render,
+ RenderImage, SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task,
+ Transformation, UpdateGlobal, View, VisualContext, WeakView, WindowContext,
};
use indexed_docs::IndexedDocsStore;
use language::{
@@ -1715,6 +1715,7 @@ pub struct ContextEditor {
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
editor: View<Editor>,
blocks: HashSet<CustomBlockId>,
+ image_blocks: HashSet<CustomBlockId>,
scroll_position: Option<ScrollPosition>,
remote_id: Option<workspace::ViewId>,
pending_slash_command_creases: HashMap<Range<language::Anchor>, CreaseId>,
@@ -1773,6 +1774,7 @@ impl ContextEditor {
editor,
lsp_adapter_delegate,
blocks: Default::default(),
+ image_blocks: Default::default(),
scroll_position: None,
remote_id: None,
fs,
@@ -1789,6 +1791,7 @@ impl ContextEditor {
show_accept_terms: false,
};
this.update_message_headers(cx);
+ this.update_image_blocks(cx);
this.insert_slash_command_output_sections(sections, cx);
this
}
@@ -2161,6 +2164,7 @@ impl ContextEditor {
match event {
ContextEvent::MessagesEdited => {
self.update_message_headers(cx);
+ self.update_image_blocks(cx);
self.context.update(cx, |context, cx| {
context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
});
@@ -3305,7 +3309,7 @@ impl ContextEditor {
}
if spanned_messages > 1 {
- cx.write_to_clipboard(ClipboardItem::new(copied_text));
+ cx.write_to_clipboard(ClipboardItem::new_string(copied_text));
return;
}
}
@@ -3313,6 +3317,102 @@ impl ContextEditor {
cx.propagate();
}
+ fn paste(&mut self, _: &editor::actions::Paste, cx: &mut ViewContext<Self>) {
+ let images = if let Some(item) = cx.read_from_clipboard() {
+ item.into_entries()
+ .filter_map(|entry| {
+ if let ClipboardEntry::Image(image) = entry {
+ Some(image)
+ } else {
+ None
+ }
+ })
+ .collect()
+ } else {
+ Vec::new()
+ };
+
+ if images.is_empty() {
+ // If we didn't find any valid image data to paste, propagate to let normal pasting happen.
+ cx.propagate();
+ } else {
+ let mut image_positions = Vec::new();
+ self.editor.update(cx, |editor, cx| {
+ editor.transact(cx, |editor, cx| {
+ let edits = editor
+ .selections
+ .all::<usize>(cx)
+ .into_iter()
+ .map(|selection| (selection.start..selection.end, "\n"));
+ editor.edit(edits, cx);
+
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ for selection in editor.selections.all::<usize>(cx) {
+ image_positions.push(snapshot.anchor_before(selection.end));
+ }
+ });
+ });
+
+ self.context.update(cx, |context, cx| {
+ for image in images {
+ let image_id = image.id();
+ context.insert_image(image, cx);
+ for image_position in image_positions.iter() {
+ context.insert_image_anchor(image_id, image_position.text_anchor, cx);
+ }
+ }
+ });
+ }
+ }
+
+ fn update_image_blocks(&mut self, cx: &mut ViewContext<Self>) {
+ self.editor.update(cx, |editor, cx| {
+ let buffer = editor.buffer().read(cx).snapshot(cx);
+ let excerpt_id = *buffer.as_singleton().unwrap().0;
+ let old_blocks = std::mem::take(&mut self.image_blocks);
+ let new_blocks = self
+ .context
+ .read(cx)
+ .images(cx)
+ .filter_map(|image| {
+ const MAX_HEIGHT_IN_LINES: u32 = 8;
+ let anchor = buffer.anchor_in_excerpt(excerpt_id, image.anchor).unwrap();
+ let image = image.render_image.clone();
+ anchor.is_valid(&buffer).then(|| BlockProperties {
+ position: anchor,
+ height: MAX_HEIGHT_IN_LINES,
+ style: BlockStyle::Sticky,
+ render: Box::new(move |cx| {
+ let image_size = size_for_image(
+ &image,
+ size(
+ cx.max_width - cx.gutter_dimensions.full_width(),
+ MAX_HEIGHT_IN_LINES as f32 * cx.line_height,
+ ),
+ );
+ h_flex()
+ .pl(cx.gutter_dimensions.full_width())
+ .child(
+ img(image.clone())
+ .object_fit(gpui::ObjectFit::ScaleDown)
+ .w(image_size.width)
+ .h(image_size.height),
+ )
+ .into_any_element()
+ }),
+
+ disposition: BlockDisposition::Above,
+ priority: 0,
+ })
+ })
+ .collect::<Vec<_>>();
+
+ editor.remove_blocks(old_blocks, None, cx);
+ let ids = editor.insert_blocks(new_blocks, None, cx);
+ self.image_blocks = HashSet::from_iter(ids);
+ });
+ }
+
fn split(&mut self, _: &Split, cx: &mut ViewContext<Self>) {
self.context.update(cx, |context, cx| {
let selections = self.editor.read(cx).selections.disjoint_anchors();
@@ -3529,6 +3629,7 @@ impl Render for ContextEditor {
.capture_action(cx.listener(ContextEditor::cancel))
.capture_action(cx.listener(ContextEditor::save))
.capture_action(cx.listener(ContextEditor::copy))
+ .capture_action(cx.listener(ContextEditor::paste))
.capture_action(cx.listener(ContextEditor::cycle_message_role))
.capture_action(cx.listener(ContextEditor::confirm_command))
.on_action(cx.listener(ContextEditor::assist))
@@ -4556,6 +4657,30 @@ fn token_state(context: &Model<Context>, cx: &AppContext) -> Option<TokenState>
Some(token_state)
}
+fn size_for_image(data: &RenderImage, max_size: Size<Pixels>) -> Size<Pixels> {
+ let image_size = data
+ .size(0)
+ .map(|dimension| Pixels::from(u32::from(dimension)));
+ let image_ratio = image_size.width / image_size.height;
+ let bounds_ratio = max_size.width / max_size.height;
+
+ if image_size.width > max_size.width || image_size.height > max_size.height {
+ if bounds_ratio > image_ratio {
+ size(
+ image_size.width * (max_size.height / image_size.height),
+ max_size.height,
+ )
+ } else {
+ size(
+ max_size.width,
+ image_size.height * (max_size.width / image_size.width),
+ )
+ }
+ } else {
+ size(image_size.width, image_size.height)
+ }
+}
+
enum ConfigurationError {
NoProvider,
ProviderNotAuthenticated,
@@ -13,27 +13,31 @@ use editor::Editor;
use fs::{Fs, RemoveOptions};
use futures::{
future::{self, Shared},
+ stream::FuturesUnordered,
FutureExt, StreamExt,
};
use gpui::{
- AppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, Task, UpdateGlobal,
- View, WeakView,
+ AppContext, Context as _, EventEmitter, Image, Model, ModelContext, RenderImage, Subscription,
+ Task, UpdateGlobal, View, WeakView,
};
+
use language::{
AnchorRangeExt, Bias, Buffer, BufferSnapshot, LanguageRegistry, OffsetRangeExt, ParseStatus,
Point, ToOffset,
};
use language_model::{
- LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelTool,
- Role,
+ LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
+ LanguageModelTool, Role,
};
use open_ai::Model as OpenAiModel;
-use paths::contexts_dir;
+use paths::{context_images_dir, contexts_dir};
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use smallvec::SmallVec;
use std::{
cmp::{self, Ordering},
+ collections::hash_map,
fmt::Debug,
iter, mem,
ops::Range,
@@ -319,8 +323,23 @@ pub struct MessageMetadata {
timestamp: clock::Lamport,
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug)]
+pub struct MessageImage {
+ image_id: u64,
+ image: Shared<Task<Option<LanguageModelImage>>>,
+}
+
+impl PartialEq for MessageImage {
+ fn eq(&self, other: &Self) -> bool {
+ self.image_id == other.image_id
+ }
+}
+
+impl Eq for MessageImage {}
+
+#[derive(Clone, Debug)]
pub struct Message {
+ pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
pub offset_range: Range<usize>,
pub index_range: Range<usize>,
pub id: MessageId,
@@ -331,13 +350,55 @@ pub struct Message {
impl Message {
fn to_request_message(&self, buffer: &Buffer) -> LanguageModelRequestMessage {
+ let mut content = Vec::new();
+
+ let mut range_start = self.offset_range.start;
+ for (image_offset, message_image) in self.image_offsets.iter() {
+ if *image_offset != range_start {
+ content.push(
+ buffer
+ .text_for_range(range_start..*image_offset)
+ .collect::<String>()
+ .into(),
+ )
+ }
+
+ if let Some(image) = message_image.image.clone().now_or_never().flatten() {
+ content.push(language_model::MessageContent::Image(image));
+ }
+
+ range_start = *image_offset;
+ }
+ if range_start != self.offset_range.end {
+ content.push(
+ buffer
+ .text_for_range(range_start..self.offset_range.end)
+ .collect::<String>()
+ .into(),
+ )
+ }
+
LanguageModelRequestMessage {
role: self.role,
- content: buffer.text_for_range(self.offset_range.clone()).collect(),
+ content,
}
}
}
+#[derive(Clone, Debug)]
+pub struct ImageAnchor {
+ pub anchor: language::Anchor,
+ pub image_id: u64,
+ pub render_image: Arc<RenderImage>,
+ pub image: Shared<Task<Option<LanguageModelImage>>>,
+}
+
+impl PartialEq for ImageAnchor {
+ fn eq(&self, other: &Self) -> bool {
+ self.image_id == other.image_id
+ }
+}
+
struct PendingCompletion {
id: usize,
_task: Task<()>,
@@ -605,6 +666,8 @@ pub struct Context {
finished_slash_commands: HashSet<SlashCommandId>,
slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
message_anchors: Vec<MessageAnchor>,
+ images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
+ image_anchors: Vec<ImageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
summary: Option<ContextSummary>,
pending_summary: Task<Option<()>>,
@@ -677,6 +740,8 @@ impl Context {
pending_ops: Vec::new(),
operations: Vec::new(),
message_anchors: Default::default(),
+ image_anchors: Default::default(),
+ images: Default::default(),
messages_metadata: Default::default(),
pending_slash_commands: Vec::new(),
finished_slash_commands: HashSet::default(),
@@ -736,6 +801,11 @@ impl Context {
id: message.id,
start: message.offset_range.start,
metadata: self.messages_metadata[&message.id].clone(),
+ image_offsets: message
+ .image_offsets
+ .iter()
+ .map(|image_offset| (image_offset.0, image_offset.1.image_id))
+ .collect(),
})
.collect(),
summary: self
@@ -1339,7 +1409,7 @@ impl Context {
request.messages.push(LanguageModelRequestMessage {
role: Role::User,
- content: prompt,
+ content: vec![prompt.into()],
});
// Invoke the model to get its edit suggestions for this workflow step.
@@ -1743,13 +1813,15 @@ impl Context {
}
pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
- let messages = self
+ let buffer = self.buffer.read(cx);
+ let request_messages = self
.messages(cx)
- .filter(|message| matches!(message.status, MessageStatus::Done))
- .map(|message| message.to_request_message(self.buffer.read(cx)));
+ .filter(|message| message.status == MessageStatus::Done)
+ .map(|message| message.to_request_message(&buffer))
+ .collect();
LanguageModelRequest {
- messages: messages.collect(),
+ messages: request_messages,
stop: vec![],
temperature: 1.0,
}
@@ -1847,6 +1919,55 @@ impl Context {
}
}
+ pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
+ if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
+ entry.insert((
+ image.to_image_data(cx).log_err()?,
+ LanguageModelImage::from_image(image, cx).shared(),
+ ));
+ }
+
+ Some(())
+ }
+
+ pub fn insert_image_anchor(
+ &mut self,
+ image_id: u64,
+ anchor: language::Anchor,
+ cx: &mut ModelContext<Self>,
+ ) -> bool {
+ cx.emit(ContextEvent::MessagesEdited);
+
+ let buffer = self.buffer.read(cx);
+ let insertion_ix = match self
+ .image_anchors
+ .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
+ {
+ Ok(ix) => ix,
+ Err(ix) => ix,
+ };
+
+ if let Some((render_image, image)) = self.images.get(&image_id) {
+ self.image_anchors.insert(
+ insertion_ix,
+ ImageAnchor {
+ anchor,
+ image_id,
+ image: image.clone(),
+ render_image: render_image.clone(),
+ },
+ );
+
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
+ self.image_anchors.iter().cloned()
+ }
+
pub fn split_message(
&mut self,
range: Range<usize>,
@@ -1865,7 +1986,10 @@ impl Context {
let mut edited_buffer = false;
let mut suffix_start = None;
- if range.start > message.offset_range.start && range.end < message.offset_range.end - 1
+
+ // TODO: why did this start panicking?
+ if range.start > message.offset_range.start
+ && range.end < message.offset_range.end.saturating_sub(1)
{
if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
suffix_start = Some(range.end + 1);
@@ -2007,7 +2131,9 @@ impl Context {
.map(|message| message.to_request_message(self.buffer.read(cx)))
.chain(Some(LanguageModelRequestMessage {
role: Role::User,
- content: "Summarize the context into a short title without punctuation.".into(),
+ content: vec![
+ "Summarize the context into a short title without punctuation.".into(),
+ ],
}));
let request = LanguageModelRequest {
messages: messages.collect(),
@@ -2109,25 +2235,55 @@ impl Context {
pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
let buffer = self.buffer.read(cx);
- let mut message_anchors = self.message_anchors.iter().enumerate().peekable();
+ let messages = self.message_anchors.iter().enumerate();
+ let images = self.image_anchors.iter();
+
+ Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
+ }
+
+ pub fn messages_from_iters<'a>(
+ buffer: &'a Buffer,
+ metadata: &'a HashMap<MessageId, MessageMetadata>,
+ messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
+ images: impl Iterator<Item = &'a ImageAnchor> + 'a,
+ ) -> impl 'a + Iterator<Item = Message> {
+ let mut messages = messages.peekable();
+ let mut images = images.peekable();
+
iter::from_fn(move || {
- if let Some((start_ix, message_anchor)) = message_anchors.next() {
- let metadata = self.messages_metadata.get(&message_anchor.id)?;
+ if let Some((start_ix, message_anchor)) = messages.next() {
+ let metadata = metadata.get(&message_anchor.id)?;
+
let message_start = message_anchor.start.to_offset(buffer);
let mut message_end = None;
let mut end_ix = start_ix;
- while let Some((_, next_message)) = message_anchors.peek() {
+ while let Some((_, next_message)) = messages.peek() {
if next_message.start.is_valid(buffer) {
message_end = Some(next_message.start);
break;
} else {
end_ix += 1;
- message_anchors.next();
+ messages.next();
+ }
+ }
+ let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
+ let message_end = message_end_anchor.to_offset(buffer);
+
+ let mut image_offsets = SmallVec::new();
+ while let Some(image_anchor) = images.peek() {
+ if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
+ image_offsets.push((
+ image_anchor.anchor.to_offset(buffer),
+ MessageImage {
+ image_id: image_anchor.image_id,
+ image: image_anchor.image.clone(),
+ },
+ ));
+ images.next();
+ } else {
+ break;
}
}
- let message_end = message_end
- .unwrap_or(language::Anchor::MAX)
- .to_offset(buffer);
return Some(Message {
index_range: start_ix..end_ix,
@@ -2136,6 +2292,7 @@ impl Context {
anchor: message_anchor.start,
role: metadata.role,
status: metadata.status.clone(),
+ image_offsets,
});
}
None
@@ -2173,6 +2330,9 @@ impl Context {
})?;
if let Some(summary) = summary {
+ this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
+ .await;
+
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
let mut discriminant = 1;
let mut new_path;
@@ -2212,6 +2372,45 @@ impl Context {
});
}
+ pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
+ let mut images_to_save = self
+ .images
+ .iter()
+ .map(|(id, (_, llm_image))| {
+ let fs = fs.clone();
+ let llm_image = llm_image.clone();
+ let id = *id;
+ async move {
+ if let Some(llm_image) = llm_image.await {
+ let path: PathBuf =
+ context_images_dir().join(&format!("{}.png.base64", id));
+ if fs
+ .metadata(path.as_path())
+ .await
+ .log_err()
+ .flatten()
+ .is_none()
+ {
+ fs.atomic_write(path, llm_image.source.to_string())
+ .await
+ .log_err();
+ }
+ }
+ }
+ })
+ .collect::<FuturesUnordered<_>>();
+ cx.background_executor().spawn(async move {
+ if fs
+ .create_dir(context_images_dir().as_ref())
+ .await
+ .log_err()
+ .is_some()
+ {
+ while let Some(_) = images_to_save.next().await {}
+ }
+ })
+ }
+
pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
let timestamp = self.next_timestamp();
let summary = self.summary.get_or_insert(ContextSummary::default());
@@ -2265,6 +2464,9 @@ pub struct SavedMessage {
pub id: MessageId,
pub start: usize,
pub metadata: MessageMetadata,
+ #[serde(default)]
+ // This is defaulted for backwards compatibility with JSON files created before August 2024. We didn't always have this field.
+ pub image_offsets: Vec<(usize, u64)>,
}
#[derive(Serialize, Deserialize)]
@@ -2447,6 +2649,7 @@ impl SavedContextV0_3_0 {
status: metadata.status.clone(),
timestamp,
},
+ image_offsets: Vec::new(),
})
})
.collect(),
@@ -2377,7 +2377,7 @@ impl Codegen {
messages.push(LanguageModelRequestMessage {
role: Role::User,
- content: prompt,
+ content: vec![prompt.into()],
});
Ok(LanguageModelRequest {
@@ -775,7 +775,7 @@ impl PromptLibrary {
LanguageModelRequest {
messages: vec![LanguageModelRequestMessage {
role: Role::System,
- content: body.to_string(),
+ content: vec![body.to_string().into()],
}],
stop: Vec::new(),
temperature: 1.,
@@ -276,7 +276,7 @@ impl TerminalInlineAssistant {
messages.push(LanguageModelRequestMessage {
role: Role::User,
- content: prompt,
+ content: vec![prompt.into()],
});
Ok(LanguageModelRequest {
@@ -280,7 +280,7 @@ impl ChannelView {
};
let link = channel.notes_link(closest_heading.map(|heading| heading.text), cx);
- cx.write_to_clipboard(ClipboardItem::new(link));
+ cx.write_to_clipboard(ClipboardItem::new_string(link));
self.workspace
.update(cx, |workspace, cx| {
struct CopyLinkForPositionToast;
@@ -710,7 +710,7 @@ impl ChatPanel {
active_chat.read(cx).find_loaded_message(message_id)
}) {
let text = message.body.clone();
- cx.write_to_clipboard(ClipboardItem::new(text))
+ cx.write_to_clipboard(ClipboardItem::new_string(text))
}
}),
)
@@ -2042,7 +2042,7 @@ impl CollabPanel {
let Some(channel) = channel_store.channel_for_id(channel_id) else {
return;
};
- let item = ClipboardItem::new(channel.link(cx));
+ let item = ClipboardItem::new_string(channel.link(cx));
cx.write_to_clipboard(item)
}
@@ -2261,7 +2261,7 @@ impl CollabPanel {
.size(ButtonSize::None)
.visible_on_hover("section-header")
.on_click(move |_, cx| {
- let item = ClipboardItem::new(channel_link_copy.clone());
+ let item = ClipboardItem::new_string(channel_link_copy.clone());
cx.write_to_clipboard(item)
})
.tooltip(|cx| Tooltip::text("Copy channel link", cx))
@@ -175,7 +175,8 @@ impl Render for ChannelModal {
.read(cx)
.channel_for_id(channel_id)
{
- let item = ClipboardItem::new(channel.link(cx));
+ let item =
+ ClipboardItem::new_string(channel.link(cx));
cx.write_to_clipboard(item);
}
})),
@@ -55,7 +55,7 @@ impl CopilotCodeVerification {
) -> impl IntoElement {
let copied = cx
.read_from_clipboard()
- .map(|item| item.text() == &data.user_code)
+ .map(|item| item.text().as_ref() == Some(&data.user_code))
.unwrap_or(false);
h_flex()
.w_full()
@@ -68,7 +68,7 @@ impl CopilotCodeVerification {
.on_mouse_down(gpui::MouseButton::Left, {
let user_code = data.user_code.clone();
move |_, cx| {
- cx.write_to_clipboard(ClipboardItem::new(user_code.clone()));
+ cx.write_to_clipboard(ClipboardItem::new_string(user_code.clone()));
cx.refresh();
}
})
@@ -2,8 +2,8 @@ use futures::Future;
use git::blame::BlameEntry;
use git::Oid;
use gpui::{
- Asset, ClipboardItem, Element, ParentElement, Render, ScrollHandle, StatefulInteractiveElement,
- WeakView, WindowContext,
+ AppContext, Asset, ClipboardItem, Element, ParentElement, Render, ScrollHandle,
+ StatefulInteractiveElement, WeakView,
};
use settings::Settings;
use std::hash::Hash;
@@ -35,7 +35,7 @@ impl<'a> CommitAvatar<'a> {
let avatar_url = CommitAvatarAsset::new(remote.clone(), self.sha);
- let element = match cx.use_cached_asset::<CommitAvatarAsset>(&avatar_url) {
+ let element = match cx.use_asset::<CommitAvatarAsset>(&avatar_url) {
// Loading or no avatar found
None | Some(None) => Icon::new(IconName::Person)
.color(Color::Muted)
@@ -73,7 +73,7 @@ impl Asset for CommitAvatarAsset {
fn load(
source: Self::Source,
- cx: &mut WindowContext,
+ cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static {
let client = cx.http_client();
@@ -242,9 +242,9 @@ impl Render for BlameEntryTooltip {
.icon_color(Color::Muted)
.on_click(move |_, cx| {
cx.stop_propagation();
- cx.write_to_clipboard(ClipboardItem::new(
- full_sha.clone(),
- ))
+ cx.write_to_clipboard(
+ ClipboardItem::new_string(full_sha.clone()),
+ )
}),
),
),
@@ -69,13 +69,13 @@ use git::blame::GitBlame;
use git::diff_hunk_to_display;
use gpui::{
div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement,
- AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardItem,
- Context, DispatchPhase, ElementId, EntityId, EventEmitter, FocusHandle, FocusOutEvent,
- FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext,
- ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render, SharedString,
- Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle, UnderlineStyle,
- UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext, WeakFocusHandle,
- WeakView, WindowContext,
+ AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry,
+ ClipboardItem, Context, DispatchPhase, ElementId, EntityId, EventEmitter, FocusHandle,
+ FocusOutEvent, FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText,
+ KeyContext, ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render,
+ SharedString, Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle,
+ UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext,
+ WeakFocusHandle, WeakView, WindowContext,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
@@ -2304,7 +2304,7 @@ impl Editor {
}
if !text.is_empty() {
- cx.write_to_primary(ClipboardItem::new(text));
+ cx.write_to_primary(ClipboardItem::new_string(text));
}
}
@@ -6585,7 +6585,10 @@ impl Editor {
s.select(selections);
});
this.insert("", cx);
- cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections));
+ cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
+ text,
+ clipboard_selections,
+ ));
});
}
@@ -6624,7 +6627,10 @@ impl Editor {
}
}
- cx.write_to_clipboard(ClipboardItem::new(text).with_metadata(clipboard_selections));
+ cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
+ text,
+ clipboard_selections,
+ ));
}
pub fn do_paste(
@@ -6708,13 +6714,21 @@ impl Editor {
pub fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
if let Some(item) = cx.read_from_clipboard() {
- self.do_paste(
- item.text(),
- item.metadata::<Vec<ClipboardSelection>>(),
- true,
- cx,
- )
- };
+ let entries = item.entries();
+
+ match entries.first() {
+ // For now, we only support applying metadata if there's one string. In the future, we can incorporate all the selections
+ // of all the pasted entries.
+ Some(ClipboardEntry::String(clipboard_string)) if entries.len() == 1 => self
+ .do_paste(
+ clipboard_string.text(),
+ clipboard_string.metadata_json::<Vec<ClipboardSelection>>(),
+ true,
+ cx,
+ ),
+ _ => self.do_paste(&item.text().unwrap_or_default(), None, true, cx),
+ }
+ }
}
pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
@@ -10535,7 +10549,7 @@ impl Editor {
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
if let Some(path) = file.abs_path(cx).to_str() {
- cx.write_to_clipboard(ClipboardItem::new(path.to_string()));
+ cx.write_to_clipboard(ClipboardItem::new_string(path.to_string()));
}
}
}
@@ -10545,7 +10559,7 @@ impl Editor {
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
if let Some(path) = file.path().to_str() {
- cx.write_to_clipboard(ClipboardItem::new(path.to_string()));
+ cx.write_to_clipboard(ClipboardItem::new_string(path.to_string()));
}
}
}
@@ -10735,7 +10749,7 @@ impl Editor {
match permalink {
Ok(permalink) => {
- cx.write_to_clipboard(ClipboardItem::new(permalink.to_string()));
+ cx.write_to_clipboard(ClipboardItem::new_string(permalink.to_string()));
}
Err(err) => {
let message = format!("Failed to copy permalink: {err}");
@@ -11671,7 +11685,7 @@ impl Editor {
let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else {
return;
};
- cx.write_to_clipboard(ClipboardItem::new(lines));
+ cx.write_to_clipboard(ClipboardItem::new_string(lines));
}
pub fn inlay_hint_cache(&self) -> &InlayHintCache {
@@ -12938,7 +12952,9 @@ pub fn diagnostic_block_renderer(
.visible_on_hover(group_id.clone())
.on_click({
let message = diagnostic.message.clone();
- move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
+ move |_click, cx| {
+ cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
+ }
})
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
)
@@ -3956,8 +3956,9 @@ async fn test_clipboard(cx: &mut gpui::TestAppContext) {
the lazy dog"});
cx.update_editor(|e, cx| e.copy(&Copy, cx));
assert_eq!(
- cx.read_from_clipboard().map(|item| item.text().to_owned()),
- Some("fox jumps over\n".to_owned())
+ cx.read_from_clipboard()
+ .and_then(|item| item.text().as_deref().map(str::to_string)),
+ Some("fox jumps over\n".to_string())
);
// Paste with three selections, noticing how the copied full-line selection is inserted
@@ -642,7 +642,7 @@ impl EditorElement {
}
#[cfg(target_os = "linux")]
- if let Some(item) = cx.read_from_primary() {
+ if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) {
let point_for_position =
position_map.point_for_position(text_hitbox.bounds, event.position);
let position = point_for_position.previous_valid;
@@ -655,7 +655,7 @@ impl EditorElement {
},
cx,
);
- editor.insert(item.text(), cx);
+ editor.insert(&text, cx);
}
cx.stop_propagation()
}
@@ -4290,7 +4290,7 @@ fn deploy_blame_entry_context_menu(
let sha = format!("{}", blame_entry.sha);
menu.on_blur_subscription(Subscription::new(|| {}))
.entry("Copy commit SHA", None, move |cx| {
- cx.write_to_clipboard(ClipboardItem::new(sha.clone()));
+ cx.write_to_clipboard(ClipboardItem::new_string(sha.clone()));
})
.when_some(
details.and_then(|details| details.permalink.clone()),
@@ -44,7 +44,7 @@ pub fn init(cx: &mut AppContext) {
cx.spawn(|_, mut cx| async move {
let specs = specs.await.to_string();
- cx.update(|cx| cx.write_to_clipboard(ClipboardItem::new(specs.clone())))
+ cx.update(|cx| cx.write_to_clipboard(ClipboardItem::new_string(specs.clone())))
.log_err();
cx.prompt(
@@ -65,6 +65,7 @@ serde_json.workspace = true
slotmap = "1.0.6"
smallvec.workspace = true
smol.workspace = true
+strum.workspace = true
sum_tree.workspace = true
taffy = "0.4.3"
thiserror.workspace = true
@@ -11,9 +11,12 @@ use std::{
use anyhow::{anyhow, Result};
use derive_more::{Deref, DerefMut};
-use futures::{channel::oneshot, future::LocalBoxFuture, Future};
+use futures::{
+ channel::oneshot,
+ future::{LocalBoxFuture, Shared},
+ Future, FutureExt,
+};
use slotmap::SlotMap;
-use smol::future::FutureExt;
pub use async_context::*;
use collections::{FxHashMap, FxHashSet, VecDeque};
@@ -25,8 +28,8 @@ pub use test_context::*;
use util::ResultExt;
use crate::{
- current_platform, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
- AssetCache, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
+ current_platform, hash, init_app_menus, Action, ActionRegistry, Any, AnyView, AnyWindowHandle,
+ Asset, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
Entity, EventEmitter, ForegroundExecutor, Global, KeyBinding, Keymap, Keystroke, LayoutId,
Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, Point,
PromptBuilder, PromptHandle, PromptLevel, Render, RenderablePromptHandle, Reservation,
@@ -220,7 +223,6 @@ pub struct AppContext {
pub(crate) background_executor: BackgroundExecutor,
pub(crate) foreground_executor: ForegroundExecutor,
pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box<dyn Any>>,
- pub(crate) asset_cache: AssetCache,
asset_source: Arc<dyn AssetSource>,
pub(crate) svg_renderer: SvgRenderer,
http_client: Arc<dyn HttpClient>,
@@ -276,7 +278,6 @@ impl AppContext {
background_executor: executor,
foreground_executor,
svg_renderer: SvgRenderer::new(asset_source.clone()),
- asset_cache: AssetCache::new(),
loading_assets: Default::default(),
asset_source,
http_client,
@@ -1267,6 +1268,40 @@ impl AppContext {
) {
self.prompt_builder = Some(PromptBuilder::Custom(Box::new(renderer)))
}
+
+ /// Remove an asset from GPUI's cache
+ pub fn remove_cached_asset<A: Asset + 'static>(&mut self, source: &A::Source) {
+ let asset_id = (TypeId::of::<A>(), hash(source));
+ self.loading_assets.remove(&asset_id);
+ }
+
+ /// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
+ ///
+ /// Note that the multiple calls to this method will only result in one `Asset::load` call at a
+ /// time, and the results of this call will be cached
+ ///
+ /// This asset will not be cached by default, see [Self::use_cached_asset]
+ pub fn fetch_asset<A: Asset + 'static>(
+ &mut self,
+ source: &A::Source,
+ ) -> (Shared<Task<A::Output>>, bool) {
+ let asset_id = (TypeId::of::<A>(), hash(source));
+ let mut is_first = false;
+ let task = self
+ .loading_assets
+ .remove(&asset_id)
+ .map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
+ .unwrap_or_else(|| {
+ is_first = true;
+ let future = A::load(source.clone(), self);
+ let task = self.background_executor().spawn(future).shared();
+ task
+ });
+
+ self.loading_assets.insert(asset_id, Box::new(task.clone()));
+
+ (task, is_first)
+ }
}
impl Context for AppContext {
@@ -1,17 +1,14 @@
-use crate::{SharedString, SharedUri, WindowContext};
-use collections::FxHashMap;
+use crate::{AppContext, SharedString, SharedUri};
use futures::Future;
-use parking_lot::Mutex;
-use std::any::TypeId;
use std::hash::{Hash, Hasher};
+use std::path::PathBuf;
use std::sync::Arc;
-use std::{any::Any, path::PathBuf};
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub(crate) enum UriOrPath {
Uri(SharedUri),
Path(Arc<PathBuf>),
- Asset(SharedString),
+ Embedded(SharedString),
}
impl From<SharedUri> for UriOrPath {
@@ -37,7 +34,7 @@ pub trait Asset {
/// Load the asset asynchronously
fn load(
source: Self::Source,
- cx: &mut WindowContext,
+ cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static;
}
@@ -47,42 +44,3 @@ pub fn hash<T: Hash>(data: &T) -> u64 {
data.hash(&mut hasher);
hasher.finish()
}
-
-/// A cache for assets.
-#[derive(Clone)]
-pub struct AssetCache {
- assets: Arc<Mutex<FxHashMap<(TypeId, u64), Box<dyn Any + Send>>>>,
-}
-
-impl AssetCache {
- pub(crate) fn new() -> Self {
- Self {
- assets: Default::default(),
- }
- }
-
- /// Get the asset from the cache, if it exists.
- pub fn get<A: Asset + 'static>(&self, source: &A::Source) -> Option<A::Output> {
- self.assets
- .lock()
- .get(&(TypeId::of::<A>(), hash(&source)))
- .and_then(|task| task.downcast_ref::<A::Output>())
- .cloned()
- }
-
- /// Insert the asset into the cache.
- pub fn insert<A: Asset + 'static>(&mut self, source: A::Source, output: A::Output) {
- self.assets
- .lock()
- .insert((TypeId::of::<A>(), hash(&source)), Box::new(output));
- }
-
- /// Remove an entry from the asset cache
- pub fn remove<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
- self.assets
- .lock()
- .remove(&(TypeId::of::<A>(), hash(&source)))
- .and_then(|any| any.downcast::<A::Output>().ok())
- .map(|boxed| *boxed)
- }
-}
@@ -38,14 +38,22 @@ pub(crate) struct RenderImageParams {
pub(crate) frame_index: usize,
}
-/// A cached and processed image.
-pub struct ImageData {
+/// A cached and processed image, in BGRA format
+pub struct RenderImage {
/// The ID associated with this image
pub id: ImageId,
data: SmallVec<[Frame; 1]>,
}
-impl ImageData {
+impl PartialEq for RenderImage {
+ fn eq(&self, other: &Self) -> bool {
+ self.id == other.id
+ }
+}
+
+impl Eq for RenderImage {}
+
+impl RenderImage {
/// Create a new image from the given data.
pub fn new(data: impl Into<SmallVec<[Frame; 1]>>) -> Self {
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
@@ -57,8 +65,10 @@ impl ImageData {
}
/// Convert this image into a byte slice.
- pub fn as_bytes(&self, frame_index: usize) -> &[u8] {
- &self.data[frame_index].buffer()
+ pub fn as_bytes(&self, frame_index: usize) -> Option<&[u8]> {
+ self.data
+ .get(frame_index)
+ .map(|frame| frame.buffer().as_raw().as_slice())
}
/// Get the size of this image, in pixels.
@@ -78,7 +88,7 @@ impl ImageData {
}
}
-impl fmt::Debug for ImageData {
+impl fmt::Debug for RenderImage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ImageData")
.field("id", &self.id)
@@ -1,16 +1,14 @@
use crate::{
- point, px, size, AbsoluteLength, Asset, Bounds, DefiniteLength, DevicePixels, Element,
- ElementId, GlobalElementId, Hitbox, ImageData, InteractiveElement, Interactivity, IntoElement,
- LayoutId, Length, Pixels, SharedString, SharedUri, Size, StyleRefinement, Styled, SvgSize,
- UriOrPath, WindowContext,
+ px, AbsoluteLength, AppContext, Asset, Bounds, DefiniteLength, Element, ElementId,
+ GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId,
+ Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, Size, StyleRefinement, Styled,
+ SvgSize, UriOrPath, WindowContext,
};
use futures::{AsyncReadExt, Future};
use http_client;
use image::{
codecs::gif::GifDecoder, AnimationDecoder, Frame, ImageBuffer, ImageError, ImageFormat,
};
-#[cfg(target_os = "macos")]
-use media::core_video::CVImageBuffer;
use smallvec::SmallVec;
use std::{
fs,
@@ -23,20 +21,18 @@ use thiserror::Error;
use util::ResultExt;
/// A source of image content.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ImageSource {
/// Image content will be loaded from provided URI at render time.
Uri(SharedUri),
/// Image content will be loaded from the provided file at render time.
File(Arc<PathBuf>),
/// Cached image data
- Data(Arc<ImageData>),
+ Render(Arc<RenderImage>),
+ /// Encoded image bytes (e.g. a pasted PNG) that will be decoded at render time
+ Image(Arc<Image>),
/// Image content will be loaded from Asset at render time.
- Asset(SharedString),
- // TODO: move surface definitions into mac platform module
- /// A CoreVideo image buffer
- #[cfg(target_os = "macos")]
- Surface(CVImageBuffer),
+ Embedded(SharedString),
}
fn is_uri(uri: &str) -> bool {
@@ -54,7 +50,7 @@ impl From<&'static str> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
- Self::Asset(s.into())
+ Self::Embedded(s.into())
}
}
}
@@ -64,7 +60,7 @@ impl From<String> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
- Self::Asset(s.into())
+ Self::Embedded(s.into())
}
}
}
@@ -74,7 +70,7 @@ impl From<SharedString> for ImageSource {
if is_uri(&s) {
Self::Uri(s.into())
} else {
- Self::Asset(s)
+ Self::Embedded(s)
}
}
}
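For illustration (not part of the diff), how these conversions route plain strings, assuming `is_uri` keys off the presence of a scheme separator like `://`:

use gpui::ImageSource;

fn routing() {
    // Strings that look like URIs become remote sources...
    let remote: ImageSource = "https://example.com/avatar.png".into();
    assert!(matches!(remote, ImageSource::Uri(_)));

    // ...while bare paths are treated as assets embedded in the binary.
    let bundled: ImageSource = "icons/logo.svg".into();
    assert!(matches!(bundled, ImageSource::Embedded(_)));
}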
@@ -91,16 +87,9 @@ impl From<PathBuf> for ImageSource {
}
}
-impl From<Arc<ImageData>> for ImageSource {
- fn from(value: Arc<ImageData>) -> Self {
- Self::Data(value)
- }
-}
-
-#[cfg(target_os = "macos")]
-impl From<CVImageBuffer> for ImageSource {
- fn from(value: CVImageBuffer) -> Self {
- Self::Surface(value)
+impl From<Arc<RenderImage>> for ImageSource {
+ fn from(value: Arc<RenderImage>) -> Self {
+ Self::Render(value)
}
}
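A short sketch (not in the diff) of handing a pre-decoded image to the element, relying on the `From` impl above:

use gpui::{img, Img, RenderImage};
use std::sync::Arc;

fn pre_rendered(frames: Arc<RenderImage>) -> Img {
    // `From<Arc<RenderImage>>` routes this straight to `ImageSource::Render`,
    // skipping the asset loader and decode step entirely.
    img(frames)
}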
@@ -122,121 +111,6 @@ pub fn img(source: impl Into<ImageSource>) -> Img {
}
}
-/// How to fit the image into the bounds of the element.
-pub enum ObjectFit {
- /// The image will be stretched to fill the bounds of the element.
- Fill,
- /// The image will be scaled to fit within the bounds of the element.
- Contain,
- /// The image will be scaled to cover the bounds of the element.
- Cover,
- /// The image will be scaled down to fit within the bounds of the element.
- ScaleDown,
- /// The image will maintain its original size.
- None,
-}
-
-impl ObjectFit {
- /// Get the bounds of the image within the given bounds.
- pub fn get_bounds(
- &self,
- bounds: Bounds<Pixels>,
- image_size: Size<DevicePixels>,
- ) -> Bounds<Pixels> {
- let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
- let image_ratio = image_size.width / image_size.height;
- let bounds_ratio = bounds.size.width / bounds.size.height;
-
- let result_bounds = match self {
- ObjectFit::Fill => bounds,
- ObjectFit::Contain => {
- let new_size = if bounds_ratio > image_ratio {
- size(
- image_size.width * (bounds.size.height / image_size.height),
- bounds.size.height,
- )
- } else {
- size(
- bounds.size.width,
- image_size.height * (bounds.size.width / image_size.width),
- )
- };
-
- Bounds {
- origin: point(
- bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
- bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
- ),
- size: new_size,
- }
- }
- ObjectFit::ScaleDown => {
- // Check if the image is larger than the bounds in either dimension.
- if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
- // If the image is larger, use the same logic as Contain to scale it down.
- let new_size = if bounds_ratio > image_ratio {
- size(
- image_size.width * (bounds.size.height / image_size.height),
- bounds.size.height,
- )
- } else {
- size(
- bounds.size.width,
- image_size.height * (bounds.size.width / image_size.width),
- )
- };
-
- Bounds {
- origin: point(
- bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
- bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
- ),
- size: new_size,
- }
- } else {
- // If the image is smaller than or equal to the container, display it at its original size,
- // centered within the container.
- let original_size = size(image_size.width, image_size.height);
- Bounds {
- origin: point(
- bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
- bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
- ),
- size: original_size,
- }
- }
- }
- ObjectFit::Cover => {
- let new_size = if bounds_ratio > image_ratio {
- size(
- bounds.size.width,
- image_size.height * (bounds.size.width / image_size.width),
- )
- } else {
- size(
- image_size.width * (bounds.size.height / image_size.height),
- bounds.size.height,
- )
- };
-
- Bounds {
- origin: point(
- bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
- bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
- ),
- size: new_size,
- }
- }
- ObjectFit::None => Bounds {
- origin: bounds.origin,
- size: image_size,
- },
- };
-
- result_bounds
- }
-}
-
impl Img {
/// A list of all format extensions currently supported by this img element
pub fn extensions() -> &'static [&'static str] {
@@ -291,7 +165,7 @@ impl Element for Img {
let layout_id = self
.interactivity
.request_layout(global_id, cx, |mut style, cx| {
- if let Some(data) = self.source.data(cx) {
+ if let Some(data) = self.source.use_data(cx) {
if let Some(state) = &mut state {
let frame_count = data.frame_count();
if frame_count > 1 {
@@ -363,7 +237,7 @@ impl Element for Img {
.paint(global_id, bounds, hitbox.as_ref(), cx, |style, cx| {
let corner_radii = style.corner_radii.to_pixels(bounds.size, cx.rem_size());
- if let Some(data) = source.data(cx) {
+ if let Some(data) = source.use_data(cx) {
let new_bounds = self.object_fit.get_bounds(bounds, data.size(*frame_index));
cx.paint_image(
new_bounds,
@@ -374,17 +248,6 @@ impl Element for Img {
)
.log_err();
}
-
- match source {
- #[cfg(target_os = "macos")]
- ImageSource::Surface(surface) => {
- let size = size(surface.width().into(), surface.height().into());
- let new_bounds = self.object_fit.get_bounds(bounds, size);
- // TODO: Add support for corner_radii and grayscale.
- cx.paint_surface(new_bounds, surface);
- }
- _ => {}
- }
})
}
}
@@ -410,39 +273,74 @@ impl InteractiveElement for Img {
}
impl ImageSource {
- fn data(&self, cx: &mut WindowContext) -> Option<Arc<ImageData>> {
+ pub(crate) fn use_data(&self, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
match self {
- ImageSource::Uri(_) | ImageSource::Asset(_) | ImageSource::File(_) => {
+ ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
let uri_or_path: UriOrPath = match self {
ImageSource::Uri(uri) => uri.clone().into(),
ImageSource::File(path) => path.clone().into(),
- ImageSource::Asset(path) => UriOrPath::Asset(path.clone()),
+ ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
_ => unreachable!(),
};
- cx.use_cached_asset::<Image>(&uri_or_path)?.log_err()
+ cx.use_asset::<ImageAsset>(&uri_or_path)?.log_err()
}
- ImageSource::Data(data) => Some(data.to_owned()),
- #[cfg(target_os = "macos")]
- ImageSource::Surface(_) => None,
+ ImageSource::Render(data) => Some(data.to_owned()),
+ ImageSource::Image(data) => cx.use_asset::<ImageDecoder>(data)?.log_err(),
+ }
+ }
+
+ /// Fetch the data associated with this source, using GPUI's asset caching
+ pub async fn data(&self, cx: &mut AppContext) -> Option<Arc<RenderImage>> {
+ match self {
+ ImageSource::Uri(_) | ImageSource::Embedded(_) | ImageSource::File(_) => {
+ let uri_or_path: UriOrPath = match self {
+ ImageSource::Uri(uri) => uri.clone().into(),
+ ImageSource::File(path) => path.clone().into(),
+ ImageSource::Embedded(path) => UriOrPath::Embedded(path.clone()),
+ _ => unreachable!(),
+ };
+
+ cx.fetch_asset::<ImageAsset>(&uri_or_path).0.await.log_err()
+ }
+
+ ImageSource::Render(data) => Some(data.to_owned()),
+ ImageSource::Image(data) => cx.fetch_asset::<ImageDecoder>(data).0.await.log_err(),
}
}
}
#[derive(Clone)]
-enum Image {}
+enum ImageDecoder {}
+
+impl Asset for ImageDecoder {
+ type Source = Arc<Image>;
+ type Output = Result<Arc<RenderImage>, Arc<anyhow::Error>>;
-impl Asset for Image {
+ fn load(
+ source: Self::Source,
+ cx: &mut AppContext,
+ ) -> impl Future<Output = Self::Output> + Send + 'static {
+ let result = source.to_image_data(cx).map_err(Arc::new);
+ async { result }
+ }
+}
+
+#[derive(Clone)]
+enum ImageAsset {}
+
+impl Asset for ImageAsset {
type Source = UriOrPath;
- type Output = Result<Arc<ImageData>, ImageCacheError>;
+ type Output = Result<Arc<RenderImage>, ImageCacheError>;
fn load(
source: Self::Source,
- cx: &mut WindowContext,
+ cx: &mut AppContext,
) -> impl Future<Output = Self::Output> + Send + 'static {
let client = cx.http_client();
- let scale_factor = cx.scale_factor();
+ // TODO: Can we make SVGs always rescale?
+ // let scale_factor = cx.scale_factor();
let svg_renderer = cx.svg_renderer();
let asset_source = cx.asset_source().clone();
async move {
@@ -461,7 +359,7 @@ impl Asset for Image {
}
body
}
- UriOrPath::Asset(path) => {
+ UriOrPath::Embedded(path) => {
let data = asset_source.load(&path).ok().flatten();
if let Some(data) = data {
data.to_vec()
@@ -503,15 +401,16 @@ impl Asset for Image {
}
};
- ImageData::new(data)
+ RenderImage::new(data)
} else {
let pixmap =
- svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(scale_factor))?;
+ // TODO: Can we make svgs always rescale?
+ svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?;
let buffer =
ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap();
- ImageData::new(SmallVec::from_elem(Frame::new(buffer), 1))
+ RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1))
};
Ok(Arc::new(data))
@@ -5,6 +5,7 @@ mod deferred;
mod div;
mod img;
mod list;
+mod surface;
mod svg;
mod text;
mod uniform_list;
@@ -16,6 +17,7 @@ pub use deferred::*;
pub use div::*;
pub use img::*;
pub use list::*;
+pub use surface::*;
pub use svg::*;
pub use text::*;
pub use uniform_list::*;
@@ -0,0 +1,111 @@
+use crate::{
+ Bounds, Element, ElementId, GlobalElementId, IntoElement, LayoutId, ObjectFit, Pixels, Style,
+ StyleRefinement, Styled, WindowContext,
+};
+#[cfg(target_os = "macos")]
+use media::core_video::CVImageBuffer;
+use refineable::Refineable;
+
+/// A source of a surface's content.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SurfaceSource {
+ /// A macOS image buffer from CoreVideo
+ #[cfg(target_os = "macos")]
+ Surface(CVImageBuffer),
+}
+
+#[cfg(target_os = "macos")]
+impl From<CVImageBuffer> for SurfaceSource {
+ fn from(value: CVImageBuffer) -> Self {
+ SurfaceSource::Surface(value)
+ }
+}
+
+/// A surface element.
+pub struct Surface {
+ source: SurfaceSource,
+ object_fit: ObjectFit,
+ style: StyleRefinement,
+}
+
+/// Create a new surface element.
+pub fn surface(source: impl Into<SurfaceSource>) -> Surface {
+ Surface {
+ source: source.into(),
+ object_fit: ObjectFit::Contain,
+ style: Default::default(),
+ }
+}
+
+impl Surface {
+ /// Set the object fit for the surface's content.
+ pub fn object_fit(mut self, object_fit: ObjectFit) -> Self {
+ self.object_fit = object_fit;
+ self
+ }
+}
+
+impl Element for Surface {
+ type RequestLayoutState = ();
+ type PrepaintState = ();
+
+ fn id(&self) -> Option<ElementId> {
+ None
+ }
+
+ fn request_layout(
+ &mut self,
+ _global_id: Option<&GlobalElementId>,
+ cx: &mut WindowContext,
+ ) -> (LayoutId, Self::RequestLayoutState) {
+ let mut style = Style::default();
+ style.refine(&self.style);
+ let layout_id = cx.request_layout(style, []);
+ (layout_id, ())
+ }
+
+ fn prepaint(
+ &mut self,
+ _global_id: Option<&GlobalElementId>,
+ _bounds: Bounds<Pixels>,
+ _request_layout: &mut Self::RequestLayoutState,
+ _cx: &mut WindowContext,
+ ) -> Self::PrepaintState {
+ ()
+ }
+
+ fn paint(
+ &mut self,
+ _global_id: Option<&GlobalElementId>,
+ #[cfg_attr(not(target_os = "macos"), allow(unused_variables))] bounds: Bounds<Pixels>,
+ _: &mut Self::RequestLayoutState,
+ _: &mut Self::PrepaintState,
+ #[cfg_attr(not(target_os = "macos"), allow(unused_variables))] cx: &mut WindowContext,
+ ) {
+ match &self.source {
+ #[cfg(target_os = "macos")]
+ SurfaceSource::Surface(surface) => {
+ let size = crate::size(surface.width().into(), surface.height().into());
+ let new_bounds = self.object_fit.get_bounds(bounds, size);
+ // TODO: Add support for corner_radii
+ cx.paint_surface(new_bounds, surface.clone());
+ }
+ #[allow(unreachable_patterns)]
+ _ => {}
+ }
+ }
+}
+
+impl IntoElement for Surface {
+ type Element = Self;
+
+ fn into_element(self) -> Self::Element {
+ self
+ }
+}
+
+impl Styled for Surface {
+ fn style(&mut self) -> &mut StyleRefinement {
+ &mut self.style
+ }
+}
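A minimal usage sketch (not part of the patch) for the new element; it assumes the caller already has a `CVImageBuffer` (for example a decoded video frame) and only compiles on macOS:

#[cfg(target_os = "macos")]
fn video_frame_element(frame: media::core_video::CVImageBuffer) -> impl gpui::IntoElement {
    use gpui::{surface, ObjectFit};

    // The CoreVideo buffer is painted directly; `ObjectFit` decides how it is
    // scaled into the element's bounds.
    surface(frame).object_fit(ObjectFit::Cover)
}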
@@ -2447,10 +2447,24 @@ impl From<usize> for Pixels {
/// affected by the device's scale factor, `DevicePixels` always correspond to real pixels on the
/// display.
#[derive(
- Add, AddAssign, Clone, Copy, Default, Div, Eq, Hash, Ord, PartialEq, PartialOrd, Sub, SubAssign,
+ Add,
+ AddAssign,
+ Clone,
+ Copy,
+ Default,
+ Div,
+ Eq,
+ Hash,
+ Ord,
+ PartialEq,
+ PartialOrd,
+ Sub,
+ SubAssign,
+ Serialize,
+ Deserialize,
)]
#[repr(transparent)]
-pub struct DevicePixels(pub(crate) i32);
+pub struct DevicePixels(pub i32);
impl DevicePixels {
/// Converts the `DevicePixels` value to the number of bytes needed to represent it in memory.
@@ -20,21 +20,25 @@ mod test;
mod windows;
use crate::{
- point, Action, AnyWindowHandle, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels,
- DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId,
- Keymap, LineLayout, Pixels, PlatformInput, Point, RenderGlyphParams, RenderImageParams,
- RenderSvgParams, Scene, SharedString, Size, Task, TaskLabel, WindowContext,
- DEFAULT_WINDOW_SIZE,
+ point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds,
+ DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor,
+ GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point,
+ RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size,
+ SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE,
};
use anyhow::Result;
use async_task::Runnable;
use futures::channel::oneshot;
+use image::codecs::gif::GifDecoder;
+use image::{AnimationDecoder as _, Frame};
use parking::Unparker;
use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
use seahash::SeaHasher;
use serde::{Deserialize, Serialize};
+use smallvec::SmallVec;
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
+use std::io::Cursor;
use std::time::{Duration, Instant};
use std::{
fmt::{self, Debug},
@@ -43,6 +47,7 @@ use std::{
rc::Rc,
sync::Arc,
};
+use strum::EnumIter;
use uuid::Uuid;
pub use app_menu::*;
@@ -969,12 +974,210 @@ impl Default for CursorStyle {
/// A clipboard item that should be copied to the clipboard
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ClipboardItem {
+ entries: Vec<ClipboardEntry>,
+}
+
+/// Either a ClipboardString or a ClipboardImage
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum ClipboardEntry {
+ /// A string entry
+ String(ClipboardString),
+ /// An image entry
+ Image(Image),
+}
+
+impl ClipboardItem {
+ /// Create a new ClipboardItem containing a single string entry with no associated metadata
+ pub fn new_string(text: String) -> Self {
+ Self {
+ entries: vec![ClipboardEntry::String(ClipboardString::new(text))],
+ }
+ }
+
+ /// Create a new ClipboardItem containing a single string entry with the given text and associated metadata
+ pub fn new_string_with_metadata(text: String, metadata: String) -> Self {
+ Self {
+ entries: vec![ClipboardEntry::String(ClipboardString {
+ text,
+ metadata: Some(metadata),
+ })],
+ }
+ }
+
+ /// Create a new ClipboardItem containing a single string entry with the given text and metadata serialized as JSON
+ pub fn new_string_with_json_metadata<T: Serialize>(text: String, metadata: T) -> Self {
+ Self {
+ entries: vec![ClipboardEntry::String(
+ ClipboardString::new(text).with_json_metadata(metadata),
+ )],
+ }
+ }
+
+ /// Concatenates all of the ClipboardString entries in the item.
+ /// Returns None if there are no ClipboardString entries.
+ pub fn text(&self) -> Option<String> {
+ let mut answer = String::new();
+ let mut any_entries = false;
+
+ for entry in self.entries.iter() {
+ if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry {
+ answer.push_str(text);
+ any_entries = true;
+ }
+ }
+
+ if any_entries {
+ Some(answer)
+ } else {
+ None
+ }
+ }
+
+ /// If this item contains exactly one ClipboardEntry::String, returns its metadata.
+ #[cfg_attr(not(target_os = "windows"), allow(dead_code))]
+ pub fn metadata(&self) -> Option<&String> {
+ match self.entries().first() {
+ Some(ClipboardEntry::String(clipboard_string)) if self.entries.len() == 1 => {
+ clipboard_string.metadata.as_ref()
+ }
+ _ => None,
+ }
+ }
+
+ /// Get the item's entries
+ pub fn entries(&self) -> &[ClipboardEntry] {
+ &self.entries
+ }
+
+ /// Get owned versions of the item's entries
+ pub fn into_entries(self) -> impl Iterator<Item = ClipboardEntry> {
+ self.entries.into_iter()
+ }
+}
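A small sketch (not part of the patch) of the new constructor and accessor API; the metadata value here is arbitrary and only needs to implement `Serialize`:

use gpui::{ClipboardEntry, ClipboardItem};

fn clipboard_round_trip() {
    let item = ClipboardItem::new_string_with_json_metadata(
        "fn main() {}".into(),
        vec![0usize, 12], // e.g. a selection range, serialized to JSON
    );

    // `text()` concatenates every string entry; image entries are skipped.
    assert_eq!(item.text().as_deref(), Some("fn main() {}"));

    for entry in item.entries() {
        match entry {
            ClipboardEntry::String(s) => println!("string: {}", s.text()),
            ClipboardEntry::Image(image) => println!("image: {:?}", image.format()),
        }
    }
}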
+
+/// One of the editor's supported image formats (e.g. PNG, JPEG) - used when dealing with images in the clipboard
+#[derive(Clone, Copy, Debug, Eq, PartialEq, EnumIter, Hash)]
+pub enum ImageFormat {
+ // Sorted from most to least likely to be pasted into an editor,
+ // which matters when we iterate through them trying to see if
+ // clipboard content matches them.
+ /// .png
+ Png,
+ /// .jpeg or .jpg
+ Jpeg,
+ /// .webp
+ Webp,
+ /// .gif
+ Gif,
+ /// .svg
+ Svg,
+ /// .bmp
+ Bmp,
+ /// .tif or .tiff
+ Tiff,
+}
+
+/// An image, with a format and certain bytes
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Image {
+ /// The image format the bytes represent (e.g. PNG)
+ format: ImageFormat,
+ /// The raw image bytes
+ bytes: Vec<u8>,
+ id: u64,
+}
+
+impl Hash for Image {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ state.write_u64(self.id);
+ }
+}
+
+impl Image {
+ /// Get this image's ID
+ pub fn id(&self) -> u64 {
+ self.id
+ }
+
+ /// Use the GPUI `use_asset` API to make this image renderable
+ pub fn use_render_image(self: Arc<Self>, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
+ ImageSource::Image(self).use_data(cx)
+ }
+
+ /// Convert the clipboard image to a `RenderImage`, ready for painting.
+ pub fn to_image_data(&self, cx: &AppContext) -> Result<Arc<RenderImage>> {
+ fn frames_for_image(
+ bytes: &[u8],
+ format: image::ImageFormat,
+ ) -> Result<SmallVec<[Frame; 1]>> {
+ let mut data = image::load_from_memory_with_format(bytes, format)?.into_rgba8();
+
+ // Convert from RGBA to BGRA.
+ for pixel in data.chunks_exact_mut(4) {
+ pixel.swap(0, 2);
+ }
+
+ Ok(SmallVec::from_elem(Frame::new(data), 1))
+ }
+
+ let frames = match self.format {
+ ImageFormat::Gif => {
+ let decoder = GifDecoder::new(Cursor::new(&self.bytes))?;
+ let mut frames = SmallVec::new();
+
+ for frame in decoder.into_frames() {
+ let mut frame = frame?;
+ // Convert from RGBA to BGRA.
+ for pixel in frame.buffer_mut().chunks_exact_mut(4) {
+ pixel.swap(0, 2);
+ }
+ frames.push(frame);
+ }
+
+ frames
+ }
+ ImageFormat::Png => frames_for_image(&self.bytes, image::ImageFormat::Png)?,
+ ImageFormat::Jpeg => frames_for_image(&self.bytes, image::ImageFormat::Jpeg)?,
+ ImageFormat::Webp => frames_for_image(&self.bytes, image::ImageFormat::WebP)?,
+ ImageFormat::Bmp => frames_for_image(&self.bytes, image::ImageFormat::Bmp)?,
+ ImageFormat::Tiff => frames_for_image(&self.bytes, image::ImageFormat::Tiff)?,
+ ImageFormat::Svg => {
+ // TODO: Fix this
+ let pixmap = cx
+ .svg_renderer()
+ .render_pixmap(&self.bytes, SvgSize::ScaleFactor(1.0))?;
+
+ let buffer =
+ image::ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take())
+ .unwrap();
+
+ SmallVec::from_elem(Frame::new(buffer), 1)
+ }
+ };
+
+ Ok(Arc::new(RenderImage::new(frames)))
+ }
+
+ /// Get the format of the clipboard image
+ pub fn format(&self) -> ImageFormat {
+ self.format
+ }
+
+ /// Get the raw bytes of the clipboard image
+ pub fn bytes(&self) -> &[u8] {
+ self.bytes.as_slice()
+ }
+}
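A brief sketch (not in the patch) of turning a pasted clipboard image into paintable frames via the asset system:

use gpui::{Image, RenderImage, WindowContext};
use std::sync::Arc;

fn paste_image(image: Arc<Image>, cx: &mut WindowContext) -> Option<Arc<RenderImage>> {
    // Decoding goes through the asset cache, so repeated calls for the same
    // image (keyed by its id) reuse the already-converted BGRA frames.
    image.use_render_image(cx)
}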
+
+/// A clipboard item that should be copied to the clipboard
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct ClipboardString {
pub(crate) text: String,
pub(crate) metadata: Option<String>,
}
-impl ClipboardItem {
- /// Create a new clipboard item with the given text
+impl ClipboardString {
+ /// Create a new clipboard string with the given text
pub fn new(text: String) -> Self {
Self {
text,
@@ -982,19 +1185,25 @@ impl ClipboardItem {
}
}
- /// Create a new clipboard item with the given text and metadata
- pub fn with_metadata<T: Serialize>(mut self, metadata: T) -> Self {
+ /// Return a new clipboard item with the metadata replaced by the given metadata,
+ /// after serializing it as JSON.
+ pub fn with_json_metadata<T: Serialize>(mut self, metadata: T) -> Self {
self.metadata = Some(serde_json::to_string(&metadata).unwrap());
self
}
- /// Get the text of the clipboard item
+ /// Get the text of the clipboard string
pub fn text(&self) -> &String {
&self.text
}
- /// Get the metadata of the clipboard item
- pub fn metadata<T>(&self) -> Option<T>
+ /// Get the owned text of the clipboard string
+ pub fn into_text(self) -> String {
+ self.text
+ }
+
+ /// Get the metadata of the clipboard string, formatted as JSON
+ pub fn metadata_json<T>(&self) -> Option<T>
where
T: for<'a> Deserialize<'a>,
{
@@ -112,14 +112,18 @@ impl Clipboard {
}
pub fn send(&self, _mime_type: String, fd: OwnedFd) {
- if let Some(contents) = &self.contents {
- self.send_internal(fd, contents.text.as_bytes().to_owned());
+ if let Some(text) = self.contents.as_ref().and_then(|contents| contents.text()) {
+ self.send_internal(fd, text.as_bytes().to_owned());
}
}
pub fn send_primary(&self, _mime_type: String, fd: OwnedFd) {
- if let Some(primary_contents) = &self.primary_contents {
- self.send_internal(fd, primary_contents.text.as_bytes().to_owned());
+ if let Some(text) = self
+ .primary_contents
+ .as_ref()
+ .and_then(|contents| contents.text())
+ {
+ self.send_internal(fd, text.as_bytes().to_owned());
}
}
@@ -145,7 +149,7 @@ impl Clipboard {
match unsafe { read_fd(fd) } {
Ok(v) => {
- self.cached_read = Some(ClipboardItem::new(v));
+ self.cached_read = Some(ClipboardItem::new_string(v));
self.cached_read.clone()
}
Err(err) => {
@@ -177,7 +181,7 @@ impl Clipboard {
match unsafe { read_fd(fd) } {
Ok(v) => {
- self.cached_primary_read = Some(ClipboardItem::new(v.clone()));
+ self.cached_primary_read = Some(ClipboardItem::new_string(v.clone()));
self.cached_primary_read.clone()
}
Err(err) => {
@@ -1259,7 +1259,7 @@ impl LinuxClient for X11Client {
.store(
state.clipboard.setter.atoms.primary,
state.clipboard.setter.atoms.utf8_string,
- item.text().as_bytes(),
+ item.text().unwrap_or_default().as_bytes(),
)
.ok();
}
@@ -1271,7 +1271,7 @@ impl LinuxClient for X11Client {
.store(
state.clipboard.setter.atoms.clipboard,
state.clipboard.setter.atoms.utf8_string,
- item.text().as_bytes(),
+ item.text().unwrap_or_default().as_bytes(),
)
.ok();
state.clipboard_item.replace(item);
@@ -1287,10 +1287,7 @@ impl LinuxClient for X11Client {
state.clipboard.getter.atoms.property,
Duration::from_secs(3),
)
- .map(|text| crate::ClipboardItem {
- text: String::from_utf8(text).unwrap(),
- metadata: None,
- })
+ .map(|text| crate::ClipboardItem::new_string(String::from_utf8(text).unwrap()))
.ok()
}
@@ -1318,10 +1315,7 @@ impl LinuxClient for X11Client {
state.clipboard.getter.atoms.property,
Duration::from_secs(3),
)
- .map(|text| crate::ClipboardItem {
- text: String::from_utf8(text).unwrap(),
- metadata: None,
- })
+ .map(|text| crate::ClipboardItem::new_string(String::from_utf8(text).unwrap()))
.ok()
}
@@ -16,6 +16,7 @@ use metal_renderer as renderer;
#[cfg(feature = "macos-blade")]
use crate::platform::blade as renderer;
+mod attributed_string;
mod open_type;
mod platform;
mod text_system;
@@ -0,0 +1,122 @@
+use cocoa::base::id;
+use cocoa::foundation::NSRange;
+use objc::{class, msg_send, sel, sel_impl};
+
+/// The `cocoa` crate does not define NSAttributedString (and related Cocoa classes),
+/// which are needed for copying rich text (that is, text intermingled with images)
+/// to the clipboard. This adds access to those APIs.
+
+#[allow(non_snake_case)]
+pub trait NSAttributedString: Sized {
+ unsafe fn alloc(_: Self) -> id {
+ msg_send![class!(NSAttributedString), alloc]
+ }
+
+ unsafe fn init_attributed_string(self, string: id) -> id;
+ unsafe fn appendAttributedString_(self, attr_string: id);
+ unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
+ unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id;
+ unsafe fn string(self) -> id;
+}
+
+impl NSAttributedString for id {
+ unsafe fn init_attributed_string(self, string: id) -> id {
+ msg_send![self, initWithString: string]
+ }
+
+ unsafe fn appendAttributedString_(self, attr_string: id) {
+ let _: () = msg_send![self, appendAttributedString: attr_string];
+ }
+
+ unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
+ msg_send![self, RTFDFromRange: range documentAttributes: attrs]
+ }
+
+ unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id {
+ msg_send![self, RTFFromRange: range documentAttributes: attrs]
+ }
+
+ unsafe fn string(self) -> id {
+ msg_send![self, string]
+ }
+}
+
+pub trait NSMutableAttributedString: NSAttributedString {
+ unsafe fn alloc(_: Self) -> id {
+ msg_send![class!(NSMutableAttributedString), alloc]
+ }
+}
+
+impl NSMutableAttributedString for id {}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use cocoa::appkit::NSImage;
+ use cocoa::base::nil;
+ use cocoa::foundation::NSString;
+ #[test]
+ #[ignore] // This was SIGSEGV-ing on CI but not locally; need to investigate https://github.com/zed-industries/zed/actions/runs/10362363230/job/28684225486?pr=15782#step:4:1348
+ fn test_nsattributed_string() {
+ // TODO move these to parent module once it's actually ready to be used
+ #[allow(non_snake_case)]
+ pub trait NSTextAttachment: Sized {
+ unsafe fn alloc(_: Self) -> id {
+ msg_send![class!(NSTextAttachment), alloc]
+ }
+ }
+
+ impl NSTextAttachment for id {}
+
+ unsafe {
+ let image: id = msg_send![class!(NSImage), alloc];
+ image.initWithContentsOfFile_(
+ NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
+ );
+ let _size = image.size();
+
+ let string = NSString::alloc(nil).init_str("Test String");
+ let attr_string = NSMutableAttributedString::alloc(nil).init_attributed_string(string);
+ let hello_string = NSString::alloc(nil).init_str("Hello World");
+ let hello_attr_string =
+ NSAttributedString::alloc(nil).init_attributed_string(hello_string);
+ attr_string.appendAttributedString_(hello_attr_string);
+
+ let attachment = NSTextAttachment::alloc(nil);
+ let _: () = msg_send![attachment, setImage: image];
+ let image_attr_string =
+ msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment];
+ attr_string.appendAttributedString_(image_attr_string);
+
+ let another_string = NSString::alloc(nil).init_str("Another String");
+ let another_attr_string =
+ NSAttributedString::alloc(nil).init_attributed_string(another_string);
+ attr_string.appendAttributedString_(another_attr_string);
+
+ let _len: cocoa::foundation::NSUInteger = msg_send![attr_string, length];
+
+ ///////////////////////////////////////////////////
+ // pasteboard.clearContents();
+
+ let rtfd_data = attr_string.RTFDFromRange_documentAttributes_(
+ NSRange::new(0, msg_send![attr_string, length]),
+ nil,
+ );
+ assert_ne!(rtfd_data, nil);
+ // if rtfd_data != nil {
+ // pasteboard.setData_forType(rtfd_data, NSPasteboardTypeRTFD);
+ // }
+
+ // let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
+ // NSRange::new(0, attributed_string.length()),
+ // nil,
+ // );
+ // if rtf_data != nil {
+ // pasteboard.setData_forType(rtf_data, NSPasteboardTypeRTF);
+ // }
+
+ // let plain_text = attributed_string.string();
+ // pasteboard.setString_forType(plain_text, NSPasteboardTypeString);
+ }
+ }
+}
@@ -1,8 +1,8 @@
use super::metal_atlas::MetalAtlas;
use crate::{
point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels,
- Hsla, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad,
- ScaledPixels, Scene, Shadow, Size, Surface, Underline,
+ Hsla, MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite,
+ PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline,
};
use anyhow::{anyhow, Result};
use block::ConcreteBlock;
@@ -1020,7 +1020,7 @@ impl MetalRenderer {
fn draw_surfaces(
&mut self,
- surfaces: &[Surface],
+ surfaces: &[PaintSurface],
instance_buffer: &mut InstanceBuffer,
instance_offset: &mut usize,
viewport_size: Size<DevicePixels>,
@@ -1,8 +1,13 @@
-use super::{events::key_to_native, BoolExt};
+use super::{
+ attributed_string::{NSAttributedString, NSMutableAttributedString},
+ events::key_to_native,
+ BoolExt,
+};
use crate::{
- Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor,
- Keymap, MacDispatcher, MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions,
- Platform, PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
+ hash, Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem,
+ ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, Keymap, MacDispatcher,
+ MacDisplay, MacTextSystem, MacWindow, Menu, MenuItem, PathPromptOptions, Platform,
+ PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
WindowAppearance, WindowParams,
};
use anyhow::anyhow;
@@ -11,16 +16,17 @@ use cocoa::{
appkit::{
NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular,
NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSPasteboard,
- NSPasteboardTypeString, NSSavePanel, NSWindow,
+ NSPasteboardTypePNG, NSPasteboardTypeRTF, NSPasteboardTypeRTFD, NSPasteboardTypeString,
+ NSPasteboardTypeTIFF, NSSavePanel, NSWindow,
},
base::{id, nil, selector, BOOL, YES},
foundation::{
- NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSString,
+ NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSRange, NSString,
NSUInteger, NSURL,
},
};
use core_foundation::{
- base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType as _},
+ base::{CFRelease, CFType, CFTypeRef, OSStatus, TCFType},
boolean::CFBoolean,
data::CFData,
dictionary::{CFDictionary, CFDictionaryRef, CFMutableDictionary},
@@ -50,6 +56,7 @@ use std::{
slice, str,
sync::Arc,
};
+use strum::IntoEnumIterator;
use super::renderer;
@@ -421,7 +428,7 @@ impl Platform for MacPlatform {
pool.drain();
(*app).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
- (*app.delegate()).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
+ (*NSWindow::delegate(app)).set_ivar(MAC_PLATFORM_IVAR, null_mut::<c_void>());
}
}
@@ -749,7 +756,7 @@ impl Platform for MacPlatform {
let app: id = msg_send![APP_CLASS, sharedApplication];
let mut state = self.0.lock();
let actions = &mut state.menu_actions;
- app.setMainMenu_(self.create_menu_bar(menus, app.delegate(), actions, keymap));
+ app.setMainMenu_(self.create_menu_bar(menus, NSWindow::delegate(app), actions, keymap));
}
}
@@ -758,7 +765,7 @@ impl Platform for MacPlatform {
let app: id = msg_send![APP_CLASS, sharedApplication];
let mut state = self.0.lock();
let actions = &mut state.menu_actions;
- let new = self.create_dock_menu(menu, app.delegate(), actions, keymap);
+ let new = self.create_dock_menu(menu, NSWindow::delegate(app), actions, keymap);
if let Some(old) = state.dock_menu.replace(new) {
CFRelease(old as _)
}
@@ -851,79 +858,115 @@ impl Platform for MacPlatform {
}
fn write_to_clipboard(&self, item: ClipboardItem) {
- let state = self.0.lock();
+ use crate::ClipboardEntry;
+
unsafe {
- state.pasteboard.clearContents();
+ // We only want to use NSAttributedString if there are multiple entries to write.
+ if item.entries.len() <= 1 {
+ match item.entries.first() {
+ Some(entry) => match entry {
+ ClipboardEntry::String(string) => {
+ self.write_plaintext_to_clipboard(string);
+ }
+ ClipboardEntry::Image(image) => {
+ self.write_image_to_clipboard(image);
+ }
+ },
+ None => {
+ // Writing an empty list of entries just clears the clipboard.
+ let state = self.0.lock();
+ state.pasteboard.clearContents();
+ }
+ }
+ } else {
+ let mut any_images = false;
+ let attributed_string = {
+ let mut buf = NSMutableAttributedString::alloc(nil)
+ // TODO can we skip this? Or at least part of it?
+ .init_attributed_string(NSString::alloc(nil).init_str(""));
+
+ for entry in item.entries {
+ if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry
+ {
+ let to_append = NSAttributedString::alloc(nil)
+ .init_attributed_string(NSString::alloc(nil).init_str(&text));
+
+ buf.appendAttributedString_(to_append);
+ }
+ }
- let text_bytes = NSData::dataWithBytes_length_(
- nil,
- item.text.as_ptr() as *const c_void,
- item.text.len() as u64,
- );
- state
- .pasteboard
- .setData_forType(text_bytes, NSPasteboardTypeString);
-
- if let Some(metadata) = item.metadata.as_ref() {
- let hash_bytes = ClipboardItem::text_hash(&item.text).to_be_bytes();
- let hash_bytes = NSData::dataWithBytes_length_(
- nil,
- hash_bytes.as_ptr() as *const c_void,
- hash_bytes.len() as u64,
- );
- state
- .pasteboard
- .setData_forType(hash_bytes, state.text_hash_pasteboard_type);
+ buf
+ };
- let metadata_bytes = NSData::dataWithBytes_length_(
- nil,
- metadata.as_ptr() as *const c_void,
- metadata.len() as u64,
- );
+ let state = self.0.lock();
+ state.pasteboard.clearContents();
+
+ // Only set rich text clipboard types if we actually have 1+ images to include.
+ if any_images {
+ let rtfd_data = attributed_string.RTFDFromRange_documentAttributes_(
+ NSRange::new(0, msg_send![attributed_string, length]),
+ nil,
+ );
+ if rtfd_data != nil {
+ state
+ .pasteboard
+ .setData_forType(rtfd_data, NSPasteboardTypeRTFD);
+ }
+
+ let rtf_data = attributed_string.RTFFromRange_documentAttributes_(
+ NSRange::new(0, attributed_string.length()),
+ nil,
+ );
+ if rtf_data != nil {
+ state
+ .pasteboard
+ .setData_forType(rtf_data, NSPasteboardTypeRTF);
+ }
+ }
+
+ let plain_text = attributed_string.string();
state
.pasteboard
- .setData_forType(metadata_bytes, state.metadata_pasteboard_type);
+ .setString_forType(plain_text, NSPasteboardTypeString);
}
}
}
fn read_from_clipboard(&self) -> Option<ClipboardItem> {
let state = self.0.lock();
+ let pasteboard = state.pasteboard;
+
+ // First, see if it's a string.
unsafe {
- if let Some(text_bytes) =
- self.read_from_pasteboard(state.pasteboard, NSPasteboardTypeString)
- {
- let text = String::from_utf8_lossy(text_bytes).to_string();
- let hash_bytes = self
- .read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
- .and_then(|bytes| bytes.try_into().ok())
- .map(u64::from_be_bytes);
- let metadata_bytes = self
- .read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)
- .and_then(|bytes| String::from_utf8(bytes.to_vec()).ok());
-
- if let Some((hash, metadata)) = hash_bytes.zip(metadata_bytes) {
- if hash == ClipboardItem::text_hash(&text) {
- Some(ClipboardItem {
- text,
- metadata: Some(metadata),
- })
- } else {
- Some(ClipboardItem {
- text,
- metadata: None,
- })
- }
+ let types: id = pasteboard.types();
+ let string_type: id = ns_string("public.utf8-plain-text");
+
+ if msg_send![types, containsObject: string_type] {
+ let data = pasteboard.dataForType(string_type);
+ if data == nil {
+ return None;
+ } else if data.bytes().is_null() {
+ // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc
+ // "If the length of the NSData object is 0, this property returns nil."
+ return Some(self.read_string_from_clipboard(&state, &[]));
} else {
- Some(ClipboardItem {
- text,
- metadata: None,
- })
+ let bytes =
+ slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize);
+
+ return Some(self.read_string_from_clipboard(&state, bytes));
+ }
+ }
+
+ // If it wasn't a string, try the various supported image types.
+ for format in ImageFormat::iter() {
+ if let Some(item) = try_clipboard_image(pasteboard, format) {
+ return Some(item);
}
- } else {
- None
}
}
+
+ // If it wasn't a string or a supported image type, give up.
+ None
}
fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task<Result<()>> {
@@ -1038,6 +1081,110 @@ impl Platform for MacPlatform {
}
}
+impl MacPlatform {
+ unsafe fn read_string_from_clipboard(
+ &self,
+ state: &MacPlatformState,
+ text_bytes: &[u8],
+ ) -> ClipboardItem {
+ let text = String::from_utf8_lossy(text_bytes).to_string();
+ let metadata = self
+ .read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type)
+ .and_then(|hash_bytes| {
+ let hash_bytes = hash_bytes.try_into().ok()?;
+ let hash = u64::from_be_bytes(hash_bytes);
+ let metadata =
+ self.read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)?;
+
+ if hash == ClipboardString::text_hash(&text) {
+ String::from_utf8(metadata.to_vec()).ok()
+ } else {
+ None
+ }
+ });
+
+ ClipboardItem {
+ entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })],
+ }
+ }
+
+ unsafe fn write_plaintext_to_clipboard(&self, string: &ClipboardString) {
+ let state = self.0.lock();
+ state.pasteboard.clearContents();
+
+ let text_bytes = NSData::dataWithBytes_length_(
+ nil,
+ string.text.as_ptr() as *const c_void,
+ string.text.len() as u64,
+ );
+ state
+ .pasteboard
+ .setData_forType(text_bytes, NSPasteboardTypeString);
+
+ if let Some(metadata) = string.metadata.as_ref() {
+ let hash_bytes = ClipboardString::text_hash(&string.text).to_be_bytes();
+ let hash_bytes = NSData::dataWithBytes_length_(
+ nil,
+ hash_bytes.as_ptr() as *const c_void,
+ hash_bytes.len() as u64,
+ );
+ state
+ .pasteboard
+ .setData_forType(hash_bytes, state.text_hash_pasteboard_type);
+
+ let metadata_bytes = NSData::dataWithBytes_length_(
+ nil,
+ metadata.as_ptr() as *const c_void,
+ metadata.len() as u64,
+ );
+ state
+ .pasteboard
+ .setData_forType(metadata_bytes, state.metadata_pasteboard_type);
+ }
+ }
+
+ unsafe fn write_image_to_clipboard(&self, image: &Image) {
+ let state = self.0.lock();
+ state.pasteboard.clearContents();
+
+ let bytes = NSData::dataWithBytes_length_(
+ nil,
+ image.bytes.as_ptr() as *const c_void,
+ image.bytes.len() as u64,
+ );
+
+ state
+ .pasteboard
+ .setData_forType(bytes, Into::<UTType>::into(image.format).inner_mut());
+ }
+}
+
+fn try_clipboard_image(pasteboard: id, format: ImageFormat) -> Option<ClipboardItem> {
+ let mut ut_type: UTType = format.into();
+
+ unsafe {
+ let types: id = pasteboard.types();
+ if msg_send![types, containsObject: ut_type.inner()] {
+ let data = pasteboard.dataForType(ut_type.inner_mut());
+ if data == nil {
+ None
+ } else {
+ let bytes = Vec::from(slice::from_raw_parts(
+ data.bytes() as *mut u8,
+ data.length() as usize,
+ ));
+ let id = hash(&bytes);
+
+ Some(ClipboardItem {
+ entries: vec![ClipboardEntry::Image(Image { format, bytes, id })],
+ })
+ }
+ } else {
+ None
+ }
+ }
+}
+
unsafe fn path_from_objc(path: id) -> PathBuf {
let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
let bytes = path.UTF8String() as *const u8;
@@ -1216,6 +1363,68 @@ mod security {
pub const errSecItemNotFound: OSStatus = -25300;
}
+impl From<ImageFormat> for UTType {
+ fn from(value: ImageFormat) -> Self {
+ match value {
+ ImageFormat::Png => Self::png(),
+ ImageFormat::Jpeg => Self::jpeg(),
+ ImageFormat::Tiff => Self::tiff(),
+ ImageFormat::Webp => Self::webp(),
+ ImageFormat::Gif => Self::gif(),
+ ImageFormat::Bmp => Self::bmp(),
+ ImageFormat::Svg => Self::svg(),
+ }
+ }
+}
+
+// See https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/
+struct UTType(id);
+
+impl UTType {
+ pub fn png() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/png
+ Self(unsafe { NSPasteboardTypePNG }) // This is a rare case where there's a built-in NSPasteboardType
+ }
+
+ pub fn jpeg() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/jpeg
+ Self(unsafe { ns_string("public.jpeg") })
+ }
+
+ pub fn gif() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/gif
+ Self(unsafe { ns_string("com.compuserve.gif") })
+ }
+
+ pub fn webp() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/webp
+ Self(unsafe { ns_string("org.webmproject.webp") })
+ }
+
+ pub fn bmp() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/bmp
+ Self(unsafe { ns_string("com.microsoft.bmp") })
+ }
+
+ pub fn svg() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/svg
+ Self(unsafe { ns_string("public.svg-image") })
+ }
+
+ pub fn tiff() -> Self {
+ // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff
+ Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType
+ }
+
+ fn inner(&self) -> *const Object {
+ self.0
+ }
+
+ fn inner_mut(&mut self) -> *mut Object {
+ self.0 as *mut _
+ }
+}
+
#[cfg(test)]
mod tests {
use crate::ClipboardItem;
@@ -1227,11 +1436,15 @@ mod tests {
let platform = build_platform();
assert_eq!(platform.read_from_clipboard(), None);
- let item = ClipboardItem::new("1".to_string());
+ let item = ClipboardItem::new_string("1".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
- let item = ClipboardItem::new("2".to_string()).with_metadata(vec![3, 4]);
+ let item = ClipboardItem {
+ entries: vec![ClipboardEntry::String(
+ ClipboardString::new("2".to_string()).with_json_metadata(vec![3, 4]),
+ )],
+ };
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
@@ -1250,7 +1463,7 @@ mod tests {
}
assert_eq!(
platform.read_from_clipboard(),
- Some(ClipboardItem::new(text_from_other_app.to_string()))
+ Some(ClipboardItem::new_string(text_from_other_app.to_string()))
);
}
@@ -735,12 +735,17 @@ fn write_to_clipboard_inner(
unsafe {
OpenClipboard(None)?;
EmptyClipboard()?;
- let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec();
+ let encode_wide = item
+ .text()
+ .unwrap_or_default()
+ .encode_utf16()
+ .chain(Some(0))
+ .collect_vec();
set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?;
- if let Some(ref metadata) = item.metadata {
+ if let Some((metadata, text)) = item.metadata().zip(item.text()) {
let hash_result = {
- let hash = ClipboardItem::text_hash(&item.text);
+ let hash = ClipboardString::text_hash(&text);
hash.to_ne_bytes()
};
let encode_wide = std::slice::from_raw_parts(hash_result.as_ptr().cast::<u16>(), 4);
@@ -778,20 +783,17 @@ fn read_from_clipboard_inner(hash_format: u32, metadata_format: u32) -> Result<C
let text = PCWSTR(handle.0 as *const u16);
String::from_utf16_lossy(text.as_wide())
};
- let mut item = ClipboardItem {
- text,
- metadata: None,
- };
let Some(hash) = read_hash_from_clipboard(hash_format) else {
- return Ok(item);
+ return Ok(ClipboardItem::new_string(text));
};
let Some(metadata) = read_metadata_from_clipboard(metadata_format) else {
- return Ok(item);
+ return Ok(ClipboardItem::new_string(text));
};
- if hash == ClipboardItem::text_hash(&item.text) {
- item.metadata = Some(metadata);
+ if hash == ClipboardString::text_hash(&text) {
+ Ok(ClipboardItem::new_string_with_metadata(text, metadata))
+ } else {
+ Ok(ClipboardItem::new_string(text))
}
- Ok(item)
}
}
@@ -826,15 +828,15 @@ mod tests {
#[test]
fn test_clipboard() {
let platform = WindowsPlatform::new();
- let item = ClipboardItem::new("你好".to_string());
+ let item = ClipboardItem::new_string("你好".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
- let item = ClipboardItem::new("12345".to_string());
+ let item = ClipboardItem::new_string("12345".to_string());
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
- let item = ClipboardItem::new("abcdef".to_string()).with_metadata(vec![3, 4]);
+ let item = ClipboardItem::new_string_with_json_metadata("abcdef".to_string(), vec![3, 4]);
platform.write_to_clipboard(item.clone());
assert_eq!(platform.read_from_clipboard(), Some(item));
}
@@ -23,7 +23,7 @@ pub(crate) struct Scene {
pub(crate) underlines: Vec<Underline>,
pub(crate) monochrome_sprites: Vec<MonochromeSprite>,
pub(crate) polychrome_sprites: Vec<PolychromeSprite>,
- pub(crate) surfaces: Vec<Surface>,
+ pub(crate) surfaces: Vec<PaintSurface>,
}
impl Scene {
@@ -183,7 +183,7 @@ pub(crate) enum Primitive {
Underline(Underline),
MonochromeSprite(MonochromeSprite),
PolychromeSprite(PolychromeSprite),
- Surface(Surface),
+ Surface(PaintSurface),
}
impl Primitive {
@@ -231,9 +231,9 @@ struct BatchIterator<'a> {
polychrome_sprites: &'a [PolychromeSprite],
polychrome_sprites_start: usize,
polychrome_sprites_iter: Peekable<slice::Iter<'a, PolychromeSprite>>,
- surfaces: &'a [Surface],
+ surfaces: &'a [PaintSurface],
surfaces_start: usize,
- surfaces_iter: Peekable<slice::Iter<'a, Surface>>,
+ surfaces_iter: Peekable<slice::Iter<'a, PaintSurface>>,
}
impl<'a> Iterator for BatchIterator<'a> {
@@ -411,7 +411,7 @@ pub(crate) enum PrimitiveBatch<'a> {
texture_id: AtlasTextureId,
sprites: &'a [PolychromeSprite],
},
- Surfaces(&'a [Surface]),
+ Surfaces(&'a [PaintSurface]),
}
#[derive(Default, Debug, Clone, Eq, PartialEq)]
@@ -673,7 +673,7 @@ impl From<PolychromeSprite> for Primitive {
}
#[derive(Clone, Debug, Eq, PartialEq)]
-pub(crate) struct Surface {
+pub(crate) struct PaintSurface {
pub order: DrawOrder,
pub bounds: Bounds<ScaledPixels>,
pub content_mask: ContentMask<ScaledPixels>,
@@ -681,20 +681,20 @@ pub(crate) struct Surface {
pub image_buffer: media::core_video::CVImageBuffer,
}
-impl Ord for Surface {
+impl Ord for PaintSurface {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.order.cmp(&other.order)
}
}
-impl PartialOrd for Surface {
+impl PartialOrd for PaintSurface {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
-impl From<Surface> for Primitive {
- fn from(surface: Surface) -> Self {
+impl From<PaintSurface> for Primitive {
+ fn from(surface: PaintSurface) -> Self {
Primitive::Surface(surface)
}
}
@@ -5,10 +5,10 @@ use std::{
};
use crate::{
- black, phi, point, quad, rems, AbsoluteLength, Bounds, ContentMask, Corners, CornersRefinement,
- CursorStyle, DefiniteLength, Edges, EdgesRefinement, Font, FontFallbacks, FontFeatures,
- FontStyle, FontWeight, Hsla, Length, Pixels, Point, PointRefinement, Rgba, SharedString, Size,
- SizeRefinement, Styled, TextRun, WindowContext,
+ black, phi, point, quad, rems, size, AbsoluteLength, Bounds, ContentMask, Corners,
+ CornersRefinement, CursorStyle, DefiniteLength, DevicePixels, Edges, EdgesRefinement, Font,
+ FontFallbacks, FontFeatures, FontStyle, FontWeight, Hsla, Length, Pixels, Point,
+ PointRefinement, Rgba, SharedString, Size, SizeRefinement, Styled, TextRun, WindowContext,
};
use collections::HashSet;
use refineable::Refineable;
@@ -27,6 +27,121 @@ pub struct DebugBelow;
#[cfg(debug_assertions)]
impl crate::Global for DebugBelow {}
+/// How to fit the image into the bounds of the element.
+pub enum ObjectFit {
+ /// The image will be stretched to fill the bounds of the element.
+ Fill,
+ /// The image will be scaled to fit within the bounds of the element.
+ Contain,
+ /// The image will be scaled to cover the bounds of the element.
+ Cover,
+ /// The image will be scaled down to fit within the bounds of the element.
+ ScaleDown,
+ /// The image will maintain its original size.
+ None,
+}
+
+impl ObjectFit {
+ /// Get the bounds of the image within the given bounds.
+ pub fn get_bounds(
+ &self,
+ bounds: Bounds<Pixels>,
+ image_size: Size<DevicePixels>,
+ ) -> Bounds<Pixels> {
+ let image_size = image_size.map(|dimension| Pixels::from(u32::from(dimension)));
+ let image_ratio = image_size.width / image_size.height;
+ let bounds_ratio = bounds.size.width / bounds.size.height;
+
+ let result_bounds = match self {
+ ObjectFit::Fill => bounds,
+ ObjectFit::Contain => {
+ let new_size = if bounds_ratio > image_ratio {
+ size(
+ image_size.width * (bounds.size.height / image_size.height),
+ bounds.size.height,
+ )
+ } else {
+ size(
+ bounds.size.width,
+ image_size.height * (bounds.size.width / image_size.width),
+ )
+ };
+
+ Bounds {
+ origin: point(
+ bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
+ bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
+ ),
+ size: new_size,
+ }
+ }
+ ObjectFit::ScaleDown => {
+ // Check if the image is larger than the bounds in either dimension.
+ if image_size.width > bounds.size.width || image_size.height > bounds.size.height {
+ // If the image is larger, use the same logic as Contain to scale it down.
+ let new_size = if bounds_ratio > image_ratio {
+ size(
+ image_size.width * (bounds.size.height / image_size.height),
+ bounds.size.height,
+ )
+ } else {
+ size(
+ bounds.size.width,
+ image_size.height * (bounds.size.width / image_size.width),
+ )
+ };
+
+ Bounds {
+ origin: point(
+ bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
+ bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
+ ),
+ size: new_size,
+ }
+ } else {
+ // If the image is smaller than or equal to the container, display it at its original size,
+ // centered within the container.
+ let original_size = size(image_size.width, image_size.height);
+ Bounds {
+ origin: point(
+ bounds.origin.x + (bounds.size.width - original_size.width) / 2.0,
+ bounds.origin.y + (bounds.size.height - original_size.height) / 2.0,
+ ),
+ size: original_size,
+ }
+ }
+ }
+ ObjectFit::Cover => {
+ let new_size = if bounds_ratio > image_ratio {
+ size(
+ bounds.size.width,
+ image_size.height * (bounds.size.width / image_size.width),
+ )
+ } else {
+ size(
+ image_size.width * (bounds.size.height / image_size.height),
+ bounds.size.height,
+ )
+ };
+
+ Bounds {
+ origin: point(
+ bounds.origin.x + (bounds.size.width - new_size.width) / 2.0,
+ bounds.origin.y + (bounds.size.height - new_size.height) / 2.0,
+ ),
+ size: new_size,
+ }
+ }
+ ObjectFit::None => Bounds {
+ origin: bounds.origin,
+ size: image_size,
+ },
+ };
+
+ result_bounds
+ }
+}
+
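A worked example (not in the patch) of `ObjectFit::Contain`, relying on the `DevicePixels` field being made public elsewhere in this change and written as if from a downstream crate:

use gpui::{point, px, size, Bounds, DevicePixels, ObjectFit, Pixels};

fn contain_example() {
    // A 200x100 image in a 100x100 box: Contain scales it to 100x50 and
    // centers it vertically, leaving 25px of letterboxing top and bottom.
    let box_bounds: Bounds<Pixels> = Bounds {
        origin: point(px(0.), px(0.)),
        size: size(px(100.), px(100.)),
    };
    let image_size = size(DevicePixels(200), DevicePixels(100));

    let fitted = ObjectFit::Contain.get_bounds(box_bounds, image_size);
    assert_eq!(fitted.size, size(px(100.), px(50.)));
    assert_eq!(fitted.origin, point(px(0.), px(25.)));
}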
/// The CSS styling that can be applied to an element via the `Styled` trait
#[derive(Clone, Refineable, Debug)]
#[refineable(Debug)]
@@ -1,26 +1,25 @@
use crate::{
- hash, point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
+ point, prelude::*, px, size, transparent_black, Action, AnyDrag, AnyElement, AnyTooltip,
AnyView, AppContext, Arena, Asset, AsyncWindowContext, AvailableSpace, Bounds, BoxShadow,
Context, Corners, CursorStyle, Decorations, DevicePixels, DispatchActionListener,
DispatchNodeId, DispatchTree, DisplayId, Edges, Effect, Entity, EntityId, EventEmitter,
- FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, ImageData,
- InputHandler, IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke,
- KeystrokeEvent, LayoutId, LineLayoutIndex, Model, ModelContext, Modifiers,
- ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent,
- Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler,
- PlatformWindow, Point, PolychromeSprite, PromptLevel, Quad, Render, RenderGlyphParams,
- RenderImageParams, RenderSvgParams, Replay, ResizeEdge, ScaledPixels, Scene, Shadow,
- SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, TaffyLayoutEngine,
- Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, View,
- VisualContext, WeakView, WindowAppearance, WindowBackgroundAppearance, WindowBounds,
- WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
- SUBPIXEL_VARIANTS,
+ FileDropEvent, Flatten, FontId, GPUSpecs, Global, GlobalElementId, GlyphId, Hsla, InputHandler,
+ IsZero, KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId,
+ LineLayoutIndex, Model, ModelContext, Modifiers, ModifiersChangedEvent, MonochromeSprite,
+ MouseButton, MouseEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas,
+ PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PolychromeSprite,
+ PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams,
+ Replay, ResizeEdge, ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style,
+ SubscriberSet, Subscription, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement,
+ TransformationMatrix, Underline, UnderlineStyle, View, VisualContext, WeakView,
+ WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations,
+ WindowOptions, WindowParams, WindowTextSystem, SUBPIXEL_VARIANTS,
};
use anyhow::{anyhow, Context as _, Result};
use collections::{FxHashMap, FxHashSet};
use derive_more::{Deref, DerefMut};
use futures::channel::oneshot;
-use futures::{future::Shared, FutureExt};
+use futures::FutureExt;
#[cfg(target_os = "macos")]
use media::core_video::CVImageBuffer;
use parking_lot::RwLock;
@@ -1956,36 +1955,6 @@ impl<'a> WindowContext<'a> {
self.window.requested_autoscroll.take()
}
- /// Remove an asset from GPUI's cache
- pub fn remove_cached_asset<A: Asset + 'static>(
- &mut self,
- source: &A::Source,
- ) -> Option<A::Output> {
- self.asset_cache.remove::<A>(source)
- }
-
- /// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
- /// Your view will be re-drawn once the asset has finished loading.
- ///
- /// Note that the multiple calls to this method will only result in one `Asset::load` call.
- /// The results of that call will be cached, and returned on subsequent uses of this API.
- ///
- /// Use [Self::remove_cached_asset] to reload your asset.
- pub fn use_cached_asset<A: Asset + 'static>(
- &mut self,
- source: &A::Source,
- ) -> Option<A::Output> {
- self.asset_cache.get::<A>(source).or_else(|| {
- if let Some(asset) = self.use_asset::<A>(source) {
- self.asset_cache
- .insert::<A>(source.to_owned(), asset.clone());
- Some(asset)
- } else {
- None
- }
- })
- }
-
/// Asynchronously load an asset, if the asset hasn't finished loading this will return None.
/// Your view will be re-drawn once the asset has finished loading.
///
@@ -1994,19 +1963,7 @@ impl<'a> WindowContext<'a> {
///
/// This asset will not be cached by default, see [Self::use_cached_asset]
pub fn use_asset<A: Asset + 'static>(&mut self, source: &A::Source) -> Option<A::Output> {
- let asset_id = (TypeId::of::<A>(), hash(source));
- let mut is_first = false;
- let task = self
- .loading_assets
- .remove(&asset_id)
- .map(|boxed_task| *boxed_task.downcast::<Shared<Task<A::Output>>>().unwrap())
- .unwrap_or_else(|| {
- is_first = true;
- let future = A::load(source.clone(), self);
- let task = self.background_executor().spawn(future).shared();
- task
- });
-
+ let (task, is_first) = self.fetch_asset::<A>(source);
task.clone().now_or_never().or_else(|| {
if is_first {
let parent_id = self.parent_view_id();
@@ -2027,12 +1984,9 @@ impl<'a> WindowContext<'a> {
.detach();
}
- self.loading_assets.insert(asset_id, Box::new(task));
-
None
})
}
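A minimal call-site sketch for the simplified `use_asset` path (hedged; `IconAsset` and `icon_source` are assumed names, not part of this change):

// Inside an element's prepaint or paint code, with `cx: &mut WindowContext`.
// Yields Option<IconAsset::Output>: None while `IconAsset::load` is still
// running on the background executor; the view re-draws once loading completes.
let icon = cx.use_asset::<IconAsset>(&icon_source);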
-
/// Obtain the current element offset. This method should only be called during the
/// prepaint phase of element drawing.
pub fn element_offset(&self) -> Point<Pixels> {
@@ -2610,13 +2564,14 @@ impl<'a> WindowContext<'a> {
}
/// Paint an image into the scene for the next frame at the current z-index.
+ /// This method will panic if the `frame_index` is not valid.
///
/// This method should only be called as part of the paint phase of element drawing.
pub fn paint_image(
&mut self,
bounds: Bounds<Pixels>,
corner_radii: Corners<Pixels>,
- data: Arc<ImageData>,
+ data: Arc<RenderImage>,
frame_index: usize,
grayscale: bool,
) -> Result<()> {
@@ -2639,7 +2594,10 @@ impl<'a> WindowContext<'a> {
.get_or_insert_with(¶ms.clone().into(), &mut || {
Ok(Some((
data.size(frame_index),
- Cow::Borrowed(data.as_bytes(frame_index)),
+ Cow::Borrowed(
+ data.as_bytes(frame_index)
+ .expect("It's the caller's job to pass a valid frame index"),
+ ),
)))
})?
.expect("Callback above only returns Some");
@@ -2665,6 +2623,8 @@ impl<'a> WindowContext<'a> {
/// This method should only be called as part of the paint phase of element drawing.
#[cfg(target_os = "macos")]
pub fn paint_surface(&mut self, bounds: Bounds<Pixels>, image_buffer: CVImageBuffer) {
+ use crate::PaintSurface;
+
debug_assert_eq!(
self.window.draw_phase,
DrawPhase::Paint,
@@ -2674,15 +2634,12 @@ impl<'a> WindowContext<'a> {
let scale_factor = self.scale_factor();
let bounds = bounds.scale(scale_factor);
let content_mask = self.content_mask().scale(scale_factor);
- self.window
- .next_frame
- .scene
- .insert_primitive(crate::Surface {
- order: 0,
- bounds,
- content_mask,
- image_buffer,
- });
+ self.window.next_frame.scene.insert_primitive(PaintSurface {
+ order: 0,
+ bounds,
+ content_mask,
+ image_buffer,
+ });
}
#[must_use]
@@ -50,6 +50,9 @@ theme.workspace = true
tiktoken-rs.workspace = true
ui.workspace = true
util.workspace = true
+base64.workspace = true
+image.workspace = true
+
[dev-dependencies]
ctor.workspace = true
@@ -221,24 +221,44 @@ pub fn count_anthropic_tokens(
) -> BoxFuture<'static, Result<usize>> {
cx.background_executor()
.spawn(async move {
- let messages = request
- .messages
- .into_iter()
- .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
- role: match message.role {
- Role::User => "user".into(),
- Role::Assistant => "assistant".into(),
- Role::System => "system".into(),
- },
- content: Some(message.content),
- name: None,
- function_call: None,
- })
- .collect::<Vec<_>>();
+ let messages = request.messages;
+ let mut tokens_from_images = 0;
+ let mut string_messages = Vec::with_capacity(messages.len());
+
+ for message in messages {
+ use crate::MessageContent;
+
+ let mut string_contents = String::new();
+
+ for content in message.content {
+ match content {
+ MessageContent::Text(string) => {
+ string_contents.push_str(&string);
+ }
+ MessageContent::Image(image) => {
+ tokens_from_images += image.estimate_tokens();
+ }
+ }
+ }
+
+ if !string_contents.is_empty() {
+ string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
+ role: match message.role {
+ Role::User => "user".into(),
+ Role::Assistant => "assistant".into(),
+ Role::System => "system".into(),
+ },
+ content: Some(string_contents),
+ name: None,
+ function_call: None,
+ });
+ }
+ }
// Tiktoken doesn't yet support these models, so we manually use the
// same tokenizer as GPT-4.
- tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
+ tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
+ .map(|tokens| tokens + tokens_from_images)
})
.boxed()
}
@@ -193,7 +193,7 @@ impl LanguageModel for CopilotChatLanguageModel {
cx: &AsyncAppContext,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
if let Some(message) = request.messages.last() {
- if message.content.trim().is_empty() {
+ if message.contents_empty() {
const EMPTY_PROMPT_MSG: &str =
"Empty prompts aren't allowed. Please provide a non-empty prompt.";
return futures::future::ready(Err(anyhow::anyhow!(EMPTY_PROMPT_MSG))).boxed();
@@ -270,7 +270,7 @@ impl CopilotChatLanguageModel {
Role::Assistant => CopilotChatRole::Assistant,
Role::System => CopilotChatRole::System,
},
- content: msg.content,
+ content: msg.string_contents(),
})
.collect(),
)
@@ -182,14 +182,14 @@ impl OllamaLanguageModel {
.into_iter()
.map(|msg| match msg.role {
Role::User => ChatMessage::User {
- content: msg.content,
+ content: msg.string_contents(),
},
Role::Assistant => ChatMessage::Assistant {
- content: msg.content,
+ content: msg.string_contents(),
tool_calls: None,
},
Role::System => ChatMessage::System {
- content: msg.content,
+ content: msg.string_contents(),
},
})
.collect(),
@@ -257,7 +257,7 @@ impl LanguageModel for OllamaLanguageModel {
let token_count = request
.messages
.iter()
- .map(|msg| msg.content.chars().count())
+ .map(|msg| msg.string_contents().chars().count())
.sum::<usize>()
/ 4;
@@ -363,7 +363,7 @@ pub fn count_open_ai_tokens(
Role::Assistant => "assistant".into(),
Role::System => "system".into(),
},
- content: Some(message.content),
+ content: Some(message.string_contents()),
name: None,
function_call: None,
})
@@ -1,10 +1,223 @@
+use std::io::{Cursor, Write};
+
use crate::role::Role;
+use base64::write::EncoderWriter;
+use gpui::{point, size, AppContext, DevicePixels, Image, ObjectFit, RenderImage, Size, Task};
+use image::{codecs::png::PngEncoder, imageops::resize, DynamicImage, ImageDecoder};
use serde::{Deserialize, Serialize};
+use ui::{px, SharedString};
+use util::ResultExt;
+
+#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)]
+pub struct LanguageModelImage {
+ // A base64-encoded PNG image
+ pub source: SharedString,
+ size: Size<DevicePixels>,
+}
+
+const ANTHROPIC_SIZE_LIMIT: f32 = 1568.0; // Anthropic wants uploaded images to be smaller than this in both dimensions
+
+impl LanguageModelImage {
+ pub fn from_image(data: Image, cx: &mut AppContext) -> Task<Option<Self>> {
+ cx.background_executor().spawn(async move {
+ match data.format() {
+ gpui::ImageFormat::Png
+ | gpui::ImageFormat::Jpeg
+ | gpui::ImageFormat::Webp
+ | gpui::ImageFormat::Gif => {}
+ _ => return None,
+ };
+
+ let image = image::codecs::png::PngDecoder::new(Cursor::new(data.bytes())).log_err()?;
+ let (width, height) = image.dimensions();
+ let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32));
+
+ let mut base64_image = Vec::new();
+
+ {
+ let mut base64_encoder = EncoderWriter::new(
+ Cursor::new(&mut base64_image),
+ &base64::engine::general_purpose::STANDARD,
+ );
+
+ if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32
+ || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32
+ {
+ let new_bounds = ObjectFit::ScaleDown.get_bounds(
+ gpui::Bounds {
+ origin: point(px(0.0), px(0.0)),
+ size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)),
+ },
+ image_size,
+ );
+ let image = DynamicImage::from_decoder(image).log_err()?.resize(
+ new_bounds.size.width.0 as u32,
+ new_bounds.size.height.0 as u32,
+ image::imageops::FilterType::Triangle,
+ );
+
+ let mut png = Vec::new();
+ image
+ .write_with_encoder(PngEncoder::new(&mut png))
+ .log_err()?;
+
+ base64_encoder.write_all(png.as_slice()).log_err()?;
+ } else {
+ base64_encoder.write_all(data.bytes()).log_err()?;
+ }
+ }
+
+ // SAFETY: The base64 encoder should not produce non-UTF8
+ let source = unsafe { String::from_utf8_unchecked(base64_image) };
+
+ Some(LanguageModelImage {
+ size: image_size,
+ source: source.into(),
+ })
+ })
+ }
+
+ /// Resolves a render image into an LLM-ready format (a base64-encoded PNG).
+ pub fn from_render_image(data: &RenderImage) -> Option<Self> {
+ let image_size = data.size(0);
+
+ let mut bytes = data.as_bytes(0).unwrap_or(&[]).to_vec();
+ // Convert from BGRA to RGBA.
+ for pixel in bytes.chunks_exact_mut(4) {
+ pixel.swap(2, 0);
+ }
+ let mut image = image::RgbaImage::from_vec(
+ image_size.width.0 as u32,
+ image_size.height.0 as u32,
+ bytes,
+ )
+ .expect("We already know this works");
+
+ // https://docs.anthropic.com/en/docs/build-with-claude/vision
+ if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32
+ || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32
+ {
+ let new_bounds = ObjectFit::ScaleDown.get_bounds(
+ gpui::Bounds {
+ origin: point(px(0.0), px(0.0)),
+ size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)),
+ },
+ image_size,
+ );
+
+ image = resize(
+ &image,
+ new_bounds.size.width.0 as u32,
+ new_bounds.size.height.0 as u32,
+ image::imageops::FilterType::Triangle,
+ );
+ }
+
+ let mut png = Vec::new();
+
+ image
+ .write_with_encoder(PngEncoder::new(&mut png))
+ .log_err()?;
+
+ let mut base64_image = Vec::new();
+
+ {
+ let mut base64_encoder = EncoderWriter::new(
+ Cursor::new(&mut base64_image),
+ &base64::engine::general_purpose::STANDARD,
+ );
+
+ base64_encoder.write_all(png.as_slice()).log_err()?;
+ }
+
+ // SAFETY: The base64 encoder should not produce non-UTF8
+ let source = unsafe { String::from_utf8_unchecked(base64_image) };
+
+ Some(LanguageModelImage {
+ size: image_size,
+ source: source.into(),
+ })
+ }
+
+ pub fn estimate_tokens(&self) -> usize {
+ let width = self.size.width.0.unsigned_abs() as usize;
+ let height = self.size.height.0.unsigned_abs() as usize;
+
+ // From: https://docs.anthropic.com/en/docs/build-with-claude/vision#calculate-image-costs
+ // Note that there are a lot of conditions on Anthropic's API, and OpenAI doesn't use this,
+ // so this method is more of a rough guess.
+ (width * height) / 750
+ }
+}
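For orientation, the arithmetic behind `estimate_tokens` (illustrative values, using integer division):

// (width * height) / 750
assert_eq!((1000 * 1000) / 750, 1333);
// With the 1568-pixel clamp applied above, a single image estimates to at most
// (1568 * 1568) / 750, roughly 3278 tokens.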
+
+#[derive(Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
+pub enum MessageContent {
+ Text(String),
+ Image(LanguageModelImage),
+}
+
+impl std::fmt::Debug for MessageContent {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ MessageContent::Text(t) => f.debug_struct("MessageContent").field("text", t).finish(),
+ MessageContent::Image(i) => f
+ .debug_struct("MessageContent")
+ .field("image", &i.source.len())
+ .finish(),
+ }
+ }
+}
+
+impl MessageContent {
+ pub fn as_string(&self) -> &str {
+ match self {
+ MessageContent::Text(s) => s.as_str(),
+ MessageContent::Image(_) => "",
+ }
+ }
+}
+
+impl From<String> for MessageContent {
+ fn from(value: String) -> Self {
+ MessageContent::Text(value)
+ }
+}
+
+impl From<&str> for MessageContent {
+ fn from(value: &str) -> Self {
+ MessageContent::Text(value.to_string())
+ }
+}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
pub struct LanguageModelRequestMessage {
pub role: Role,
- pub content: String,
+ pub content: Vec<MessageContent>,
+}
+
+impl LanguageModelRequestMessage {
+ pub fn string_contents(&self) -> String {
+ let mut string_buffer = String::new();
+ for string in self.content.iter().filter_map(|content| match content {
+ MessageContent::Text(s) => Some(s),
+ MessageContent::Image(_) => None,
+ }) {
+ string_buffer.push_str(string.as_str())
+ }
+ string_buffer
+ }
+
+ pub fn contents_empty(&self) -> bool {
+ self.content.is_empty()
+ || self
+ .content
+ .get(0)
+ .map(|content| match content {
+ MessageContent::Text(s) => s.is_empty(),
+ MessageContent::Image(_) => true,
+ })
+ .unwrap_or(false)
+ }
}
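A minimal construction sketch for the new message shape (hedged; `screenshot` is assumed to be a `LanguageModelImage` built with one of the constructors above):

let message = LanguageModelRequestMessage {
    role: Role::User,
    content: vec![
        MessageContent::Text("What is in this screenshot?".to_string()),
        MessageContent::Image(screenshot),
    ],
};

// Providers without image support fall back to the text-only view of the message.
assert_eq!(message.string_contents(), "What is in this screenshot?");
assert!(!message.contents_empty());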
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
@@ -23,14 +236,14 @@ impl LanguageModelRequest {
.into_iter()
.map(|msg| match msg.role {
Role::User => open_ai::RequestMessage::User {
- content: msg.content,
+ content: msg.string_contents(),
},
Role::Assistant => open_ai::RequestMessage::Assistant {
- content: Some(msg.content),
+ content: Some(msg.string_contents()),
tool_calls: Vec::new(),
},
Role::System => open_ai::RequestMessage::System {
- content: msg.content,
+ content: msg.string_contents(),
},
})
.collect(),
@@ -51,7 +264,7 @@ impl LanguageModelRequest {
.into_iter()
.map(|msg| google_ai::Content {
parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
- text: msg.content,
+ text: msg.string_contents(),
})],
role: match msg.role {
Role::User => google_ai::Role::User,
@@ -77,7 +290,7 @@ impl LanguageModelRequest {
let mut system_message = String::new();
for message in self.messages {
- if message.content.is_empty() {
+ if message.contents_empty() {
continue;
}
@@ -85,8 +298,11 @@ impl LanguageModelRequest {
Role::User | Role::Assistant => {
if let Some(last_message) = new_messages.last_mut() {
if last_message.role == message.role {
- last_message.content.push_str("\n\n");
- last_message.content.push_str(&message.content);
+ // TODO: is this append done properly?
+ last_message.content.push(MessageContent::Text(format!(
+ "\n\n{}",
+ message.string_contents()
+ )));
continue;
}
}
@@ -97,7 +313,7 @@ impl LanguageModelRequest {
if !system_message.is_empty() {
system_message.push_str("\n\n");
}
- system_message.push_str(&message.content);
+ system_message.push_str(&message.string_contents());
}
}
}
@@ -113,9 +329,24 @@ impl LanguageModelRequest {
Role::Assistant => anthropic::Role::Assistant,
Role::System => return None,
},
- content: vec![anthropic::Content::Text {
- text: message.content,
- }],
+ content: message
+ .content
+ .into_iter()
+ // TODO: filter out the empty messages in the message construction step
+ .filter_map(|content| match content {
+ MessageContent::Text(t) if !t.is_empty() => {
+ Some(anthropic::Content::Text { text: t })
+ }
+ MessageContent::Image(i) => Some(anthropic::Content::Image {
+ source: anthropic::ImageSource {
+ source_type: "base64".to_string(),
+ media_type: "image/png".to_string(),
+ data: i.source.to_string(),
+ },
+ }),
+ _ => None,
+ })
+ .collect(),
})
})
.collect(),
@@ -3,7 +3,7 @@ use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet};
use futures::Stream;
-use gpui::{BackgroundExecutor, ImageSource};
+use gpui::{BackgroundExecutor, SurfaceSource};
use live_kit_server::{proto, token};
use parking_lot::Mutex;
@@ -870,7 +870,7 @@ impl Frame {
self.height
}
- pub fn image(&self) -> ImageSource {
+ pub fn image(&self) -> SurfaceSource {
unimplemented!("you can't call this in test mode")
}
}
@@ -150,7 +150,7 @@ impl Markdown {
return;
}
let text = text.text_for_range(self.selection.start..self.selection.end);
- cx.write_to_clipboard(ClipboardItem::new(text));
+ cx.write_to_clipboard(ClipboardItem::new_string(text));
}
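These clipboard call sites, here and in the hunks that follow, all apply the same migration; a hedged sketch of the new shape as used in this change:

// Writing: plain strings go through the explicit string constructor.
cx.write_to_clipboard(ClipboardItem::new_string("hello".to_string()));

// Reading: `text()` now returns an Option, since a clipboard item may hold
// something other than a string (for example, an image).
if let Some(text) = cx.read_from_clipboard().and_then(|item| item.text()) {
    // use `text`
}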
fn parse(&mut self, cx: &mut ViewContext<Self>) {
@@ -480,7 +480,7 @@ impl MarkdownElement {
{
let text = rendered_text
.text_for_range(markdown.selection.start..markdown.selection.end);
- cx.write_to_primary(ClipboardItem::new(text))
+ cx.write_to_primary(ClipboardItem::new_string(text))
}
cx.notify();
}
@@ -1067,7 +1067,7 @@ impl OutlinePanel {
.and_then(|entry| self.abs_path(&entry, cx))
.map(|p| p.to_string_lossy().to_string())
{
- cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
+ cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
}
}
@@ -1082,7 +1082,7 @@ impl OutlinePanel {
})
.map(|p| p.to_string_lossy().to_string())
{
- cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
+ cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
}
}
@@ -170,6 +170,12 @@ pub fn contexts_dir() -> &'static PathBuf {
})
}
+/// Returns the path within the contexts directory where images from contexts are stored.
+pub fn context_images_dir() -> &'static PathBuf {
+ static CONTEXT_IMAGES_DIR: OnceLock<PathBuf> = OnceLock::new();
+ CONTEXT_IMAGES_DIR.get_or_init(|| contexts_dir().join("images"))
+}
+
/// Returns the path to the contexts directory.
///
/// This is where the prompts for use with the Assistant are stored.
@@ -1357,7 +1357,7 @@ impl ProjectPanel {
fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext<Self>) {
if let Some((worktree, entry)) = self.selected_entry(cx) {
- cx.write_to_clipboard(ClipboardItem::new(
+ cx.write_to_clipboard(ClipboardItem::new_string(
worktree
.abs_path()
.join(&entry.path)
@@ -1369,7 +1369,9 @@ impl ProjectPanel {
fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext<Self>) {
if let Some((_, entry)) = self.selected_entry(cx) {
- cx.write_to_clipboard(ClipboardItem::new(entry.path.to_string_lossy().to_string()));
+ cx.write_to_clipboard(ClipboardItem::new_string(
+ entry.path.to_string_lossy().to_string(),
+ ));
}
}
@@ -5,7 +5,7 @@ use crate::stdio::TerminalOutput;
use anyhow::Result;
use base64::prelude::*;
use gpui::{
- img, percentage, Animation, AnimationExt, AnyElement, FontWeight, ImageData, Render, Task,
+ img, percentage, Animation, AnimationExt, AnyElement, FontWeight, Render, RenderImage, Task,
TextRun, Transformation, View,
};
use runtimelib::datatable::TableSchema;
@@ -38,7 +38,7 @@ fn rank_mime_type(mimetype: &MimeType) -> usize {
pub struct ImageView {
height: u32,
width: u32,
- image: Arc<ImageData>,
+ image: Arc<RenderImage>,
}
impl ImageView {
@@ -76,7 +76,7 @@ impl ImageView {
let height = data.height();
let width = data.width();
- let gpui_image_data = ImageData::new(vec![image::Frame::new(data)]);
+ let gpui_image_data = RenderImage::new(vec![image::Frame::new(data)]);
return Ok(ImageView {
height,
@@ -656,13 +656,17 @@ impl Terminal {
cx.emit(Event::BreadcrumbsChanged);
}
AlacTermEvent::ClipboardStore(_, data) => {
- cx.write_to_clipboard(ClipboardItem::new(data.to_string()))
+ cx.write_to_clipboard(ClipboardItem::new_string(data.to_string()))
+ }
+ AlacTermEvent::ClipboardLoad(_, format) => {
+ self.write_to_pty(
+ match &cx.read_from_clipboard().and_then(|item| item.text()) {
+ // The terminal only supports pasting strings, not images.
+ Some(text) => format(text),
+ _ => format(""),
+ },
+ )
}
- AlacTermEvent::ClipboardLoad(_, format) => self.write_to_pty(format(
- &cx.read_from_clipboard()
- .map(|ci| ci.text().to_string())
- .unwrap_or_else(|| "".to_string()),
- )),
AlacTermEvent::PtyWrite(out) => self.write_to_pty(out.clone()),
AlacTermEvent::TextAreaSizeRequest(format) => {
self.write_to_pty(format(self.last_content.size.into()))
@@ -767,7 +771,7 @@ impl Terminal {
#[cfg(target_os = "linux")]
if let Some(selection_text) = term.selection_to_string() {
- cx.write_to_primary(ClipboardItem::new(selection_text));
+ cx.write_to_primary(ClipboardItem::new_string(selection_text));
}
if let Some((_, head)) = selection {
@@ -788,7 +792,7 @@ impl Terminal {
#[cfg(target_os = "linux")]
if let Some(selection_text) = term.selection_to_string() {
- cx.write_to_primary(ClipboardItem::new(selection_text));
+ cx.write_to_primary(ClipboardItem::new_string(selection_text));
}
self.selection_head = Some(point);
@@ -798,7 +802,7 @@ impl Terminal {
InternalEvent::Copy => {
if let Some(txt) = term.selection_to_string() {
- cx.write_to_clipboard(ClipboardItem::new(txt))
+ cx.write_to_clipboard(ClipboardItem::new_string(txt))
}
}
InternalEvent::ScrollToAlacPoint(point) => {
@@ -1342,7 +1346,7 @@ impl Terminal {
#[cfg(target_os = "linux")]
MouseButton::Middle => {
if let Some(item) = _cx.read_from_primary() {
- let text = item.text().to_string();
+ let text = item.text().unwrap_or_default().to_string();
self.input(text);
}
}
@@ -488,9 +488,9 @@ impl TerminalView {
///Attempt to paste the clipboard into the terminal
fn paste(&mut self, _: &Paste, cx: &mut ViewContext<Self>) {
- if let Some(item) = cx.read_from_clipboard() {
+ if let Some(clipboard_string) = cx.read_from_clipboard().and_then(|item| item.text()) {
self.terminal
- .update(cx, |terminal, _cx| terminal.paste(item.text()));
+ .update(cx, |terminal, _cx| terminal.paste(&clipboard_string));
}
}
@@ -361,7 +361,8 @@ mod test {
Mode::Normal,
);
assert_eq!(
- cx.read_from_clipboard().map(|item| item.text().clone()),
+ cx.read_from_clipboard()
+ .map(|item| item.text().unwrap().to_string()),
Some("jumps".into())
);
cx.simulate_keystrokes("d d p");
@@ -373,10 +374,11 @@ mod test {
Mode::Normal,
);
assert_eq!(
- cx.read_from_clipboard().map(|item| item.text().clone()),
+ cx.read_from_clipboard()
+ .map(|item| item.text().unwrap().to_string()),
Some("jumps".into())
);
- cx.write_to_clipboard(ClipboardItem::new("test-copy".to_string()));
+ cx.write_to_clipboard(ClipboardItem::new_string("test-copy".to_string()));
cx.simulate_keystrokes("shift-p");
cx.assert_state(
indoc! {"
@@ -5,7 +5,7 @@ use crate::surrounds::SurroundsType;
use crate::{motion::Motion, object::Object};
use collections::HashMap;
use editor::{Anchor, ClipboardSelection};
-use gpui::{Action, ClipboardItem, KeyContext};
+use gpui::{Action, ClipboardEntry, ClipboardItem, KeyContext};
use language::{CursorShape, Selection, TransactionId};
use serde::{Deserialize, Serialize};
use ui::SharedString;
@@ -129,20 +129,24 @@ pub struct Register {
impl From<Register> for ClipboardItem {
fn from(register: Register) -> Self {
- let item = ClipboardItem::new(register.text.into());
if let Some(clipboard_selections) = register.clipboard_selections {
- item.with_metadata(clipboard_selections)
+ ClipboardItem::new_string_with_json_metadata(register.text.into(), clipboard_selections)
} else {
- item
+ ClipboardItem::new_string(register.text.into())
}
}
}
impl From<ClipboardItem> for Register {
- fn from(value: ClipboardItem) -> Self {
- Register {
- text: value.text().to_owned().into(),
- clipboard_selections: value.metadata::<Vec<ClipboardSelection>>(),
+ fn from(item: ClipboardItem) -> Self {
+ // For now, we don't store metadata for multiple entries.
+ match item.entries().first() {
+ Some(ClipboardEntry::String(value)) if item.entries().len() == 1 => Register {
+ text: value.text().to_owned().into(),
+ clipboard_selections: value.metadata_json::<Vec<ClipboardSelection>>(),
+ },
+ // For now, registers can't store images. This could change in the future.
+ _ => Register::default(),
}
}
}
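A hedged round-trip sketch of the conversions above (`selections` is an assumed `Vec<ClipboardSelection>`):

// Register -> ClipboardItem: the yanked text becomes a single string entry with
// the selections attached as JSON metadata.
let item = ClipboardItem::new_string_with_json_metadata("yanked".to_string(), selections);

// ClipboardItem -> Register: only a single string entry round-trips; an image
// entry or a multi-entry item falls back to Register::default().
let register = Register::from(item);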
@@ -247,7 +247,12 @@ impl NeovimBackedTestContext {
register: '"',
state: self.shared_state().await,
neovim: self.neovim.read_register('"').await,
- editor: self.read_from_clipboard().unwrap().text().clone(),
+ editor: self
+ .read_from_clipboard()
+ .unwrap()
+ .text()
+ .unwrap()
+ .to_owned(),
}
}
@@ -586,7 +586,7 @@ impl Vim {
} else {
self.workspace_state.last_yank = cx
.read_from_clipboard()
- .map(|item| item.text().to_owned().into());
+ .and_then(|item| item.text().map(|string| string.into()))
}
self.workspace_state.registers.insert('"', content.clone());
@@ -663,7 +663,7 @@ impl Vim {
fn system_clipboard_is_newer(&self, cx: &mut AppContext) -> bool {
cx.read_from_clipboard().is_some_and(|item| {
if let Some(last_state) = &self.workspace_state.last_yank {
- last_state != item.text()
+ Some(last_state.as_ref()) != item.text().as_deref()
} else {
true
}
@@ -927,7 +927,7 @@ mod test {
the lazy dog"});
assert_eq!(
cx.read_from_clipboard()
- .map(|item| item.text().clone())
+ .map(|item| item.text().unwrap().to_string())
.unwrap(),
"The q"
);
@@ -342,7 +342,7 @@ impl Render for LanguageServerPrompt {
.on_click({
let message = request.message.clone();
move |_, cx| {
- cx.write_to_clipboard(ClipboardItem::new(
+ cx.write_to_clipboard(ClipboardItem::new_string(
message.clone(),
))
}
@@ -1632,7 +1632,7 @@ impl Pane {
.and_then(|entry| entry.project_path(cx))
.map(|p| p.path.to_string_lossy().to_string())
{
- cx.write_to_clipboard(ClipboardItem::new(clipboard_text));
+ cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text));
}
}
@@ -1842,7 +1842,7 @@ impl Pane {
"Copy Path",
Some(Box::new(CopyPath)),
cx.handler_for(&pane, move |_, cx| {
- cx.write_to_clipboard(ClipboardItem::new(
+ cx.write_to_clipboard(ClipboardItem::new_string(
abs_path.to_string_lossy().to_string(),
));
}),
@@ -7,7 +7,7 @@ use call::participant::{Frame, RemoteVideoTrack};
use client::{proto::PeerId, User};
use futures::StreamExt;
use gpui::{
- div, img, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
+ div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext,
WindowContext,
};
@@ -75,7 +75,7 @@ impl Render for SharedScreen {
.children(
self.frame
.as_ref()
- .map(|frame| img(frame.image()).size_full()),
+ .map(|frame| surface(frame.image()).size_full()),
)
}
}