Detailed changes
@@ -13,6 +13,12 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
+[target.aarch64-apple-darwin]
+rustflags = ["-C", "link-args=-Objc -all_load"]
+
+[target.x86_64-apple-darwin]
+rustflags = ["-C", "link-args=-Objc -all_load"]
+
# This cfg will reduce the size of `windows::core::Error` from 16 bytes to 4 bytes
[target.'cfg(target_os = "windows")']
rustflags = ["--cfg", "windows_slim_errors"]
@@ -129,6 +129,7 @@ jobs:
run: |
cargo build --workspace --bins --all-features
cargo check -p gpui --features "macos-blade"
+ cargo check -p workspace --features "livekit-cross-platform"
cargo build -p remote_server
linux_tests:
@@ -962,6 +962,22 @@ dependencies = [
"syn 2.0.87",
]
+[[package]]
+name = "async-tungstenite"
+version = "0.25.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2cca750b12e02c389c1694d35c16539f88b8bbaa5945934fdc1b41a776688589"
+dependencies = [
+ "async-native-tls",
+ "async-std",
+ "async-tls",
+ "futures-io",
+ "futures-util",
+ "log",
+ "pin-project-lite",
+ "tungstenite 0.21.0",
+]
+
[[package]]
name = "async-tungstenite"
version = "0.28.0"
@@ -1830,7 +1846,7 @@ dependencies = [
"arrayvec",
"cc",
"cfg-if",
- "constant_time_eq",
+ "constant_time_eq 0.3.1",
]
[[package]]
@@ -2015,6 +2031,27 @@ dependencies = [
"either",
]
+[[package]]
+name = "bzip2"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8"
+dependencies = [
+ "bzip2-sys",
+ "libc",
+]
+
+[[package]]
+name = "bzip2-sys"
+version = "0.1.11+1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+]
+
[[package]]
name = "call"
version = "0.1.0"
@@ -2023,12 +2060,14 @@ dependencies = [
"audio",
"client",
"collections",
+ "feature_flags",
"fs",
"futures 0.3.31",
"gpui",
"http_client",
"language",
- "live_kit_client",
+ "livekit_client",
+ "livekit_client_macos",
"log",
"postage",
"project",
@@ -2486,7 +2525,7 @@ dependencies = [
"anyhow",
"async-native-tls",
"async-recursion 0.3.2",
- "async-tungstenite",
+ "async-tungstenite 0.28.0",
"chrono",
"clock",
"cocoa 0.26.0",
@@ -2618,7 +2657,7 @@ dependencies = [
"assistant_tool",
"async-stripe",
"async-trait",
- "async-tungstenite",
+ "async-tungstenite 0.28.0",
"audio",
"aws-config",
"aws-sdk-kinesis",
@@ -2656,8 +2695,9 @@ dependencies = [
"jsonwebtoken",
"language",
"language_model",
- "live_kit_client",
- "live_kit_server",
+ "livekit_client",
+ "livekit_client_macos",
+ "livekit_server",
"log",
"lsp",
"menu",
@@ -2670,7 +2710,7 @@ dependencies = [
"pretty_assertions",
"project",
"prometheus",
- "prost",
+ "prost 0.9.0",
"rand 0.8.5",
"recent_projects",
"release_channel",
@@ -2870,6 +2910,12 @@ dependencies = [
"tiny-keccak",
]
+[[package]]
+name = "constant_time_eq"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
+
[[package]]
name = "constant_time_eq"
version = "0.3.1"
@@ -3077,6 +3123,17 @@ dependencies = [
"coreaudio-sys",
]
+[[package]]
+name = "coreaudio-rs"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34ca07354f6d0640333ef95f48d460a4bcf34812a7e7967f9b44c728a8f37c28"
+dependencies = [
+ "bitflags 1.3.2",
+ "core-foundation-sys",
+ "coreaudio-sys",
+]
+
[[package]]
name = "coreaudio-sys"
version = "0.2.16"
@@ -3111,12 +3168,11 @@ dependencies = [
[[package]]
name = "cpal"
version = "0.15.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "873dab07c8f743075e57f524c583985fbaf745602acbe916a01539364369a779"
+source = "git+https://github.com/zed-industries/cpal?rev=fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50#fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50"
dependencies = [
"alsa",
"core-foundation-sys",
- "coreaudio-rs",
+ "coreaudio-rs 0.11.3",
"dasp_sample",
"jni",
"js-sys",
@@ -3448,6 +3504,65 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96a6ac251f4a2aca6b3f91340350eab87ae57c3f127ffeb585e92bd336717991"
+[[package]]
+name = "cxx"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05e1ec88093d2abd9cf1b09ffd979136b8e922bf31cad966a8fe0d73233112ef"
+dependencies = [
+ "cc",
+ "cxxbridge-cmd",
+ "cxxbridge-flags",
+ "cxxbridge-macro",
+ "foldhash",
+ "link-cplusplus",
+]
+
+[[package]]
+name = "cxx-build"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afa390d956ee7ccb41aeed7ed7856ab3ffb4fc587e7216be7e0f83e949b4e6c"
+dependencies = [
+ "cc",
+ "codespan-reporting",
+ "proc-macro2",
+ "quote",
+ "scratch",
+ "syn 2.0.87",
+]
+
+[[package]]
+name = "cxxbridge-cmd"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c23bfff654d6227cbc83de8e059d2f8678ede5fc3a6c5a35d5c379983cc61e6"
+dependencies = [
+ "clap",
+ "codespan-reporting",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.87",
+]
+
+[[package]]
+name = "cxxbridge-flags"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7c01b36e22051bc6928a78583f1621abaaf7621561c2ada1b00f7878fbe2caa"
+
+[[package]]
+name = "cxxbridge-macro"
+version = "1.0.133"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6e14013136fac689345d17b9a6df55977251f11d333c0a571e8d963b55e1f95"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rustversion",
+ "syn 2.0.87",
+]
+
[[package]]
name = "dashmap"
version = "5.5.3"
@@ -4706,6 +4821,16 @@ dependencies = [
"windows-sys 0.52.0",
]
+[[package]]
+name = "fs2"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
[[package]]
name = "fsevent"
version = "0.1.0"
@@ -6338,6 +6463,15 @@ dependencies = [
"either",
]
+[[package]]
+name = "itertools"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+dependencies = [
+ "either",
+]
+
[[package]]
name = "itertools"
version = "0.12.1"
@@ -6463,7 +6597,7 @@ checksum = "58d9afa5bc6eeafb78f710a2efc585f69099f8b6a99dc7eb826581e3773a6e31"
dependencies = [
"anyhow",
"async-trait",
- "async-tungstenite",
+ "async-tungstenite 0.28.0",
"futures 0.3.31",
"jupyter-protocol",
"serde",
@@ -6881,6 +7015,29 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "libwebrtc"
+version = "0.3.7"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "cxx",
+ "jni",
+ "js-sys",
+ "lazy_static",
+ "livekit-protocol",
+ "livekit-runtime",
+ "log",
+ "parking_lot",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+ "tokio",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webrtc-sys",
+]
+
[[package]]
name = "libz-sys"
version = "1.1.20"
@@ -6893,6 +7050,15 @@ dependencies = [
"vcpkg",
]
+[[package]]
+name = "link-cplusplus"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "linkify"
version = "0.10.0"
@@ -6941,7 +7107,112 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"
[[package]]
-name = "live_kit_client"
+name = "livekit"
+version = "0.7.0"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "chrono",
+ "futures-util",
+ "lazy_static",
+ "libwebrtc",
+ "livekit-api",
+ "livekit-protocol",
+ "livekit-runtime",
+ "log",
+ "parking_lot",
+ "prost 0.12.6",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 1.0.69",
+ "tokio",
+]
+
+[[package]]
+name = "livekit-api"
+version = "0.4.1"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "async-tungstenite 0.25.1",
+ "futures-util",
+ "http 0.2.12",
+ "jsonwebtoken",
+ "livekit-protocol",
+ "livekit-runtime",
+ "log",
+ "parking_lot",
+ "prost 0.12.6",
+ "reqwest 0.11.27",
+ "scopeguard",
+ "serde",
+ "serde_json",
+ "sha2",
+ "thiserror 1.0.69",
+ "tokio",
+ "tokio-tungstenite 0.20.1",
+ "url",
+]
+
+[[package]]
+name = "livekit-protocol"
+version = "0.3.6"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "futures-util",
+ "livekit-runtime",
+ "parking_lot",
+ "pbjson",
+ "pbjson-types",
+ "prost 0.12.6",
+ "prost-types 0.12.6",
+ "serde",
+ "thiserror 1.0.69",
+ "tokio",
+]
+
+[[package]]
+name = "livekit-runtime"
+version = "0.3.1"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "async-io 2.4.0",
+ "async-std",
+ "async-task",
+ "futures 0.3.31",
+]
+
+[[package]]
+name = "livekit_client"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "collections",
+ "core-foundation 0.9.4",
+ "coreaudio-rs 0.12.1",
+ "cpal",
+ "futures 0.3.31",
+ "gpui",
+ "http 0.2.12",
+ "http_client",
+ "image",
+ "livekit",
+ "livekit_server",
+ "log",
+ "media",
+ "nanoid",
+ "parking_lot",
+ "postage",
+ "serde",
+ "serde_json",
+ "sha2",
+ "simplelog",
+ "smallvec",
+ "util",
+]
+
+[[package]]
+name = "livekit_client_macos"
version = "0.1.0"
dependencies = [
"anyhow",
@@ -6951,7 +7222,7 @@ dependencies = [
"core-foundation 0.9.4",
"futures 0.3.31",
"gpui",
- "live_kit_server",
+ "livekit_server",
"log",
"media",
"nanoid",
@@ -6964,16 +7235,16 @@ dependencies = [
]
[[package]]
-name = "live_kit_server"
+name = "livekit_server"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"jsonwebtoken",
"log",
- "prost",
- "prost-build",
- "prost-types",
+ "prost 0.9.0",
+ "prost-build 0.9.0",
+ "prost-types 0.9.0",
"reqwest 0.12.8",
"serde",
]
@@ -7262,6 +7533,7 @@ dependencies = [
"anyhow",
"bindgen",
"core-foundation 0.9.4",
+ "ctor",
"foreign-types 0.5.0",
"metal",
"objc",
@@ -7954,7 +8226,7 @@ dependencies = [
"md-5",
"num",
"num-bigint-dig",
- "pbkdf2",
+ "pbkdf2 0.12.2",
"rand 0.8.5",
"serde",
"sha2",
@@ -8274,6 +8546,17 @@ dependencies = [
"windows-targets 0.52.6",
]
+[[package]]
+name = "password-hash"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700"
+dependencies = [
+ "base64ct",
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "password-hash"
version = "0.5.0"
@@ -8324,6 +8607,55 @@ dependencies = [
"util",
]
+[[package]]
+name = "pbjson"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1030c719b0ec2a2d25a5df729d6cff1acf3cc230bf766f4f97833591f7577b90"
+dependencies = [
+ "base64 0.21.7",
+ "serde",
+]
+
+[[package]]
+name = "pbjson-build"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2580e33f2292d34be285c5bc3dba5259542b083cfad6037b6d70345f24dcb735"
+dependencies = [
+ "heck 0.4.1",
+ "itertools 0.11.0",
+ "prost 0.12.6",
+ "prost-types 0.12.6",
+]
+
+[[package]]
+name = "pbjson-types"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12"
+dependencies = [
+ "bytes 1.8.0",
+ "chrono",
+ "pbjson",
+ "pbjson-build",
+ "prost 0.12.6",
+ "prost-build 0.12.6",
+ "serde",
+]
+
+[[package]]
+name = "pbkdf2"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917"
+dependencies = [
+ "digest",
+ "hmac",
+ "password-hash 0.4.2",
+ "sha2",
+]
+
[[package]]
name = "pbkdf2"
version = "0.12.2"
@@ -9353,7 +9685,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001"
dependencies = [
"bytes 1.8.0",
- "prost-derive",
+ "prost-derive 0.9.0",
+]
+
+[[package]]
+name = "prost"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29"
+dependencies = [
+ "bytes 1.8.0",
+ "prost-derive 0.12.6",
]
[[package]]
@@ -9369,13 +9711,34 @@ dependencies = [
"log",
"multimap",
"petgraph",
- "prost",
- "prost-types",
+ "prost 0.9.0",
+ "prost-types 0.9.0",
"regex",
"tempfile",
"which 4.4.2",
]
+[[package]]
+name = "prost-build"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4"
+dependencies = [
+ "bytes 1.8.0",
+ "heck 0.5.0",
+ "itertools 0.12.1",
+ "log",
+ "multimap",
+ "once_cell",
+ "petgraph",
+ "prettyplease",
+ "prost 0.12.6",
+ "prost-types 0.12.6",
+ "regex",
+ "syn 2.0.87",
+ "tempfile",
+]
+
[[package]]
name = "prost-derive"
version = "0.9.0"
@@ -9389,6 +9752,19 @@ dependencies = [
"syn 1.0.109",
]
+[[package]]
+name = "prost-derive"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1"
+dependencies = [
+ "anyhow",
+ "itertools 0.12.1",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.87",
+]
+
[[package]]
name = "prost-types"
version = "0.9.0"
@@ -9396,7 +9772,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a"
dependencies = [
"bytes 1.8.0",
- "prost",
+ "prost 0.9.0",
+]
+
+[[package]]
+name = "prost-types"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0"
+dependencies = [
+ "prost 0.12.6",
]
[[package]]
@@ -9405,8 +9790,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "prost",
- "prost-build",
+ "prost 0.9.0",
+ "prost-build 0.9.0",
"serde",
]
@@ -9906,7 +10291,7 @@ dependencies = [
"log",
"parking_lot",
"paths",
- "prost",
+ "prost 0.9.0",
"release_channel",
"rpc",
"serde",
@@ -10041,6 +10426,7 @@ dependencies = [
"http 0.2.12",
"http-body 0.4.6",
"hyper 0.14.31",
+ "hyper-rustls 0.24.2",
"hyper-tls",
"ipnet",
"js-sys",
@@ -10050,6 +10436,8 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
+ "rustls 0.21.12",
+ "rustls-native-certs 0.6.3",
"rustls-pemfile 1.0.4",
"serde",
"serde_json",
@@ -10058,6 +10446,7 @@ dependencies = [
"system-configuration 0.5.1",
"tokio",
"tokio-native-tls",
+ "tokio-rustls 0.24.1",
"tower-service",
"url",
"wasm-bindgen",
@@ -10281,7 +10670,7 @@ name = "rpc"
version = "0.1.0"
dependencies = [
"anyhow",
- "async-tungstenite",
+ "async-tungstenite 0.28.0",
"base64 0.22.1",
"chrono",
"collections",
@@ -10657,14 +11046,20 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+[[package]]
+name = "scratch"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152"
+
[[package]]
name = "scrypt"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f"
dependencies = [
- "password-hash",
- "pbkdf2",
+ "password-hash 0.5.0",
+ "pbkdf2 0.12.2",
"salsa20",
"sha2",
]
@@ -12823,7 +13218,10 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
dependencies = [
"futures-util",
"log",
+ "rustls 0.21.12",
+ "rustls-native-certs 0.6.3",
"tokio",
+ "tokio-rustls 0.24.1",
"tungstenite 0.20.1",
]
@@ -13331,6 +13729,7 @@ dependencies = [
"httparse",
"log",
"rand 0.8.5",
+ "rustls 0.21.12",
"sha1",
"thiserror 1.0.69",
"url",
@@ -13349,6 +13748,7 @@ dependencies = [
"http 1.1.0",
"httparse",
"log",
+ "native-tls",
"rand 0.8.5",
"sha1",
"thiserror 1.0.69",
@@ -14461,6 +14861,32 @@ dependencies = [
"rustls-pki-types",
]
+[[package]]
+name = "webrtc-sys"
+version = "0.3.5"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "cc",
+ "cxx",
+ "cxx-build",
+ "glob",
+ "log",
+ "webrtc-sys-build",
+]
+
+[[package]]
+name = "webrtc-sys-build"
+version = "0.3.5"
+source = "git+https://github.com/zed-industries/rust-sdks?rev=799f10133d93ba2a88642cd480d01ec4da53408c#799f10133d93ba2a88642cd480d01ec4da53408c"
+dependencies = [
+ "fs2",
+ "regex",
+ "reqwest 0.11.27",
+ "scratch",
+ "semver",
+ "zip",
+]
+
[[package]]
name = "weezl"
version = "0.1.8"
@@ -16026,6 +16452,26 @@ dependencies = [
"syn 2.0.87",
]
+[[package]]
+name = "zip"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
+dependencies = [
+ "aes",
+ "byteorder",
+ "bzip2",
+ "constant_time_eq 0.1.5",
+ "crc32fast",
+ "crossbeam-utils",
+ "flate2",
+ "hmac",
+ "pbkdf2 0.11.0",
+ "sha1",
+ "time",
+ "zstd",
+]
+
[[package]]
name = "zstd"
version = "0.11.2+zstd.1.5.2"
@@ -65,8 +65,9 @@ members = [
"crates/language_selector",
"crates/language_tools",
"crates/languages",
- "crates/live_kit_client",
- "crates/live_kit_server",
+ "crates/livekit_client",
+ "crates/livekit_client_macos",
+ "crates/livekit_server",
"crates/lsp",
"crates/markdown",
"crates/markdown_preview",
@@ -248,8 +249,9 @@ language_models = { path = "crates/language_models" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
languages = { path = "crates/languages" }
-live_kit_client = { path = "crates/live_kit_client" }
-live_kit_server = { path = "crates/live_kit_server" }
+livekit_client = { path = "crates/livekit_client" }
+livekit_client_macos = { path = "crates/livekit_client_macos" }
+livekit_server = { path = "crates/livekit_server" }
lsp = { path = "crates/lsp" }
markdown = { path = "crates/markdown" }
markdown_preview = { path = "crates/markdown_preview" }
@@ -382,6 +384,7 @@ heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
hex = "0.4.3"
html5ever = "0.27.0"
hyper = "0.14"
+http = "1.1"
ignore = "0.4.22"
image = "0.25.1"
indexmap = { version = "1.6.2", features = ["serde"] }
@@ -393,6 +396,7 @@ jupyter-websocket-client = { version = "0.8.0" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
+livekit = { git = "https://github.com/zed-industries/rust-sdks", rev="799f10133d93ba2a88642cd480d01ec4da53408c", features = ["dispatcher", "services-dispatcher", "rustls-tls-native-roots"], default-features = false }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
nanoid = "0.4"
@@ -571,6 +575,10 @@ features = [
"Win32_UI_WindowsAndMessaging",
]
+# TODO livekit https://github.com/RustAudio/cpal/pull/891
+[patch.crates-io]
+cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
+
[profile.dev]
split-debuginfo = "unpacked"
debug = "limited"
@@ -17,21 +17,23 @@ test-support = [
"client/test-support",
"collections/test-support",
"gpui/test-support",
- "live_kit_client/test-support",
+ "livekit_client/test-support",
"project/test-support",
"util/test-support"
]
+livekit-macos = ["livekit_client_macos"]
+livekit-cross-platform = ["livekit_client"]
[dependencies]
anyhow.workspace = true
audio.workspace = true
client.workspace = true
collections.workspace = true
+feature_flags.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
-live_kit_client.workspace = true
log.workspace = true
postage.workspace = true
project.workspace = true
@@ -40,6 +42,8 @@ serde.workspace = true
serde_derive.workspace = true
settings.workspace = true
util.workspace = true
+livekit_client_macos = { workspace = true, optional = true }
+livekit_client = { workspace = true, optional = true }
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
@@ -47,7 +51,12 @@ collections = { workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
-live_kit_client = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
+
+[target.'cfg(target_os = "macos")'.dev-dependencies]
+livekit_client_macos = { workspace = true, features = ["test-support"] }
+
+[target.'cfg(not(target_os = "macos"))'.dev-dependencies]
+livekit_client = { workspace = true, features = ["test-support"] }
@@ -1,546 +1,41 @@
pub mod call_settings;
-pub mod participant;
-pub mod room;
-use anyhow::{anyhow, Result};
-use audio::Audio;
-use call_settings::CallSettings;
-use client::{proto, ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
-use collections::HashSet;
-use futures::{channel::oneshot, future::Shared, Future, FutureExt};
-use gpui::{
- AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Subscription,
- Task, WeakModel,
-};
-use postage::watch;
-use project::Project;
-use room::Event;
-use settings::Settings;
-use std::sync::Arc;
-
-pub use participant::ParticipantLocation;
-pub use room::Room;
-
-struct GlobalActiveCall(Model<ActiveCall>);
-
-impl Global for GlobalActiveCall {}
-
-pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
- CallSettings::register(cx);
-
- let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
- cx.set_global(GlobalActiveCall(active_call));
-}
-
-pub struct OneAtATime {
- cancel: Option<oneshot::Sender<()>>,
-}
-
-impl OneAtATime {
- /// spawn a task in the given context.
- /// if another task is spawned before that resolves, or if the OneAtATime itself is dropped, the first task will be cancelled and return Ok(None)
- /// otherwise you'll see the result of the task.
- fn spawn<F, Fut, R>(&mut self, cx: &mut AppContext, f: F) -> Task<Result<Option<R>>>
- where
- F: 'static + FnOnce(AsyncAppContext) -> Fut,
- Fut: Future<Output = Result<R>>,
- R: 'static,
- {
- let (tx, rx) = oneshot::channel();
- self.cancel.replace(tx);
- cx.spawn(|cx| async move {
- futures::select_biased! {
- _ = rx.fuse() => Ok(None),
- result = f(cx).fuse() => result.map(Some),
- }
- })
- }
-
- fn running(&self) -> bool {
- self.cancel
- .as_ref()
- .is_some_and(|cancel| !cancel.is_canceled())
- }
-}
-
-#[derive(Clone)]
-pub struct IncomingCall {
- pub room_id: u64,
- pub calling_user: Arc<User>,
- pub participants: Vec<Arc<User>>,
- pub initial_project: Option<proto::ParticipantProject>,
-}
-
-/// Singleton global maintaining the user's participation in a room across workspaces.
-pub struct ActiveCall {
- room: Option<(Model<Room>, Vec<Subscription>)>,
- pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
- location: Option<WeakModel<Project>>,
- _join_debouncer: OneAtATime,
- pending_invites: HashSet<u64>,
- incoming_call: (
- watch::Sender<Option<IncomingCall>>,
- watch::Receiver<Option<IncomingCall>>,
+#[cfg(any(
+ all(target_os = "macos", feature = "livekit-macos"),
+ all(
+ not(target_os = "macos"),
+ feature = "livekit-macos",
+ not(feature = "livekit-cross-platform")
+ )
+))]
+mod macos;
+
+#[cfg(any(
+ all(target_os = "macos", feature = "livekit-macos"),
+ all(
+ not(target_os = "macos"),
+ feature = "livekit-macos",
+ not(feature = "livekit-cross-platform")
+ )
+))]
+pub use macos::*;
+
+#[cfg(any(
+ all(
+ target_os = "macos",
+ feature = "livekit-cross-platform",
+ not(feature = "livekit-macos"),
),
- client: Arc<Client>,
- user_store: Model<UserStore>,
- _subscriptions: Vec<client::Subscription>,
-}
-
-impl EventEmitter<Event> for ActiveCall {}
-
-impl ActiveCall {
- fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
- Self {
- room: None,
- pending_room_creation: None,
- location: None,
- pending_invites: Default::default(),
- incoming_call: watch::channel(),
- _join_debouncer: OneAtATime { cancel: None },
- _subscriptions: vec![
- client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
- client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
- ],
- client,
- user_store,
- }
- }
-
- pub fn channel_id(&self, cx: &AppContext) -> Option<ChannelId> {
- self.room()?.read(cx).channel_id()
- }
-
- async fn handle_incoming_call(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::IncomingCall>,
- mut cx: AsyncAppContext,
- ) -> Result<proto::Ack> {
- let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
- let call = IncomingCall {
- room_id: envelope.payload.room_id,
- participants: user_store
- .update(&mut cx, |user_store, cx| {
- user_store.get_users(envelope.payload.participant_user_ids, cx)
- })?
- .await?,
- calling_user: user_store
- .update(&mut cx, |user_store, cx| {
- user_store.get_user(envelope.payload.calling_user_id, cx)
- })?
- .await?,
- initial_project: envelope.payload.initial_project,
- };
- this.update(&mut cx, |this, _| {
- *this.incoming_call.0.borrow_mut() = Some(call);
- })?;
-
- Ok(proto::Ack {})
- }
-
- async fn handle_call_canceled(
- this: Model<Self>,
- envelope: TypedEnvelope<proto::CallCanceled>,
- mut cx: AsyncAppContext,
- ) -> Result<()> {
- this.update(&mut cx, |this, _| {
- let mut incoming_call = this.incoming_call.0.borrow_mut();
- if incoming_call
- .as_ref()
- .map_or(false, |call| call.room_id == envelope.payload.room_id)
- {
- incoming_call.take();
- }
- })?;
- Ok(())
- }
-
- pub fn global(cx: &AppContext) -> Model<Self> {
- cx.global::<GlobalActiveCall>().0.clone()
- }
-
- pub fn try_global(cx: &AppContext) -> Option<Model<Self>> {
- cx.try_global::<GlobalActiveCall>()
- .map(|call| call.0.clone())
- }
-
- pub fn invite(
- &mut self,
- called_user_id: u64,
- initial_project: Option<Model<Project>>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- if !self.pending_invites.insert(called_user_id) {
- return Task::ready(Err(anyhow!("user was already invited")));
- }
- cx.notify();
-
- if self._join_debouncer.running() {
- return Task::ready(Ok(()));
- }
-
- let room = if let Some(room) = self.room().cloned() {
- Some(Task::ready(Ok(room)).shared())
- } else {
- self.pending_room_creation.clone()
- };
-
- let invite = if let Some(room) = room {
- cx.spawn(move |_, mut cx| async move {
- let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
-
- let initial_project_id = if let Some(initial_project) = initial_project {
- Some(
- room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
- .await?,
- )
- } else {
- None
- };
-
- room.update(&mut cx, move |room, cx| {
- room.call(called_user_id, initial_project_id, cx)
- })?
- .await?;
-
- anyhow::Ok(())
- })
- } else {
- let client = self.client.clone();
- let user_store = self.user_store.clone();
- let room = cx
- .spawn(move |this, mut cx| async move {
- let create_room = async {
- let room = cx
- .update(|cx| {
- Room::create(
- called_user_id,
- initial_project,
- client,
- user_store,
- cx,
- )
- })?
- .await?;
-
- this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
- .await?;
-
- anyhow::Ok(room)
- };
-
- let room = create_room.await;
- this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
- room.map_err(Arc::new)
- })
- .shared();
- self.pending_room_creation = Some(room.clone());
- cx.background_executor().spawn(async move {
- room.await.map_err(|err| anyhow!("{:?}", err))?;
- anyhow::Ok(())
- })
- };
-
- cx.spawn(move |this, mut cx| async move {
- let result = invite.await;
- if result.is_ok() {
- this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
- } else {
- //TODO: report collaboration error
- log::error!("invite failed: {:?}", result);
- }
-
- this.update(&mut cx, |this, cx| {
- this.pending_invites.remove(&called_user_id);
- cx.notify();
- })?;
- result
- })
- }
-
- pub fn cancel_invite(
- &mut self,
- called_user_id: u64,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- let room_id = if let Some(room) = self.room() {
- room.read(cx).id()
- } else {
- return Task::ready(Err(anyhow!("no active call")));
- };
-
- let client = self.client.clone();
- cx.background_executor().spawn(async move {
- client
- .request(proto::CancelCall {
- room_id,
- called_user_id,
- })
- .await?;
- anyhow::Ok(())
- })
- }
-
- pub fn incoming(&self) -> watch::Receiver<Option<IncomingCall>> {
- self.incoming_call.1.clone()
- }
-
- pub fn accept_incoming(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- if self.room.is_some() {
- return Task::ready(Err(anyhow!("cannot join while on another call")));
- }
-
- let call = if let Some(call) = self.incoming_call.0.borrow_mut().take() {
- call
- } else {
- return Task::ready(Err(anyhow!("no incoming call")));
- };
-
- if self.pending_room_creation.is_some() {
- return Task::ready(Ok(()));
- }
-
- let room_id = call.room_id;
- let client = self.client.clone();
- let user_store = self.user_store.clone();
- let join = self
- ._join_debouncer
- .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
-
- cx.spawn(|this, mut cx| async move {
- let room = join.await?;
- this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
- .await?;
- this.update(&mut cx, |this, cx| {
- this.report_call_event("accept incoming", cx)
- })?;
- Ok(())
- })
- }
-
- pub fn decline_incoming(&mut self, _: &mut ModelContext<Self>) -> Result<()> {
- let call = self
- .incoming_call
- .0
- .borrow_mut()
- .take()
- .ok_or_else(|| anyhow!("no incoming call"))?;
- report_call_event_for_room("decline incoming", call.room_id, None, &self.client);
- self.client.send(proto::DeclineCall {
- room_id: call.room_id,
- })?;
- Ok(())
- }
-
- pub fn join_channel(
- &mut self,
- channel_id: ChannelId,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<Option<Model<Room>>>> {
- if let Some(room) = self.room().cloned() {
- if room.read(cx).channel_id() == Some(channel_id) {
- return Task::ready(Ok(Some(room)));
- } else {
- room.update(cx, |room, cx| room.clear_state(cx));
- }
- }
-
- if self.pending_room_creation.is_some() {
- return Task::ready(Ok(None));
- }
-
- let client = self.client.clone();
- let user_store = self.user_store.clone();
- let join = self._join_debouncer.spawn(cx, move |cx| async move {
- Room::join_channel(channel_id, client, user_store, cx).await
- });
-
- cx.spawn(|this, mut cx| async move {
- let room = join.await?;
- this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
- .await?;
- this.update(&mut cx, |this, cx| {
- this.report_call_event("join channel", cx)
- })?;
- Ok(room)
- })
- }
-
- pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- cx.notify();
- self.report_call_event("hang up", cx);
-
- Audio::end_call(cx);
-
- let channel_id = self.channel_id(cx);
- if let Some((room, _)) = self.room.take() {
- cx.emit(Event::RoomLeft { channel_id });
- room.update(cx, |room, cx| room.leave(cx))
- } else {
- Task::ready(Ok(()))
- }
- }
-
- pub fn share_project(
- &mut self,
- project: Model<Project>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<u64>> {
- if let Some((room, _)) = self.room.as_ref() {
- self.report_call_event("share project", cx);
- room.update(cx, |room, cx| room.share_project(project, cx))
- } else {
- Task::ready(Err(anyhow!("no active call")))
- }
- }
-
- pub fn unshare_project(
- &mut self,
- project: Model<Project>,
- cx: &mut ModelContext<Self>,
- ) -> Result<()> {
- if let Some((room, _)) = self.room.as_ref() {
- self.report_call_event("unshare project", cx);
- room.update(cx, |room, cx| room.unshare_project(project, cx))
- } else {
- Err(anyhow!("no active call"))
- }
- }
-
- pub fn location(&self) -> Option<&WeakModel<Project>> {
- self.location.as_ref()
- }
-
- pub fn set_location(
- &mut self,
- project: Option<&Model<Project>>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- if project.is_some() || !*ZED_ALWAYS_ACTIVE {
- self.location = project.map(|project| project.downgrade());
- if let Some((room, _)) = self.room.as_ref() {
- return room.update(cx, |room, cx| room.set_location(project, cx));
- }
- }
- Task::ready(Ok(()))
- }
-
- fn set_room(
- &mut self,
- room: Option<Model<Room>>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<()>> {
- if room.as_ref() == self.room.as_ref().map(|room| &room.0) {
- Task::ready(Ok(()))
- } else {
- cx.notify();
- if let Some(room) = room {
- if room.read(cx).status().is_offline() {
- self.room = None;
- Task::ready(Ok(()))
- } else {
- let subscriptions = vec![
- cx.observe(&room, |this, room, cx| {
- if room.read(cx).status().is_offline() {
- this.set_room(None, cx).detach_and_log_err(cx);
- }
-
- cx.notify();
- }),
- cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
- ];
- self.room = Some((room.clone(), subscriptions));
- let location = self
- .location
- .as_ref()
- .and_then(|location| location.upgrade());
- let channel_id = room.read(cx).channel_id();
- cx.emit(Event::RoomJoined { channel_id });
- room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
- }
- } else {
- self.room = None;
- Task::ready(Ok(()))
- }
- }
- }
-
- pub fn room(&self) -> Option<&Model<Room>> {
- self.room.as_ref().map(|(room, _)| room)
- }
-
- pub fn client(&self) -> Arc<Client> {
- self.client.clone()
- }
-
- pub fn pending_invites(&self) -> &HashSet<u64> {
- &self.pending_invites
- }
-
- pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
- if let Some(room) = self.room() {
- let room = room.read(cx);
- report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client);
- }
- }
-}
-
-pub fn report_call_event_for_room(
- operation: &'static str,
- room_id: u64,
- channel_id: Option<ChannelId>,
- client: &Arc<Client>,
-) {
- let telemetry = client.telemetry();
-
- telemetry.report_call_event(operation, Some(room_id), channel_id)
-}
-
-pub fn report_call_event_for_channel(
- operation: &'static str,
- channel_id: ChannelId,
- client: &Arc<Client>,
- cx: &AppContext,
-) {
- let room = ActiveCall::global(cx).read(cx).room();
-
- let telemetry = client.telemetry();
-
- telemetry.report_call_event(operation, room.map(|r| r.read(cx).id()), Some(channel_id))
-}
-
-#[cfg(test)]
-mod test {
- use gpui::TestAppContext;
-
- use crate::OneAtATime;
-
- #[gpui::test]
- async fn test_one_at_a_time(cx: &mut TestAppContext) {
- let mut one_at_a_time = OneAtATime { cancel: None };
-
- assert_eq!(
- cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) }))
- .await
- .unwrap(),
- Some(1)
- );
-
- let (a, b) = cx.update(|cx| {
- (
- one_at_a_time.spawn(cx, |_| async {
- panic!("");
- }),
- one_at_a_time.spawn(cx, |_| async { Ok(3) }),
- )
- });
-
- assert_eq!(a.await.unwrap(), None::<u32>);
- assert_eq!(b.await.unwrap(), Some(3));
-
- let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));
- drop(one_at_a_time);
-
- assert_eq!(promise.await.unwrap(), None);
- }
-}
+ all(not(target_os = "macos"), feature = "livekit-cross-platform"),
+))]
+mod cross_platform;
+
+#[cfg(any(
+ all(
+ target_os = "macos",
+ feature = "livekit-cross-platform",
+ not(feature = "livekit-macos"),
+ ),
+ all(not(target_os = "macos"), feature = "livekit-cross-platform"),
+))]
+pub use cross_platform::*;
@@ -0,0 +1,552 @@
+pub mod participant;
+pub mod room;
+
+use crate::call_settings::CallSettings;
+use anyhow::{anyhow, Result};
+use audio::Audio;
+use client::{proto, ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
+use collections::HashSet;
+use futures::{channel::oneshot, future::Shared, Future, FutureExt};
+use gpui::{
+ AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Subscription,
+ Task, WeakModel,
+};
+use postage::watch;
+use project::Project;
+use room::Event;
+use settings::Settings;
+use std::sync::Arc;
+
+pub use livekit_client::{
+ track::RemoteVideoTrack, RemoteVideoTrackView, RemoteVideoTrackViewEvent,
+};
+pub use participant::ParticipantLocation;
+pub use room::Room;
+
+struct GlobalActiveCall(Model<ActiveCall>);
+
+impl Global for GlobalActiveCall {}
+
+pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
+ livekit_client::init(
+ cx.background_executor().dispatcher.clone(),
+ cx.http_client(),
+ );
+ CallSettings::register(cx);
+
+ let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
+ cx.set_global(GlobalActiveCall(active_call));
+}
+
+pub struct OneAtATime {
+ cancel: Option<oneshot::Sender<()>>,
+}
+
+impl OneAtATime {
+ /// spawn a task in the given context.
+ /// if another task is spawned before that resolves, or if the OneAtATime itself is dropped, the first task will be cancelled and return Ok(None)
+ /// otherwise you'll see the result of the task.
+ fn spawn<F, Fut, R>(&mut self, cx: &mut AppContext, f: F) -> Task<Result<Option<R>>>
+ where
+ F: 'static + FnOnce(AsyncAppContext) -> Fut,
+ Fut: Future<Output = Result<R>>,
+ R: 'static,
+ {
+ let (tx, rx) = oneshot::channel();
+ self.cancel.replace(tx);
+ cx.spawn(|cx| async move {
+ futures::select_biased! {
+ _ = rx.fuse() => Ok(None),
+ result = f(cx).fuse() => result.map(Some),
+ }
+ })
+ }
+
+ fn running(&self) -> bool {
+ self.cancel
+ .as_ref()
+ .is_some_and(|cancel| !cancel.is_canceled())
+ }
+}
+
+#[derive(Clone)]
+pub struct IncomingCall {
+ pub room_id: u64,
+ pub calling_user: Arc<User>,
+ pub participants: Vec<Arc<User>>,
+ pub initial_project: Option<proto::ParticipantProject>,
+}
+
+/// Singleton global maintaining the user's participation in a room across workspaces.
+pub struct ActiveCall {
+ room: Option<(Model<Room>, Vec<Subscription>)>,
+ pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
+ location: Option<WeakModel<Project>>,
+ _join_debouncer: OneAtATime,
+ pending_invites: HashSet<u64>,
+ incoming_call: (
+ watch::Sender<Option<IncomingCall>>,
+ watch::Receiver<Option<IncomingCall>>,
+ ),
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ _subscriptions: Vec<client::Subscription>,
+}
+
+impl EventEmitter<Event> for ActiveCall {}
+
+impl ActiveCall {
+ fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
+ Self {
+ room: None,
+ pending_room_creation: None,
+ location: None,
+ pending_invites: Default::default(),
+ incoming_call: watch::channel(),
+ _join_debouncer: OneAtATime { cancel: None },
+ _subscriptions: vec![
+ client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
+ client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
+ ],
+ client,
+ user_store,
+ }
+ }
+
+ pub fn channel_id(&self, cx: &AppContext) -> Option<ChannelId> {
+ self.room()?.read(cx).channel_id()
+ }
+
+ async fn handle_incoming_call(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::IncomingCall>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
+ let call = IncomingCall {
+ room_id: envelope.payload.room_id,
+ participants: user_store
+ .update(&mut cx, |user_store, cx| {
+ user_store.get_users(envelope.payload.participant_user_ids, cx)
+ })?
+ .await?,
+ calling_user: user_store
+ .update(&mut cx, |user_store, cx| {
+ user_store.get_user(envelope.payload.calling_user_id, cx)
+ })?
+ .await?,
+ initial_project: envelope.payload.initial_project,
+ };
+ this.update(&mut cx, |this, _| {
+ *this.incoming_call.0.borrow_mut() = Some(call);
+ })?;
+
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_call_canceled(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::CallCanceled>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, _| {
+ let mut incoming_call = this.incoming_call.0.borrow_mut();
+ if incoming_call
+ .as_ref()
+ .map_or(false, |call| call.room_id == envelope.payload.room_id)
+ {
+ incoming_call.take();
+ }
+ })?;
+ Ok(())
+ }
+
+ pub fn global(cx: &AppContext) -> Model<Self> {
+ cx.global::<GlobalActiveCall>().0.clone()
+ }
+
+ pub fn try_global(cx: &AppContext) -> Option<Model<Self>> {
+ cx.try_global::<GlobalActiveCall>()
+ .map(|call| call.0.clone())
+ }
+
+ pub fn invite(
+ &mut self,
+ called_user_id: u64,
+ initial_project: Option<Model<Project>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if !self.pending_invites.insert(called_user_id) {
+ return Task::ready(Err(anyhow!("user was already invited")));
+ }
+ cx.notify();
+
+ if self._join_debouncer.running() {
+ return Task::ready(Ok(()));
+ }
+
+ let room = if let Some(room) = self.room().cloned() {
+ Some(Task::ready(Ok(room)).shared())
+ } else {
+ self.pending_room_creation.clone()
+ };
+
+ let invite = if let Some(room) = room {
+ cx.spawn(move |_, mut cx| async move {
+ let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
+
+ let initial_project_id = if let Some(initial_project) = initial_project {
+ Some(
+ room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
+ .await?,
+ )
+ } else {
+ None
+ };
+
+ room.update(&mut cx, move |room, cx| {
+ room.call(called_user_id, initial_project_id, cx)
+ })?
+ .await?;
+
+ anyhow::Ok(())
+ })
+ } else {
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let room = cx
+ .spawn(move |this, mut cx| async move {
+ let create_room = async {
+ let room = cx
+ .update(|cx| {
+ Room::create(
+ called_user_id,
+ initial_project,
+ client,
+ user_store,
+ cx,
+ )
+ })?
+ .await?;
+
+ this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
+ .await?;
+
+ anyhow::Ok(room)
+ };
+
+ let room = create_room.await;
+ this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
+ room.map_err(Arc::new)
+ })
+ .shared();
+ self.pending_room_creation = Some(room.clone());
+ cx.background_executor().spawn(async move {
+ room.await.map_err(|err| anyhow!("{:?}", err))?;
+ anyhow::Ok(())
+ })
+ };
+
+ cx.spawn(move |this, mut cx| async move {
+ let result = invite.await;
+ if result.is_ok() {
+ this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
+ } else {
+ //TODO: report collaboration error
+ log::error!("invite failed: {:?}", result);
+ }
+
+ this.update(&mut cx, |this, cx| {
+ this.pending_invites.remove(&called_user_id);
+ cx.notify();
+ })?;
+ result
+ })
+ }
+
+ pub fn cancel_invite(
+ &mut self,
+ called_user_id: u64,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let room_id = if let Some(room) = self.room() {
+ room.read(cx).id()
+ } else {
+ return Task::ready(Err(anyhow!("no active call")));
+ };
+
+ let client = self.client.clone();
+ cx.background_executor().spawn(async move {
+ client
+ .request(proto::CancelCall {
+ room_id,
+ called_user_id,
+ })
+ .await?;
+ anyhow::Ok(())
+ })
+ }
+
+ pub fn incoming(&self) -> watch::Receiver<Option<IncomingCall>> {
+ self.incoming_call.1.clone()
+ }
+
+ pub fn accept_incoming(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ if self.room.is_some() {
+ return Task::ready(Err(anyhow!("cannot join while on another call")));
+ }
+
+ let call = if let Some(call) = self.incoming_call.0.borrow_mut().take() {
+ call
+ } else {
+ return Task::ready(Err(anyhow!("no incoming call")));
+ };
+
+ if self.pending_room_creation.is_some() {
+ return Task::ready(Ok(()));
+ }
+
+ let room_id = call.room_id;
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let join = self
+ ._join_debouncer
+ .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
+
+ cx.spawn(|this, mut cx| async move {
+ let room = join.await?;
+ this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.report_call_event("accept incoming", cx)
+ })?;
+ Ok(())
+ })
+ }
+
+ pub fn decline_incoming(&mut self, _: &mut ModelContext<Self>) -> Result<()> {
+ let call = self
+ .incoming_call
+ .0
+ .borrow_mut()
+ .take()
+ .ok_or_else(|| anyhow!("no incoming call"))?;
+ report_call_event_for_room("decline incoming", call.room_id, None, &self.client);
+ self.client.send(proto::DeclineCall {
+ room_id: call.room_id,
+ })?;
+ Ok(())
+ }
+
+ pub fn join_channel(
+ &mut self,
+ channel_id: ChannelId,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Model<Room>>>> {
+ if let Some(room) = self.room().cloned() {
+ if room.read(cx).channel_id() == Some(channel_id) {
+ return Task::ready(Ok(Some(room)));
+ } else {
+ room.update(cx, |room, cx| room.clear_state(cx));
+ }
+ }
+
+ if self.pending_room_creation.is_some() {
+ return Task::ready(Ok(None));
+ }
+
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let join = self._join_debouncer.spawn(cx, move |cx| async move {
+ Room::join_channel(channel_id, client, user_store, cx).await
+ });
+
+ cx.spawn(|this, mut cx| async move {
+ let room = join.await?;
+ this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.report_call_event("join channel", cx)
+ })?;
+ Ok(room)
+ })
+ }
+
+ pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ cx.notify();
+ self.report_call_event("hang up", cx);
+
+ Audio::end_call(cx);
+
+ let channel_id = self.channel_id(cx);
+ if let Some((room, _)) = self.room.take() {
+ cx.emit(Event::RoomLeft { channel_id });
+ room.update(cx, |room, cx| room.leave(cx))
+ } else {
+ Task::ready(Ok(()))
+ }
+ }
+
+ pub fn share_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<u64>> {
+ if let Some((room, _)) = self.room.as_ref() {
+ self.report_call_event("share project", cx);
+ room.update(cx, |room, cx| room.share_project(project, cx))
+ } else {
+ Task::ready(Err(anyhow!("no active call")))
+ }
+ }
+
+ pub fn unshare_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ if let Some((room, _)) = self.room.as_ref() {
+ self.report_call_event("unshare project", cx);
+ room.update(cx, |room, cx| room.unshare_project(project, cx))
+ } else {
+ Err(anyhow!("no active call"))
+ }
+ }
+
+ pub fn location(&self) -> Option<&WeakModel<Project>> {
+ self.location.as_ref()
+ }
+
+ pub fn set_location(
+ &mut self,
+ project: Option<&Model<Project>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if project.is_some() || !*ZED_ALWAYS_ACTIVE {
+ self.location = project.map(|project| project.downgrade());
+ if let Some((room, _)) = self.room.as_ref() {
+ return room.update(cx, |room, cx| room.set_location(project, cx));
+ }
+ }
+ Task::ready(Ok(()))
+ }
+
+ fn set_room(
+ &mut self,
+ room: Option<Model<Room>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if room.as_ref() == self.room.as_ref().map(|room| &room.0) {
+ Task::ready(Ok(()))
+ } else {
+ cx.notify();
+ if let Some(room) = room {
+ if room.read(cx).status().is_offline() {
+ self.room = None;
+ Task::ready(Ok(()))
+ } else {
+ let subscriptions = vec![
+ cx.observe(&room, |this, room, cx| {
+ if room.read(cx).status().is_offline() {
+ this.set_room(None, cx).detach_and_log_err(cx);
+ }
+
+ cx.notify();
+ }),
+ cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
+ ];
+ self.room = Some((room.clone(), subscriptions));
+ let location = self
+ .location
+ .as_ref()
+ .and_then(|location| location.upgrade());
+ let channel_id = room.read(cx).channel_id();
+ cx.emit(Event::RoomJoined { channel_id });
+ room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
+ }
+ } else {
+ self.room = None;
+ Task::ready(Ok(()))
+ }
+ }
+ }
+
+ pub fn room(&self) -> Option<&Model<Room>> {
+ self.room.as_ref().map(|(room, _)| room)
+ }
+
+ pub fn client(&self) -> Arc<Client> {
+ self.client.clone()
+ }
+
+ pub fn pending_invites(&self) -> &HashSet<u64> {
+ &self.pending_invites
+ }
+
+ pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
+ if let Some(room) = self.room() {
+ let room = room.read(cx);
+ report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client);
+ }
+ }
+}
+
+pub fn report_call_event_for_room(
+ operation: &'static str,
+ room_id: u64,
+ channel_id: Option<ChannelId>,
+ client: &Arc<Client>,
+) {
+ let telemetry = client.telemetry();
+
+ telemetry.report_call_event(operation, Some(room_id), channel_id)
+}
+
+pub fn report_call_event_for_channel(
+ operation: &'static str,
+ channel_id: ChannelId,
+ client: &Arc<Client>,
+ cx: &AppContext,
+) {
+ let room = ActiveCall::global(cx).read(cx).room();
+
+ let telemetry = client.telemetry();
+
+ telemetry.report_call_event(operation, room.map(|r| r.read(cx).id()), Some(channel_id))
+}
+
+#[cfg(test)]
+mod test {
+ use gpui::TestAppContext;
+
+ use crate::OneAtATime;
+
+ #[gpui::test]
+ async fn test_one_at_a_time(cx: &mut TestAppContext) {
+ let mut one_at_a_time = OneAtATime { cancel: None };
+
+ assert_eq!(
+ cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) }))
+ .await
+ .unwrap(),
+ Some(1)
+ );
+
+ let (a, b) = cx.update(|cx| {
+ (
+ one_at_a_time.spawn(cx, |_| async {
+ panic!("");
+ }),
+ one_at_a_time.spawn(cx, |_| async { Ok(3) }),
+ )
+ });
+
+ assert_eq!(a.await.unwrap(), None::<u32>);
+ assert_eq!(b.await.unwrap(), Some(3));
+
+ let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));
+ drop(one_at_a_time);
+
+ assert_eq!(promise.await.unwrap(), None);
+ }
+}
@@ -0,0 +1,68 @@
+#![cfg_attr(target_os = "windows", allow(unused))]
+
+use anyhow::{anyhow, Result};
+use client::{proto, ParticipantIndex, User};
+use collections::HashMap;
+use gpui::WeakModel;
+use livekit_client::AudioStream;
+use project::Project;
+use std::sync::Arc;
+
+#[cfg(not(target_os = "windows"))]
+pub use livekit_client::id::TrackSid;
+pub use livekit_client::track::{RemoteAudioTrack, RemoteVideoTrack};
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum ParticipantLocation {
+ SharedProject { project_id: u64 },
+ UnsharedProject,
+ External,
+}
+
+impl ParticipantLocation {
+ pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
+ match location.and_then(|l| l.variant) {
+ Some(proto::participant_location::Variant::SharedProject(project)) => {
+ Ok(Self::SharedProject {
+ project_id: project.id,
+ })
+ }
+ Some(proto::participant_location::Variant::UnsharedProject(_)) => {
+ Ok(Self::UnsharedProject)
+ }
+ Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
+ None => Err(anyhow!("participant location was not provided")),
+ }
+ }
+}
+
+#[derive(Clone, Default)]
+pub struct LocalParticipant {
+ pub projects: Vec<proto::ParticipantProject>,
+ pub active_project: Option<WeakModel<Project>>,
+ pub role: proto::ChannelRole,
+}
+
+pub struct RemoteParticipant {
+ pub user: Arc<User>,
+ pub peer_id: proto::PeerId,
+ pub role: proto::ChannelRole,
+ pub projects: Vec<proto::ParticipantProject>,
+ pub location: ParticipantLocation,
+ pub participant_index: ParticipantIndex,
+ pub muted: bool,
+ pub speaking: bool,
+ #[cfg(not(target_os = "windows"))]
+ pub video_tracks: HashMap<TrackSid, RemoteVideoTrack>,
+ #[cfg(not(target_os = "windows"))]
+ pub audio_tracks: HashMap<TrackSid, (RemoteAudioTrack, AudioStream)>,
+}
+
+impl RemoteParticipant {
+ pub fn has_video_tracks(&self) -> bool {
+ #[cfg(not(target_os = "windows"))]
+ return !self.video_tracks.is_empty();
+ #[cfg(target_os = "windows")]
+ return false;
+ }
+}
@@ -0,0 +1,1771 @@
+#![cfg_attr(target_os = "windows", allow(unused))]
+
+use crate::{
+ call_settings::CallSettings,
+ participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
+};
+use anyhow::{anyhow, Result};
+use audio::{Audio, Sound};
+use client::{
+ proto::{self, PeerId},
+ ChannelId, Client, ParticipantIndex, TypedEnvelope, User, UserStore,
+};
+use collections::{BTreeMap, HashMap, HashSet};
+use fs::Fs;
+use futures::{FutureExt, StreamExt};
+use gpui::{
+ AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
+};
+use language::LanguageRegistry;
+#[cfg(not(target_os = "windows"))]
+use livekit::{
+ capture_local_audio_track, capture_local_video_track,
+ id::ParticipantIdentity,
+ options::{TrackPublishOptions, VideoCodec},
+ play_remote_audio_track,
+ publication::LocalTrackPublication,
+ track::{TrackKind, TrackSource},
+ RoomEvent, RoomOptions,
+};
+#[cfg(target_os = "windows")]
+use livekit::{publication::LocalTrackPublication, RoomEvent};
+use livekit_client as livekit;
+use postage::{sink::Sink, stream::Stream, watch};
+use project::Project;
+use settings::Settings as _;
+use std::{any::Any, future::Future, mem, sync::Arc, time::Duration};
+use util::{post_inc, ResultExt, TryFutureExt};
+
+pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Event {
+ RoomJoined {
+ channel_id: Option<ChannelId>,
+ },
+ ParticipantLocationChanged {
+ participant_id: proto::PeerId,
+ },
+ RemoteVideoTracksChanged {
+ participant_id: proto::PeerId,
+ },
+ RemoteAudioTracksChanged {
+ participant_id: proto::PeerId,
+ },
+ RemoteProjectShared {
+ owner: Arc<User>,
+ project_id: u64,
+ worktree_root_names: Vec<String>,
+ },
+ RemoteProjectUnshared {
+ project_id: u64,
+ },
+ RemoteProjectJoined {
+ project_id: u64,
+ },
+ RemoteProjectInvitationDiscarded {
+ project_id: u64,
+ },
+ RoomLeft {
+ channel_id: Option<ChannelId>,
+ },
+}
+
+pub struct Room {
+ id: u64,
+ channel_id: Option<ChannelId>,
+ live_kit: Option<LiveKitRoom>,
+ status: RoomStatus,
+ shared_projects: HashSet<WeakModel<Project>>,
+ joined_projects: HashSet<WeakModel<Project>>,
+ local_participant: LocalParticipant,
+ remote_participants: BTreeMap<u64, RemoteParticipant>,
+ pending_participants: Vec<Arc<User>>,
+ participant_user_ids: HashSet<u64>,
+ pending_call_count: usize,
+ leave_when_empty: bool,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec<PeerId>>,
+ client_subscriptions: Vec<client::Subscription>,
+ _subscriptions: Vec<gpui::Subscription>,
+ room_update_completed_tx: watch::Sender<Option<()>>,
+ room_update_completed_rx: watch::Receiver<Option<()>>,
+ pending_room_update: Option<Task<()>>,
+ maintain_connection: Option<Task<Option<()>>>,
+}
+
+impl EventEmitter<Event> for Room {}
+
+impl Room {
+ pub fn channel_id(&self) -> Option<ChannelId> {
+ self.channel_id
+ }
+
+ pub fn is_sharing_project(&self) -> bool {
+ !self.shared_projects.is_empty()
+ }
+
+ #[cfg(all(any(test, feature = "test-support"), not(target_os = "windows")))]
+ pub fn is_connected(&self) -> bool {
+ if let Some(live_kit) = self.live_kit.as_ref() {
+ live_kit.room.connection_state() == livekit::ConnectionState::Connected
+ } else {
+ false
+ }
+ }
+
+ fn new(
+ id: u64,
+ channel_id: Option<ChannelId>,
+ livekit_connection_info: Option<proto::LiveKitConnectionInfo>,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ cx: &mut ModelContext<Self>,
+ ) -> Self {
+ spawn_room_connection(livekit_connection_info, cx);
+
+ let maintain_connection = cx.spawn({
+ let client = client.clone();
+ move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
+ });
+
+ Audio::play_sound(Sound::Joined, cx);
+
+ let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
+
+ Self {
+ id,
+ channel_id,
+ live_kit: None,
+ status: RoomStatus::Online,
+ shared_projects: Default::default(),
+ joined_projects: Default::default(),
+ participant_user_ids: Default::default(),
+ local_participant: Default::default(),
+ remote_participants: Default::default(),
+ pending_participants: Default::default(),
+ pending_call_count: 0,
+ client_subscriptions: vec![
+ client.add_message_handler(cx.weak_model(), Self::handle_room_updated)
+ ],
+ _subscriptions: vec![
+ cx.on_release(Self::released),
+ cx.on_app_quit(Self::app_will_quit),
+ ],
+ leave_when_empty: false,
+ pending_room_update: None,
+ client,
+ user_store,
+ follows_by_leader_id_project_id: Default::default(),
+ maintain_connection: Some(maintain_connection),
+ room_update_completed_tx,
+ room_update_completed_rx,
+ }
+ }
+
+ pub(crate) fn create(
+ called_user_id: u64,
+ initial_project: Option<Model<Project>>,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ cx: &mut AppContext,
+ ) -> Task<Result<Model<Self>>> {
+ cx.spawn(move |mut cx| async move {
+ let response = client.request(proto::CreateRoom {}).await?;
+ let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ let room = cx.new_model(|cx| {
+ let mut room = Self::new(
+ room_proto.id,
+ None,
+ response.live_kit_connection_info,
+ client,
+ user_store,
+ cx,
+ );
+ if let Some(participant) = room_proto.participants.first() {
+ room.local_participant.role = participant.role()
+ }
+ room
+ })?;
+
+ let initial_project_id = if let Some(initial_project) = initial_project {
+ let initial_project_id = room
+ .update(&mut cx, |room, cx| {
+ room.share_project(initial_project.clone(), cx)
+ })?
+ .await?;
+ Some(initial_project_id)
+ } else {
+ None
+ };
+
+ let did_join = room
+ .update(&mut cx, |room, cx| {
+ room.leave_when_empty = true;
+ room.call(called_user_id, initial_project_id, cx)
+ })?
+ .await;
+ match did_join {
+ Ok(()) => Ok(room),
+ Err(error) => Err(error.context("room creation failed")),
+ }
+ })
+ }
+
+ pub(crate) async fn join_channel(
+ channel_id: ChannelId,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ cx: AsyncAppContext,
+ ) -> Result<Model<Self>> {
+ Self::from_join_response(
+ client
+ .request(proto::JoinChannel {
+ channel_id: channel_id.0,
+ })
+ .await?,
+ client,
+ user_store,
+ cx,
+ )
+ }
+
+ pub(crate) async fn join(
+ room_id: u64,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ cx: AsyncAppContext,
+ ) -> Result<Model<Self>> {
+ Self::from_join_response(
+ client.request(proto::JoinRoom { id: room_id }).await?,
+ client,
+ user_store,
+ cx,
+ )
+ }
+
+ fn released(&mut self, cx: &mut AppContext) {
+ if self.status.is_online() {
+ self.leave_internal(cx).detach_and_log_err(cx);
+ }
+ }
+
+ fn app_will_quit(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
+ let task = if self.status.is_online() {
+ let leave = self.leave_internal(cx);
+ Some(cx.background_executor().spawn(async move {
+ leave.await.log_err();
+ }))
+ } else {
+ None
+ };
+
+ async move {
+ if let Some(task) = task {
+ task.await;
+ }
+ }
+ }
+
+ pub fn mute_on_join(cx: &AppContext) -> bool {
+ CallSettings::get_global(cx).mute_on_join || client::IMPERSONATE_LOGIN.is_some()
+ }
+
+ fn from_join_response(
+ response: proto::JoinRoomResponse,
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Model<Self>> {
+ let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ let room = cx.new_model(|cx| {
+ Self::new(
+ room_proto.id,
+ response.channel_id.map(ChannelId),
+ response.live_kit_connection_info,
+ client,
+ user_store,
+ cx,
+ )
+ })?;
+ room.update(&mut cx, |room, cx| {
+ room.leave_when_empty = room.channel_id.is_none();
+ room.apply_room_update(room_proto, cx)?;
+ anyhow::Ok(())
+ })??;
+ Ok(room)
+ }
+
+ fn should_leave(&self) -> bool {
+ self.leave_when_empty
+ && self.pending_room_update.is_none()
+ && self.pending_participants.is_empty()
+ && self.remote_participants.is_empty()
+ && self.pending_call_count == 0
+ }
+
+ pub(crate) fn leave(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ cx.notify();
+ self.leave_internal(cx)
+ }
+
+ fn leave_internal(&mut self, cx: &mut AppContext) -> Task<Result<()>> {
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+
+ log::info!("leaving room");
+ Audio::play_sound(Sound::Leave, cx);
+
+ self.clear_state(cx);
+
+ let leave_room = self.client.request(proto::LeaveRoom {});
+ cx.background_executor().spawn(async move {
+ leave_room.await?;
+ anyhow::Ok(())
+ })
+ }
+
+ pub(crate) fn clear_state(&mut self, cx: &mut AppContext) {
+ for project in self.shared_projects.drain() {
+ if let Some(project) = project.upgrade() {
+ project.update(cx, |project, cx| {
+ project.unshare(cx).log_err();
+ });
+ }
+ }
+ for project in self.joined_projects.drain() {
+ if let Some(project) = project.upgrade() {
+ project.update(cx, |project, cx| {
+ project.disconnected_from_host(cx);
+ project.close(cx);
+ });
+ }
+ }
+
+ self.status = RoomStatus::Offline;
+ self.remote_participants.clear();
+ self.pending_participants.clear();
+ self.participant_user_ids.clear();
+ self.client_subscriptions.clear();
+ self.live_kit.take();
+ self.pending_room_update.take();
+ self.maintain_connection.take();
+ }
+
+ async fn maintain_connection(
+ this: WeakModel<Self>,
+ client: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let mut client_status = client.status();
+ loop {
+ let _ = client_status.try_recv();
+ let is_connected = client_status.borrow().is_connected();
+            // Even if we're initially connected, any subsequent status change means we were momentarily disconnected.
+ if !is_connected || client_status.next().await.is_some() {
+ log::info!("detected client disconnection");
+
+ this.upgrade()
+ .ok_or_else(|| anyhow!("room was dropped"))?
+ .update(&mut cx, |this, cx| {
+ this.status = RoomStatus::Rejoining;
+ cx.notify();
+ })?;
+
+ // Wait for client to re-establish a connection to the server.
+ {
+ let mut reconnection_timeout =
+ cx.background_executor().timer(RECONNECT_TIMEOUT).fuse();
+ let client_reconnection = async {
+ let mut remaining_attempts = 3;
+ while remaining_attempts > 0 {
+ if client_status.borrow().is_connected() {
+ log::info!("client reconnected, attempting to rejoin room");
+
+ let Some(this) = this.upgrade() else { break };
+ match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
+ Ok(task) => {
+ if task.await.log_err().is_some() {
+ return true;
+ } else {
+ remaining_attempts -= 1;
+ }
+ }
+ Err(_app_dropped) => return false,
+ }
+ } else if client_status.borrow().is_signed_out() {
+ return false;
+ }
+
+ log::info!(
+ "waiting for client status change, remaining attempts {}",
+ remaining_attempts
+ );
+ client_status.next().await;
+ }
+ false
+ }
+ .fuse();
+ futures::pin_mut!(client_reconnection);
+
+ futures::select_biased! {
+ reconnected = client_reconnection => {
+ if reconnected {
+ log::info!("successfully reconnected to room");
+ // If we successfully joined the room, go back around the loop
+ // waiting for future connection status changes.
+ continue;
+ }
+ }
+ _ = reconnection_timeout => {
+ log::info!("room reconnection timeout expired");
+ }
+ }
+ }
+
+ break;
+ }
+ }
+
+ // The client failed to re-establish a connection to the server
+ // or an error occurred while trying to re-join the room. Either way
+ // we leave the room and return an error.
+ if let Some(this) = this.upgrade() {
+ log::info!("reconnection failed, leaving room");
+ this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
+ }
+ Err(anyhow!(
+ "can't reconnect to room: client failed to re-establish connection"
+ ))
+ }
+
+ fn rejoin(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ let mut projects = HashMap::default();
+ let mut reshared_projects = Vec::new();
+ let mut rejoined_projects = Vec::new();
+ self.shared_projects.retain(|project| {
+ if let Some(handle) = project.upgrade() {
+ let project = handle.read(cx);
+ if let Some(project_id) = project.remote_id() {
+ projects.insert(project_id, handle.clone());
+ reshared_projects.push(proto::UpdateProject {
+ project_id,
+ worktrees: project.worktree_metadata_protos(cx),
+ });
+ return true;
+ }
+ }
+ false
+ });
+ self.joined_projects.retain(|project| {
+ if let Some(handle) = project.upgrade() {
+ let project = handle.read(cx);
+ if let Some(project_id) = project.remote_id() {
+ projects.insert(project_id, handle.clone());
+ rejoined_projects.push(proto::RejoinProject {
+ id: project_id,
+ worktrees: project
+ .worktrees(cx)
+ .map(|worktree| {
+ let worktree = worktree.read(cx);
+ proto::RejoinWorktree {
+ id: worktree.id().to_proto(),
+ scan_id: worktree.completed_scan_id() as u64,
+ }
+ })
+ .collect(),
+ });
+ }
+ return true;
+ }
+ false
+ });
+
+ let response = self.client.request_envelope(proto::RejoinRoom {
+ id: self.id,
+ reshared_projects,
+ rejoined_projects,
+ });
+
+ cx.spawn(|this, mut cx| async move {
+ let response = response.await?;
+ let message_id = response.message_id;
+ let response = response.payload;
+ let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+ this.update(&mut cx, |this, cx| {
+ this.status = RoomStatus::Online;
+ this.apply_room_update(room_proto, cx)?;
+
+ for reshared_project in response.reshared_projects {
+ if let Some(project) = projects.get(&reshared_project.id) {
+ project.update(cx, |project, cx| {
+ project.reshared(reshared_project, cx).log_err();
+ });
+ }
+ }
+
+ for rejoined_project in response.rejoined_projects {
+ if let Some(project) = projects.get(&rejoined_project.id) {
+ project.update(cx, |project, cx| {
+ project.rejoined(rejoined_project, message_id, cx).log_err();
+ });
+ }
+ }
+
+ anyhow::Ok(())
+ })?
+ })
+ }
+
+ pub fn id(&self) -> u64 {
+ self.id
+ }
+
+ pub fn status(&self) -> RoomStatus {
+ self.status
+ }
+
+ pub fn local_participant(&self) -> &LocalParticipant {
+ &self.local_participant
+ }
+
+ pub fn remote_participants(&self) -> &BTreeMap<u64, RemoteParticipant> {
+ &self.remote_participants
+ }
+
+ pub fn remote_participant_for_peer_id(&self, peer_id: PeerId) -> Option<&RemoteParticipant> {
+ self.remote_participants
+ .values()
+ .find(|p| p.peer_id == peer_id)
+ }
+
+ pub fn role_for_user(&self, user_id: u64) -> Option<proto::ChannelRole> {
+ self.remote_participants
+ .get(&user_id)
+ .map(|participant| participant.role)
+ }
+
+ pub fn contains_guests(&self) -> bool {
+ self.local_participant.role == proto::ChannelRole::Guest
+ || self
+ .remote_participants
+ .values()
+ .any(|p| p.role == proto::ChannelRole::Guest)
+ }
+
+ pub fn local_participant_is_admin(&self) -> bool {
+ self.local_participant.role == proto::ChannelRole::Admin
+ }
+
+ pub fn local_participant_is_guest(&self) -> bool {
+ self.local_participant.role == proto::ChannelRole::Guest
+ }
+
+ pub fn set_participant_role(
+ &mut self,
+ user_id: u64,
+ role: proto::ChannelRole,
+ cx: &ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let client = self.client.clone();
+ let room_id = self.id;
+ let role = role.into();
+ cx.spawn(|_, _| async move {
+ client
+ .request(proto::SetRoomParticipantRole {
+ room_id,
+ user_id,
+ role,
+ })
+ .await
+ .map(|_| ())
+ })
+ }
+
+ pub fn pending_participants(&self) -> &[Arc<User>] {
+ &self.pending_participants
+ }
+
+ pub fn contains_participant(&self, user_id: u64) -> bool {
+ self.participant_user_ids.contains(&user_id)
+ }
+
+ pub fn followers_for(&self, leader_id: PeerId, project_id: u64) -> &[PeerId] {
+ self.follows_by_leader_id_project_id
+ .get(&(leader_id, project_id))
+ .map_or(&[], |v| v.as_slice())
+ }
+
+    /// Returns the most 'active' project, defined as the one with the most people in it.
+ pub fn most_active_project(&self, cx: &AppContext) -> Option<(u64, u64)> {
+ let mut project_hosts_and_guest_counts = HashMap::<u64, (Option<u64>, u32)>::default();
+ for participant in self.remote_participants.values() {
+ match participant.location {
+ ParticipantLocation::SharedProject { project_id } => {
+ project_hosts_and_guest_counts
+ .entry(project_id)
+ .or_default()
+ .1 += 1;
+ }
+ ParticipantLocation::External | ParticipantLocation::UnsharedProject => {}
+ }
+ for project in &participant.projects {
+ project_hosts_and_guest_counts
+ .entry(project.id)
+ .or_default()
+ .0 = Some(participant.user.id);
+ }
+ }
+
+ if let Some(user) = self.user_store.read(cx).current_user() {
+ for project in &self.local_participant.projects {
+ project_hosts_and_guest_counts
+ .entry(project.id)
+ .or_default()
+ .0 = Some(user.id);
+ }
+ }
+
+ project_hosts_and_guest_counts
+ .into_iter()
+ .filter_map(|(id, (host, guest_count))| Some((id, host?, guest_count)))
+ .max_by_key(|(_, _, guest_count)| *guest_count)
+ .map(|(id, host, _)| (id, host))
+ }
+
+ async fn handle_room_updated(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::RoomUpdated>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let room = envelope
+ .payload
+ .room
+ .ok_or_else(|| anyhow!("invalid room"))?;
+ this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
+ }
+
+ fn apply_room_update(&mut self, room: proto::Room, cx: &mut ModelContext<Self>) -> Result<()> {
+ log::trace!(
+ "client {:?}. room update: {:?}",
+ self.client.user_id(),
+ &room
+ );
+
+ self.pending_room_update = Some(self.start_room_connection(room, cx));
+
+ cx.notify();
+ Ok(())
+ }
+
+ pub fn room_update_completed(&mut self) -> impl Future<Output = ()> {
+ let mut done_rx = self.room_update_completed_rx.clone();
+ async move {
+ while let Some(result) = done_rx.next().await {
+ if result.is_some() {
+ break;
+ }
+ }
+ }
+ }
+
+ #[cfg(target_os = "windows")]
+ fn start_room_connection(
+ &self,
+ mut room: proto::Room,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<()> {
+ Task::ready(())
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ fn start_room_connection(
+ &self,
+ mut room: proto::Room,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<()> {
+        // Filter ourselves out of the room's participants.
+ let local_participant_ix = room
+ .participants
+ .iter()
+ .position(|participant| Some(participant.user_id) == self.client.user_id());
+ let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));
+
+ let pending_participant_user_ids = room
+ .pending_participants
+ .iter()
+ .map(|p| p.user_id)
+ .collect::<Vec<_>>();
+
+ let remote_participant_user_ids = room
+ .participants
+ .iter()
+ .map(|p| p.user_id)
+ .collect::<Vec<_>>();
+
+ let (remote_participants, pending_participants) =
+ self.user_store.update(cx, move |user_store, cx| {
+ (
+ user_store.get_users(remote_participant_user_ids, cx),
+ user_store.get_users(pending_participant_user_ids, cx),
+ )
+ });
+ cx.spawn(|this, mut cx| async move {
+ let (remote_participants, pending_participants) =
+ futures::join!(remote_participants, pending_participants);
+
+ this.update(&mut cx, |this, cx| {
+ this.participant_user_ids.clear();
+
+ if let Some(participant) = local_participant {
+ let role = participant.role();
+ this.local_participant.projects = participant.projects;
+ if this.local_participant.role != role {
+ this.local_participant.role = role;
+
+ if role == proto::ChannelRole::Guest {
+ for project in mem::take(&mut this.shared_projects) {
+ if let Some(project) = project.upgrade() {
+ this.unshare_project(project, cx).log_err();
+ }
+ }
+ this.local_participant.projects.clear();
+ if let Some(livekit_room) = &mut this.live_kit {
+ livekit_room.stop_publishing(cx);
+ }
+ }
+
+ this.joined_projects.retain(|project| {
+ if let Some(project) = project.upgrade() {
+ project.update(cx, |project, cx| project.set_role(role, cx));
+ true
+ } else {
+ false
+ }
+ });
+ }
+ } else {
+ this.local_participant.projects.clear();
+ }
+
+ let livekit_participants = this
+ .live_kit
+ .as_ref()
+ .map(|live_kit| live_kit.room.remote_participants());
+
+ if let Some(participants) = remote_participants.log_err() {
+ for (participant, user) in room.participants.into_iter().zip(participants) {
+ let Some(peer_id) = participant.peer_id else {
+ continue;
+ };
+ let participant_index = ParticipantIndex(participant.participant_index);
+ this.participant_user_ids.insert(participant.user_id);
+
+ let old_projects = this
+ .remote_participants
+ .get(&participant.user_id)
+ .into_iter()
+ .flat_map(|existing| &existing.projects)
+ .map(|project| project.id)
+ .collect::<HashSet<_>>();
+ let new_projects = participant
+ .projects
+ .iter()
+ .map(|project| project.id)
+ .collect::<HashSet<_>>();
+
+ for project in &participant.projects {
+ if !old_projects.contains(&project.id) {
+ cx.emit(Event::RemoteProjectShared {
+ owner: user.clone(),
+ project_id: project.id,
+ worktree_root_names: project.worktree_root_names.clone(),
+ });
+ }
+ }
+
+ for unshared_project_id in old_projects.difference(&new_projects) {
+ this.joined_projects.retain(|project| {
+ if let Some(project) = project.upgrade() {
+ project.update(cx, |project, cx| {
+ if project.remote_id() == Some(*unshared_project_id) {
+ project.disconnected_from_host(cx);
+ false
+ } else {
+ true
+ }
+ })
+ } else {
+ false
+ }
+ });
+ cx.emit(Event::RemoteProjectUnshared {
+ project_id: *unshared_project_id,
+ });
+ }
+
+ let role = participant.role();
+ let location = ParticipantLocation::from_proto(participant.location)
+ .unwrap_or(ParticipantLocation::External);
+ if let Some(remote_participant) =
+ this.remote_participants.get_mut(&participant.user_id)
+ {
+ remote_participant.peer_id = peer_id;
+ remote_participant.projects = participant.projects;
+ remote_participant.participant_index = participant_index;
+ if location != remote_participant.location
+ || role != remote_participant.role
+ {
+ remote_participant.location = location;
+ remote_participant.role = role;
+ cx.emit(Event::ParticipantLocationChanged {
+ participant_id: peer_id,
+ });
+ }
+ } else {
+ this.remote_participants.insert(
+ participant.user_id,
+ RemoteParticipant {
+ user: user.clone(),
+ participant_index,
+ peer_id,
+ projects: participant.projects,
+ location,
+ role,
+ muted: true,
+ speaking: false,
+ video_tracks: Default::default(),
+ #[cfg(not(target_os = "windows"))]
+ audio_tracks: Default::default(),
+ },
+ );
+
+ Audio::play_sound(Sound::Joined, cx);
+ if let Some(livekit_participants) = &livekit_participants {
+ if let Some(livekit_participant) = livekit_participants
+ .get(&ParticipantIdentity(user.id.to_string()))
+ {
+ for publication in
+ livekit_participant.track_publications().into_values()
+ {
+ if let Some(track) = publication.track() {
+ this.livekit_room_updated(
+ RoomEvent::TrackSubscribed {
+ track,
+ publication,
+ participant: livekit_participant.clone(),
+ },
+ cx,
+ )
+ .warn_on_err();
+ }
+ }
+ }
+ }
+ }
+ }
+
+ this.remote_participants.retain(|user_id, participant| {
+ if this.participant_user_ids.contains(user_id) {
+ true
+ } else {
+ for project in &participant.projects {
+ cx.emit(Event::RemoteProjectUnshared {
+ project_id: project.id,
+ });
+ }
+ false
+ }
+ });
+ }
+
+ if let Some(pending_participants) = pending_participants.log_err() {
+ this.pending_participants = pending_participants;
+ for participant in &this.pending_participants {
+ this.participant_user_ids.insert(participant.id);
+ }
+ }
+
+ this.follows_by_leader_id_project_id.clear();
+ for follower in room.followers {
+ let project_id = follower.project_id;
+ let (leader, follower) = match (follower.leader_id, follower.follower_id) {
+ (Some(leader), Some(follower)) => (leader, follower),
+
+ _ => {
+ log::error!("Follower message {follower:?} missing some state");
+ continue;
+ }
+ };
+
+ let list = this
+ .follows_by_leader_id_project_id
+ .entry((leader, project_id))
+ .or_default();
+ if !list.contains(&follower) {
+ list.push(follower);
+ }
+ }
+
+ this.pending_room_update.take();
+ if this.should_leave() {
+ log::info!("room is empty, leaving");
+ this.leave(cx).detach();
+ }
+
+ this.user_store.update(cx, |user_store, cx| {
+ let participant_indices_by_user_id = this
+ .remote_participants
+ .iter()
+ .map(|(user_id, participant)| (*user_id, participant.participant_index))
+ .collect();
+ user_store.set_participant_indices(participant_indices_by_user_id, cx);
+ });
+
+ this.check_invariants();
+ this.room_update_completed_tx.try_send(Some(())).ok();
+ cx.notify();
+ })
+ .ok();
+ })
+ }
+
+ fn livekit_room_updated(
+ &mut self,
+ event: RoomEvent,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ log::trace!(
+ "client {:?}. livekit event: {:?}",
+ self.client.user_id(),
+ &event
+ );
+
+ match event {
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::TrackSubscribed {
+ track,
+ participant,
+ publication,
+ } => {
+ let user_id = participant.identity().0.parse()?;
+ let track_id = track.sid();
+ let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
+ anyhow!(
+ "{:?} subscribed to track by unknown participant {user_id}",
+ self.client.user_id()
+ )
+ })?;
+ if self.live_kit.as_ref().map_or(true, |kit| kit.deafened) {
+ track.rtc_track().set_enabled(false);
+ }
+ match track {
+ livekit::track::RemoteTrack::Audio(track) => {
+ cx.emit(Event::RemoteAudioTracksChanged {
+ participant_id: participant.peer_id,
+ });
+ let stream = play_remote_audio_track(&track, cx.background_executor())?;
+ participant.audio_tracks.insert(track_id, (track, stream));
+ participant.muted = publication.is_muted();
+ }
+ livekit::track::RemoteTrack::Video(track) => {
+ cx.emit(Event::RemoteVideoTracksChanged {
+ participant_id: participant.peer_id,
+ });
+ participant.video_tracks.insert(track_id, track);
+ }
+ }
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::TrackUnsubscribed {
+ track, participant, ..
+ } => {
+ let user_id = participant.identity().0.parse()?;
+ let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
+ anyhow!(
+ "{:?}, unsubscribed from track by unknown participant {user_id}",
+ self.client.user_id()
+ )
+ })?;
+ match track {
+ livekit::track::RemoteTrack::Audio(track) => {
+ participant.audio_tracks.remove(&track.sid());
+ participant.muted = true;
+ cx.emit(Event::RemoteAudioTracksChanged {
+ participant_id: participant.peer_id,
+ });
+ }
+ livekit::track::RemoteTrack::Video(track) => {
+ participant.video_tracks.remove(&track.sid());
+ cx.emit(Event::RemoteVideoTracksChanged {
+ participant_id: participant.peer_id,
+ });
+ }
+ }
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::ActiveSpeakersChanged { speakers } => {
+ let mut speaker_ids = speakers
+ .into_iter()
+ .filter_map(|speaker| speaker.identity().0.parse().ok())
+ .collect::<Vec<u64>>();
+ speaker_ids.sort_unstable();
+ for (sid, participant) in &mut self.remote_participants {
+ participant.speaking = speaker_ids.binary_search(sid).is_ok();
+ }
+ if let Some(id) = self.client.user_id() {
+ if let Some(room) = &mut self.live_kit {
+ room.speaking = speaker_ids.binary_search(&id).is_ok();
+ }
+ }
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::TrackMuted {
+ participant,
+ publication,
+ }
+ | RoomEvent::TrackUnmuted {
+ participant,
+ publication,
+ } => {
+ let mut found = false;
+ let user_id = participant.identity().0.parse()?;
+ let track_id = publication.sid();
+ if let Some(participant) = self.remote_participants.get_mut(&user_id) {
+ for (track, _) in participant.audio_tracks.values() {
+ if track.sid() == track_id {
+ found = true;
+ break;
+ }
+ }
+ if found {
+ participant.muted = publication.is_muted();
+ }
+ }
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::LocalTrackUnpublished { publication, .. } => {
+ log::info!("unpublished track {}", publication.sid());
+ if let Some(room) = &mut self.live_kit {
+ if let LocalTrack::Published {
+ track_publication, ..
+ } = &room.microphone_track
+ {
+ if track_publication.sid() == publication.sid() {
+ room.microphone_track = LocalTrack::None;
+ }
+ }
+ if let LocalTrack::Published {
+ track_publication, ..
+ } = &room.screen_track
+ {
+ if track_publication.sid() == publication.sid() {
+ room.screen_track = LocalTrack::None;
+ }
+ }
+ }
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::LocalTrackPublished { publication, .. } => {
+ log::info!("published track {:?}", publication.sid());
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ RoomEvent::Disconnected { reason } => {
+ log::info!("disconnected from room: {reason:?}");
+ self.leave(cx).detach_and_log_err(cx);
+ }
+ _ => {}
+ }
+
+ cx.notify();
+ Ok(())
+ }
+
+ fn check_invariants(&self) {
+ #[cfg(any(test, feature = "test-support"))]
+ {
+ for participant in self.remote_participants.values() {
+ assert!(self.participant_user_ids.contains(&participant.user.id));
+ assert_ne!(participant.user.id, self.client.user_id().unwrap());
+ }
+
+ for participant in &self.pending_participants {
+ assert!(self.participant_user_ids.contains(&participant.id));
+ assert_ne!(participant.id, self.client.user_id().unwrap());
+ }
+
+ assert_eq!(
+ self.participant_user_ids.len(),
+ self.remote_participants.len() + self.pending_participants.len()
+ );
+ }
+ }
+
+ pub(crate) fn call(
+ &mut self,
+ called_user_id: u64,
+ initial_project_id: Option<u64>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+
+ cx.notify();
+ let client = self.client.clone();
+ let room_id = self.id;
+ self.pending_call_count += 1;
+ cx.spawn(move |this, mut cx| async move {
+ let result = client
+ .request(proto::Call {
+ room_id,
+ called_user_id,
+ initial_project_id,
+ })
+ .await;
+ this.update(&mut cx, |this, cx| {
+ this.pending_call_count -= 1;
+ if this.should_leave() {
+ this.leave(cx).detach_and_log_err(cx);
+ }
+ })?;
+ result?;
+ Ok(())
+ })
+ }
+
+ pub fn join_project(
+ &mut self,
+ id: u64,
+ language_registry: Arc<LanguageRegistry>,
+ fs: Arc<dyn Fs>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Model<Project>>> {
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ cx.emit(Event::RemoteProjectJoined { project_id: id });
+ cx.spawn(move |this, mut cx| async move {
+ let project =
+ Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
+
+ this.update(&mut cx, |this, cx| {
+ this.joined_projects.retain(|project| {
+ if let Some(project) = project.upgrade() {
+ !project.read(cx).is_disconnected(cx)
+ } else {
+ false
+ }
+ });
+ this.joined_projects.insert(project.downgrade());
+ })?;
+ Ok(project)
+ })
+ }
+
+ pub fn share_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<u64>> {
+ if let Some(project_id) = project.read(cx).remote_id() {
+ return Task::ready(Ok(project_id));
+ }
+
+ let request = self.client.request(proto::ShareProject {
+ room_id: self.id(),
+ worktrees: project.read(cx).worktree_metadata_protos(cx),
+ is_ssh_project: project.read(cx).is_via_ssh(),
+ });
+
+ cx.spawn(|this, mut cx| async move {
+ let response = request.await?;
+
+ project.update(&mut cx, |project, cx| {
+ project.shared(response.project_id, cx)
+ })??;
+
+ // If the user's location is in this project, it changes from UnsharedProject to SharedProject.
+ this.update(&mut cx, |this, cx| {
+ this.shared_projects.insert(project.downgrade());
+ let active_project = this.local_participant.active_project.as_ref();
+ if active_project.map_or(false, |location| *location == project) {
+ this.set_location(Some(&project), cx)
+ } else {
+ Task::ready(Ok(()))
+ }
+ })?
+ .await?;
+
+ Ok(response.project_id)
+ })
+ }
+
+ pub(crate) fn unshare_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ let project_id = match project.read(cx).remote_id() {
+ Some(project_id) => project_id,
+ None => return Ok(()),
+ };
+
+ self.client.send(proto::UnshareProject { project_id })?;
+ project.update(cx, |this, cx| this.unshare(cx))?;
+
+ if self.local_participant.active_project == Some(project.downgrade()) {
+ self.set_location(Some(&project), cx).detach_and_log_err(cx);
+ }
+ Ok(())
+ }
+
+ pub(crate) fn set_location(
+ &mut self,
+ project: Option<&Model<Project>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+
+ let client = self.client.clone();
+ let room_id = self.id;
+ let location = if let Some(project) = project {
+ self.local_participant.active_project = Some(project.downgrade());
+ if let Some(project_id) = project.read(cx).remote_id() {
+ proto::participant_location::Variant::SharedProject(
+ proto::participant_location::SharedProject { id: project_id },
+ )
+ } else {
+ proto::participant_location::Variant::UnsharedProject(
+ proto::participant_location::UnsharedProject {},
+ )
+ }
+ } else {
+ self.local_participant.active_project = None;
+ proto::participant_location::Variant::External(proto::participant_location::External {})
+ };
+
+ cx.notify();
+ cx.background_executor().spawn(async move {
+ client
+ .request(proto::UpdateParticipantLocation {
+ room_id,
+ location: Some(proto::ParticipantLocation {
+ variant: Some(location),
+ }),
+ })
+ .await?;
+ Ok(())
+ })
+ }
+
+ pub fn is_screen_sharing(&self) -> bool {
+ self.live_kit.as_ref().map_or(false, |live_kit| {
+ !matches!(live_kit.screen_track, LocalTrack::None)
+ })
+ }
+
+ pub fn is_sharing_mic(&self) -> bool {
+ self.live_kit.as_ref().map_or(false, |live_kit| {
+ !matches!(live_kit.microphone_track, LocalTrack::None)
+ })
+ }
+
+ pub fn is_muted(&self) -> bool {
+ self.live_kit.as_ref().map_or(false, |live_kit| {
+ matches!(live_kit.microphone_track, LocalTrack::None)
+ || live_kit.muted_by_user
+ || live_kit.deafened
+ })
+ }
+
+ pub fn is_speaking(&self) -> bool {
+ self.live_kit
+ .as_ref()
+ .map_or(false, |live_kit| live_kit.speaking)
+ }
+
+ pub fn is_deafened(&self) -> Option<bool> {
+ self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
+ }
+
+ pub fn can_use_microphone(&self, _cx: &AppContext) -> bool {
+ use proto::ChannelRole::*;
+
+ #[cfg(not(any(test, feature = "test-support")))]
+ {
+ use feature_flags::FeatureFlagAppExt as _;
+ if cfg!(target_os = "windows") || (cfg!(target_os = "linux") && !_cx.is_staff()) {
+ return false;
+ }
+ }
+
+ match self.local_participant.role {
+ Admin | Member | Talker => true,
+ Guest | Banned => false,
+ }
+ }
+
+ pub fn can_share_projects(&self) -> bool {
+ use proto::ChannelRole::*;
+ match self.local_participant.role {
+ Admin | Member => true,
+ Guest | Banned | Talker => false,
+ }
+ }
+
+ #[cfg(target_os = "windows")]
+ pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ Task::ready(Err(anyhow!("Windows is not supported yet")))
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ #[track_caller]
+ pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+
+ let (participant, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
+ let publish_id = post_inc(&mut live_kit.next_publish_id);
+ live_kit.microphone_track = LocalTrack::Pending { publish_id };
+ cx.notify();
+ (live_kit.room.local_participant(), publish_id)
+ } else {
+ return Task::ready(Err(anyhow!("live-kit was not initialized")));
+ };
+
+ cx.spawn(move |this, mut cx| async move {
+ let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
+
+ let publication = participant
+ .publish_track(
+ livekit::track::LocalTrack::Audio(track),
+ TrackPublishOptions {
+ source: TrackSource::Microphone,
+ ..Default::default()
+ },
+ )
+ .await
+ .map_err(|error| anyhow!("failed to publish track: {error}"));
+ this.update(&mut cx, |this, cx| {
+ let live_kit = this
+ .live_kit
+ .as_mut()
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+
+ let canceled = if let LocalTrack::Pending {
+ publish_id: cur_publish_id,
+ } = &live_kit.microphone_track
+ {
+ *cur_publish_id != publish_id
+ } else {
+ true
+ };
+
+ match publication {
+ Ok(publication) => {
+ if canceled {
+ cx.background_executor()
+ .spawn(async move {
+ participant.unpublish_track(&publication.sid()).await
+ })
+ .detach_and_log_err(cx)
+ } else {
+ if live_kit.muted_by_user || live_kit.deafened {
+ publication.mute();
+ }
+ live_kit.microphone_track = LocalTrack::Published {
+ track_publication: publication,
+ _stream: Box::new(stream),
+ };
+ cx.notify();
+ }
+ Ok(())
+ }
+ Err(error) => {
+ if canceled {
+ Ok(())
+ } else {
+ live_kit.microphone_track = LocalTrack::None;
+ cx.notify();
+ Err(error)
+ }
+ }
+ }
+ })?
+ })
+ }
+
+ #[cfg(target_os = "windows")]
+ pub fn share_screen(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ Task::ready(Err(anyhow!("Windows is not supported yet")))
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ pub fn share_screen(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ }
+ if self.is_screen_sharing() {
+ return Task::ready(Err(anyhow!("screen was already shared")));
+ }
+
+ let (participant, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
+ let publish_id = post_inc(&mut live_kit.next_publish_id);
+ live_kit.screen_track = LocalTrack::Pending { publish_id };
+ cx.notify();
+ (live_kit.room.local_participant(), publish_id)
+ } else {
+ return Task::ready(Err(anyhow!("live-kit was not initialized")));
+ };
+
+ let sources = cx.screen_capture_sources();
+
+ cx.spawn(move |this, mut cx| async move {
+ let sources = sources.await??;
+ let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
+
+ let (track, stream) = capture_local_video_track(&**source).await?;
+
+ let publication = participant
+ .publish_track(
+ livekit::track::LocalTrack::Video(track),
+ TrackPublishOptions {
+ source: TrackSource::Screenshare,
+ video_codec: VideoCodec::H264,
+ ..Default::default()
+ },
+ )
+ .await
+ .map_err(|error| anyhow!("error publishing screen track {error:?}"));
+
+ this.update(&mut cx, |this, cx| {
+ let live_kit = this
+ .live_kit
+ .as_mut()
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+
+ let canceled = if let LocalTrack::Pending {
+ publish_id: cur_publish_id,
+ } = &live_kit.screen_track
+ {
+ *cur_publish_id != publish_id
+ } else {
+ true
+ };
+
+ match publication {
+ Ok(publication) => {
+ if canceled {
+ cx.background_executor()
+ .spawn(async move {
+ participant.unpublish_track(&publication.sid()).await
+ })
+ .detach()
+ } else {
+ live_kit.screen_track = LocalTrack::Published {
+ track_publication: publication,
+ _stream: Box::new(stream),
+ };
+ cx.notify();
+ }
+
+ Audio::play_sound(Sound::StartScreenshare, cx);
+ Ok(())
+ }
+ Err(error) => {
+ if canceled {
+ Ok(())
+ } else {
+ live_kit.screen_track = LocalTrack::None;
+ cx.notify();
+ Err(error)
+ }
+ }
+ }
+ })?
+ })
+ }
+
+ pub fn toggle_mute(&mut self, cx: &mut ModelContext<Self>) {
+ if let Some(live_kit) = self.live_kit.as_mut() {
+ // When unmuting, undeafen if the user was deafened before.
+ let was_deafened = live_kit.deafened;
+ if live_kit.muted_by_user
+ || live_kit.deafened
+ || matches!(live_kit.microphone_track, LocalTrack::None)
+ {
+ live_kit.muted_by_user = false;
+ live_kit.deafened = false;
+ } else {
+ live_kit.muted_by_user = true;
+ }
+ let muted = live_kit.muted_by_user;
+ let should_undeafen = was_deafened && !live_kit.deafened;
+
+ if let Some(task) = self.set_mute(muted, cx) {
+ task.detach_and_log_err(cx);
+ }
+
+ if should_undeafen {
+ self.set_deafened(false, cx);
+ }
+ }
+ }
+
+ pub fn toggle_deafen(&mut self, cx: &mut ModelContext<Self>) {
+ if let Some(live_kit) = self.live_kit.as_mut() {
+ // When deafening, mute the microphone if it was not already muted.
+ // When un-deafening, unmute the microphone, unless it was explicitly muted.
+ let deafened = !live_kit.deafened;
+ live_kit.deafened = deafened;
+ let should_change_mute = !live_kit.muted_by_user;
+
+ self.set_deafened(deafened, cx);
+
+ if should_change_mute {
+ if let Some(task) = self.set_mute(deafened, cx) {
+ task.detach_and_log_err(cx);
+ }
+ }
+ }
+ }
+
+ pub fn unshare_screen(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
+ if self.status.is_offline() {
+ return Err(anyhow!("room is offline"));
+ }
+
+ let live_kit = self
+ .live_kit
+ .as_mut()
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+ match mem::take(&mut live_kit.screen_track) {
+ LocalTrack::None => Err(anyhow!("screen was not shared")),
+ LocalTrack::Pending { .. } => {
+ cx.notify();
+ Ok(())
+ }
+ LocalTrack::Published {
+ track_publication, ..
+ } => {
+ #[cfg(not(target_os = "windows"))]
+ {
+ let local_participant = live_kit.room.local_participant();
+ let sid = track_publication.sid();
+ cx.background_executor()
+ .spawn(async move { local_participant.unpublish_track(&sid).await })
+ .detach_and_log_err(cx);
+ cx.notify();
+ }
+ Audio::play_sound(Sound::StopScreenshare, cx);
+ Ok(())
+ }
+ }
+ }
+
+ fn set_deafened(&mut self, deafened: bool, cx: &mut ModelContext<Self>) -> Option<()> {
+ #[cfg(not(target_os = "windows"))]
+ {
+ let live_kit = self.live_kit.as_mut()?;
+ cx.notify();
+ for (_, participant) in live_kit.room.remote_participants() {
+ for (_, publication) in participant.track_publications() {
+ if publication.kind() == TrackKind::Audio {
+ publication.set_enabled(!deafened);
+ }
+ }
+ }
+ }
+
+ None
+ }
+
+ fn set_mute(
+ &mut self,
+ should_mute: bool,
+ cx: &mut ModelContext<Room>,
+ ) -> Option<Task<Result<()>>> {
+ let live_kit = self.live_kit.as_mut()?;
+ cx.notify();
+
+ if should_mute {
+ Audio::play_sound(Sound::Mute, cx);
+ } else {
+ Audio::play_sound(Sound::Unmute, cx);
+ }
+
+ match &mut live_kit.microphone_track {
+ LocalTrack::None => {
+ if should_mute {
+ None
+ } else {
+ Some(self.share_microphone(cx))
+ }
+ }
+ LocalTrack::Pending { .. } => None,
+ LocalTrack::Published {
+ track_publication, ..
+ } => {
+ #[cfg(not(target_os = "windows"))]
+ {
+ if should_mute {
+ track_publication.mute()
+ } else {
+ track_publication.unmute()
+ }
+ }
+ None
+ }
+ }
+ }
+}
+
+#[cfg(target_os = "windows")]
+fn spawn_room_connection(
+ livekit_connection_info: Option<proto::LiveKitConnectionInfo>,
+ cx: &mut ModelContext<'_, Room>,
+) {
+}
+
+#[cfg(not(target_os = "windows"))]
+fn spawn_room_connection(
+ livekit_connection_info: Option<proto::LiveKitConnectionInfo>,
+ cx: &mut ModelContext<'_, Room>,
+) {
+ if let Some(connection_info) = livekit_connection_info {
+ cx.spawn(|this, mut cx| async move {
+ let (room, mut events) = livekit::Room::connect(
+ &connection_info.server_url,
+ &connection_info.token,
+ RoomOptions::default(),
+ )
+ .await?;
+
+ this.update(&mut cx, |this, cx| {
+ let _handle_updates = cx.spawn(|this, mut cx| async move {
+ while let Some(event) = events.recv().await {
+ if this
+ .update(&mut cx, |this, cx| {
+ this.livekit_room_updated(event, cx).warn_on_err();
+ })
+ .is_err()
+ {
+ break;
+ }
+ }
+ });
+
+ let muted_by_user = Room::mute_on_join(cx);
+ this.live_kit = Some(LiveKitRoom {
+ room: Arc::new(room),
+ screen_track: LocalTrack::None,
+ microphone_track: LocalTrack::None,
+ next_publish_id: 0,
+ muted_by_user,
+ deafened: false,
+ speaking: false,
+ _handle_updates,
+ });
+
+ if !muted_by_user && this.can_use_microphone(cx) {
+ this.share_microphone(cx)
+ } else {
+ Task::ready(Ok(()))
+ }
+ })?
+ .await
+ })
+ .detach_and_log_err(cx);
+ }
+}
+
+struct LiveKitRoom {
+ room: Arc<livekit::Room>,
+ screen_track: LocalTrack,
+ microphone_track: LocalTrack,
+    /// Tracks whether we're currently in a muted state due to auto-mute from deafening or a manual mute performed by the user.
+ muted_by_user: bool,
+ deafened: bool,
+ speaking: bool,
+ next_publish_id: usize,
+ _handle_updates: Task<()>,
+}
+
+impl LiveKitRoom {
+ #[cfg(target_os = "windows")]
+ fn stop_publishing(&mut self, _cx: &mut ModelContext<Room>) {}
+
+ #[cfg(not(target_os = "windows"))]
+ fn stop_publishing(&mut self, cx: &mut ModelContext<Room>) {
+ let mut tracks_to_unpublish = Vec::new();
+ if let LocalTrack::Published {
+ track_publication, ..
+ } = mem::replace(&mut self.microphone_track, LocalTrack::None)
+ {
+ tracks_to_unpublish.push(track_publication.sid());
+ cx.notify();
+ }
+
+ if let LocalTrack::Published {
+ track_publication, ..
+ } = mem::replace(&mut self.screen_track, LocalTrack::None)
+ {
+ tracks_to_unpublish.push(track_publication.sid());
+ cx.notify();
+ }
+
+ let participant = self.room.local_participant();
+ cx.background_executor()
+ .spawn(async move {
+ for sid in tracks_to_unpublish {
+ participant.unpublish_track(&sid).await.log_err();
+ }
+ })
+ .detach();
+ }
+}
+
+enum LocalTrack {
+ None,
+ Pending {
+ publish_id: usize,
+ },
+ Published {
+ track_publication: LocalTrackPublication,
+ _stream: Box<dyn Any>,
+ },
+}
+
+impl Default for LocalTrack {
+ fn default() -> Self {
+ Self::None
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum RoomStatus {
+ Online,
+ Rejoining,
+ Offline,
+}
+
+impl RoomStatus {
+ pub fn is_offline(&self) -> bool {
+ matches!(self, RoomStatus::Offline)
+ }
+
+ pub fn is_online(&self) -> bool {
+ matches!(self, RoomStatus::Online)
+ }
+}
@@ -0,0 +1,545 @@
+pub mod participant;
+pub mod room;
+
+use crate::call_settings::CallSettings;
+use anyhow::{anyhow, Result};
+use audio::Audio;
+use client::{proto, ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
+use collections::HashSet;
+use futures::{channel::oneshot, future::Shared, Future, FutureExt};
+use gpui::{
+ AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Subscription,
+ Task, WeakModel,
+};
+use postage::watch;
+use project::Project;
+use room::Event;
+use settings::Settings;
+use std::sync::Arc;
+
+pub use participant::ParticipantLocation;
+pub use room::Room;
+
+struct GlobalActiveCall(Model<ActiveCall>);
+
+impl Global for GlobalActiveCall {}
+
+pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
+ CallSettings::register(cx);
+
+ let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
+ cx.set_global(GlobalActiveCall(active_call));
+}
+
+pub struct OneAtATime {
+ cancel: Option<oneshot::Sender<()>>,
+}
+
+impl OneAtATime {
+    /// Spawns a task in the given context.
+    /// If another task is spawned before the first resolves, or if the `OneAtATime` itself is dropped,
+    /// the first task is cancelled and resolves to `Ok(None)`; otherwise the task's result is returned.
+ fn spawn<F, Fut, R>(&mut self, cx: &mut AppContext, f: F) -> Task<Result<Option<R>>>
+ where
+ F: 'static + FnOnce(AsyncAppContext) -> Fut,
+ Fut: Future<Output = Result<R>>,
+ R: 'static,
+ {
+ let (tx, rx) = oneshot::channel();
+ self.cancel.replace(tx);
+ cx.spawn(|cx| async move {
+ futures::select_biased! {
+ _ = rx.fuse() => Ok(None),
+ result = f(cx).fuse() => result.map(Some),
+ }
+ })
+ }
+
+ fn running(&self) -> bool {
+ self.cancel
+ .as_ref()
+ .is_some_and(|cancel| !cancel.is_canceled())
+ }
+}
+
+#[derive(Clone)]
+pub struct IncomingCall {
+ pub room_id: u64,
+ pub calling_user: Arc<User>,
+ pub participants: Vec<Arc<User>>,
+ pub initial_project: Option<proto::ParticipantProject>,
+}
+
+/// Singleton global maintaining the user's participation in a room across workspaces.
+pub struct ActiveCall {
+ room: Option<(Model<Room>, Vec<Subscription>)>,
+ pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
+ location: Option<WeakModel<Project>>,
+ _join_debouncer: OneAtATime,
+ pending_invites: HashSet<u64>,
+ incoming_call: (
+ watch::Sender<Option<IncomingCall>>,
+ watch::Receiver<Option<IncomingCall>>,
+ ),
+ client: Arc<Client>,
+ user_store: Model<UserStore>,
+ _subscriptions: Vec<client::Subscription>,
+}
+
+impl EventEmitter<Event> for ActiveCall {}
+
+impl ActiveCall {
+ fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
+ Self {
+ room: None,
+ pending_room_creation: None,
+ location: None,
+ pending_invites: Default::default(),
+ incoming_call: watch::channel(),
+ _join_debouncer: OneAtATime { cancel: None },
+ _subscriptions: vec![
+ client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
+ client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
+ ],
+ client,
+ user_store,
+ }
+ }
+
+ pub fn channel_id(&self, cx: &AppContext) -> Option<ChannelId> {
+ self.room()?.read(cx).channel_id()
+ }
+
+ async fn handle_incoming_call(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::IncomingCall>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
+ let call = IncomingCall {
+ room_id: envelope.payload.room_id,
+ participants: user_store
+ .update(&mut cx, |user_store, cx| {
+ user_store.get_users(envelope.payload.participant_user_ids, cx)
+ })?
+ .await?,
+ calling_user: user_store
+ .update(&mut cx, |user_store, cx| {
+ user_store.get_user(envelope.payload.calling_user_id, cx)
+ })?
+ .await?,
+ initial_project: envelope.payload.initial_project,
+ };
+ this.update(&mut cx, |this, _| {
+ *this.incoming_call.0.borrow_mut() = Some(call);
+ })?;
+
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_call_canceled(
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::CallCanceled>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, _| {
+ let mut incoming_call = this.incoming_call.0.borrow_mut();
+ if incoming_call
+ .as_ref()
+ .map_or(false, |call| call.room_id == envelope.payload.room_id)
+ {
+ incoming_call.take();
+ }
+ })?;
+ Ok(())
+ }
+
+ pub fn global(cx: &AppContext) -> Model<Self> {
+ cx.global::<GlobalActiveCall>().0.clone()
+ }
+
+ pub fn try_global(cx: &AppContext) -> Option<Model<Self>> {
+ cx.try_global::<GlobalActiveCall>()
+ .map(|call| call.0.clone())
+ }
+
+ pub fn invite(
+ &mut self,
+ called_user_id: u64,
+ initial_project: Option<Model<Project>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if !self.pending_invites.insert(called_user_id) {
+ return Task::ready(Err(anyhow!("user was already invited")));
+ }
+ cx.notify();
+
+ if self._join_debouncer.running() {
+ return Task::ready(Ok(()));
+ }
+
+ let room = if let Some(room) = self.room().cloned() {
+ Some(Task::ready(Ok(room)).shared())
+ } else {
+ self.pending_room_creation.clone()
+ };
+
+ let invite = if let Some(room) = room {
+ cx.spawn(move |_, mut cx| async move {
+ let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
+
+ let initial_project_id = if let Some(initial_project) = initial_project {
+ Some(
+ room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
+ .await?,
+ )
+ } else {
+ None
+ };
+
+ room.update(&mut cx, move |room, cx| {
+ room.call(called_user_id, initial_project_id, cx)
+ })?
+ .await?;
+
+ anyhow::Ok(())
+ })
+ } else {
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let room = cx
+ .spawn(move |this, mut cx| async move {
+ let create_room = async {
+ let room = cx
+ .update(|cx| {
+ Room::create(
+ called_user_id,
+ initial_project,
+ client,
+ user_store,
+ cx,
+ )
+ })?
+ .await?;
+
+ this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
+ .await?;
+
+ anyhow::Ok(room)
+ };
+
+ let room = create_room.await;
+ this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
+ room.map_err(Arc::new)
+ })
+ .shared();
+ self.pending_room_creation = Some(room.clone());
+ cx.background_executor().spawn(async move {
+ room.await.map_err(|err| anyhow!("{:?}", err))?;
+ anyhow::Ok(())
+ })
+ };
+
+ cx.spawn(move |this, mut cx| async move {
+ let result = invite.await;
+ if result.is_ok() {
+ this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
+ } else {
+ //TODO: report collaboration error
+ log::error!("invite failed: {:?}", result);
+ }
+
+ this.update(&mut cx, |this, cx| {
+ this.pending_invites.remove(&called_user_id);
+ cx.notify();
+ })?;
+ result
+ })
+ }
+
+ pub fn cancel_invite(
+ &mut self,
+ called_user_id: u64,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let room_id = if let Some(room) = self.room() {
+ room.read(cx).id()
+ } else {
+ return Task::ready(Err(anyhow!("no active call")));
+ };
+
+ let client = self.client.clone();
+ cx.background_executor().spawn(async move {
+ client
+ .request(proto::CancelCall {
+ room_id,
+ called_user_id,
+ })
+ .await?;
+ anyhow::Ok(())
+ })
+ }
+
+ pub fn incoming(&self) -> watch::Receiver<Option<IncomingCall>> {
+ self.incoming_call.1.clone()
+ }
+
+ pub fn accept_incoming(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ if self.room.is_some() {
+ return Task::ready(Err(anyhow!("cannot join while on another call")));
+ }
+
+ let call = if let Some(call) = self.incoming_call.0.borrow_mut().take() {
+ call
+ } else {
+ return Task::ready(Err(anyhow!("no incoming call")));
+ };
+
+ if self.pending_room_creation.is_some() {
+ return Task::ready(Ok(()));
+ }
+
+ let room_id = call.room_id;
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let join = self
+ ._join_debouncer
+ .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
+
+ cx.spawn(|this, mut cx| async move {
+ let room = join.await?;
+ this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.report_call_event("accept incoming", cx)
+ })?;
+ Ok(())
+ })
+ }
+
+ pub fn decline_incoming(&mut self, _: &mut ModelContext<Self>) -> Result<()> {
+ let call = self
+ .incoming_call
+ .0
+ .borrow_mut()
+ .take()
+ .ok_or_else(|| anyhow!("no incoming call"))?;
+ report_call_event_for_room("decline incoming", call.room_id, None, &self.client);
+ self.client.send(proto::DeclineCall {
+ room_id: call.room_id,
+ })?;
+ Ok(())
+ }
+
+ pub fn join_channel(
+ &mut self,
+ channel_id: ChannelId,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Model<Room>>>> {
+ if let Some(room) = self.room().cloned() {
+ if room.read(cx).channel_id() == Some(channel_id) {
+ return Task::ready(Ok(Some(room)));
+ } else {
+ room.update(cx, |room, cx| room.clear_state(cx));
+ }
+ }
+
+ if self.pending_room_creation.is_some() {
+ return Task::ready(Ok(None));
+ }
+
+ let client = self.client.clone();
+ let user_store = self.user_store.clone();
+ let join = self._join_debouncer.spawn(cx, move |cx| async move {
+ Room::join_channel(channel_id, client, user_store, cx).await
+ });
+
+ cx.spawn(|this, mut cx| async move {
+ let room = join.await?;
+ this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.report_call_event("join channel", cx)
+ })?;
+ Ok(room)
+ })
+ }
+
+ pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ cx.notify();
+ self.report_call_event("hang up", cx);
+
+ Audio::end_call(cx);
+
+ let channel_id = self.channel_id(cx);
+ if let Some((room, _)) = self.room.take() {
+ cx.emit(Event::RoomLeft { channel_id });
+ room.update(cx, |room, cx| room.leave(cx))
+ } else {
+ Task::ready(Ok(()))
+ }
+ }
+
+ pub fn share_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<u64>> {
+ if let Some((room, _)) = self.room.as_ref() {
+ self.report_call_event("share project", cx);
+ room.update(cx, |room, cx| room.share_project(project, cx))
+ } else {
+ Task::ready(Err(anyhow!("no active call")))
+ }
+ }
+
+ pub fn unshare_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ if let Some((room, _)) = self.room.as_ref() {
+ self.report_call_event("unshare project", cx);
+ room.update(cx, |room, cx| room.unshare_project(project, cx))
+ } else {
+ Err(anyhow!("no active call"))
+ }
+ }
+
+ pub fn location(&self) -> Option<&WeakModel<Project>> {
+ self.location.as_ref()
+ }
+
+ pub fn set_location(
+ &mut self,
+ project: Option<&Model<Project>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if project.is_some() || !*ZED_ALWAYS_ACTIVE {
+ self.location = project.map(|project| project.downgrade());
+ if let Some((room, _)) = self.room.as_ref() {
+ return room.update(cx, |room, cx| room.set_location(project, cx));
+ }
+ }
+ Task::ready(Ok(()))
+ }
+
+ fn set_room(
+ &mut self,
+ room: Option<Model<Room>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if room.as_ref() == self.room.as_ref().map(|room| &room.0) {
+ Task::ready(Ok(()))
+ } else {
+ cx.notify();
+ if let Some(room) = room {
+ if room.read(cx).status().is_offline() {
+ self.room = None;
+ Task::ready(Ok(()))
+ } else {
+ let subscriptions = vec![
+ cx.observe(&room, |this, room, cx| {
+ if room.read(cx).status().is_offline() {
+ this.set_room(None, cx).detach_and_log_err(cx);
+ }
+
+ cx.notify();
+ }),
+ cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
+ ];
+ self.room = Some((room.clone(), subscriptions));
+ let location = self
+ .location
+ .as_ref()
+ .and_then(|location| location.upgrade());
+ let channel_id = room.read(cx).channel_id();
+ cx.emit(Event::RoomJoined { channel_id });
+ room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
+ }
+ } else {
+ self.room = None;
+ Task::ready(Ok(()))
+ }
+ }
+ }
+
+ pub fn room(&self) -> Option<&Model<Room>> {
+ self.room.as_ref().map(|(room, _)| room)
+ }
+
+ pub fn client(&self) -> Arc<Client> {
+ self.client.clone()
+ }
+
+ pub fn pending_invites(&self) -> &HashSet<u64> {
+ &self.pending_invites
+ }
+
+ pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
+ if let Some(room) = self.room() {
+ let room = room.read(cx);
+ report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client);
+ }
+ }
+}
+
+pub fn report_call_event_for_room(
+ operation: &'static str,
+ room_id: u64,
+ channel_id: Option<ChannelId>,
+ client: &Arc<Client>,
+) {
+ let telemetry = client.telemetry();
+
+ telemetry.report_call_event(operation, Some(room_id), channel_id)
+}
+
+pub fn report_call_event_for_channel(
+ operation: &'static str,
+ channel_id: ChannelId,
+ client: &Arc<Client>,
+ cx: &AppContext,
+) {
+ let room = ActiveCall::global(cx).read(cx).room();
+
+ let telemetry = client.telemetry();
+
+ telemetry.report_call_event(operation, room.map(|r| r.read(cx).id()), Some(channel_id))
+}
+
+#[cfg(test)]
+mod test {
+ use gpui::TestAppContext;
+
+ use crate::OneAtATime;
+
+ #[gpui::test]
+ async fn test_one_at_a_time(cx: &mut TestAppContext) {
+ let mut one_at_a_time = OneAtATime { cancel: None };
+
+ assert_eq!(
+ cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) }))
+ .await
+ .unwrap(),
+ Some(1)
+ );
+
+ let (a, b) = cx.update(|cx| {
+ (
+ one_at_a_time.spawn(cx, |_| async {
+ panic!("");
+ }),
+ one_at_a_time.spawn(cx, |_| async { Ok(3) }),
+ )
+ });
+
+ assert_eq!(a.await.unwrap(), None::<u32>);
+ assert_eq!(b.await.unwrap(), Some(3));
+
+ let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));
+ drop(one_at_a_time);
+
+ assert_eq!(promise.await.unwrap(), None);
+ }
+}
@@ -3,8 +3,8 @@ use client::ParticipantIndex;
use client::{proto, User};
use collections::HashMap;
use gpui::WeakModel;
-pub use live_kit_client::Frame;
-pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
+pub use livekit_client_macos::Frame;
+pub use livekit_client_macos::{RemoteAudioTrack, RemoteVideoTrack};
use project::Project;
use std::sync::Arc;
@@ -49,6 +49,12 @@ pub struct RemoteParticipant {
pub participant_index: ParticipantIndex,
pub muted: bool,
pub speaking: bool,
- pub video_tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
- pub audio_tracks: HashMap<live_kit_client::Sid, Arc<RemoteAudioTrack>>,
+ pub video_tracks: HashMap<livekit_client_macos::Sid, Arc<RemoteVideoTrack>>,
+ pub audio_tracks: HashMap<livekit_client_macos::Sid, Arc<RemoteAudioTrack>>,
+}
+
+impl RemoteParticipant {
+ pub fn has_video_tracks(&self) -> bool {
+ !self.video_tracks.is_empty()
+ }
}
@@ -15,7 +15,7 @@ use gpui::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use language::LanguageRegistry;
-use live_kit_client::{LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RoomUpdate};
+use livekit_client_macos::{LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RoomUpdate};
use postage::{sink::Sink, stream::Stream, watch};
use project::Project;
use settings::Settings as _;
@@ -97,7 +97,7 @@ impl Room {
if let Some(live_kit) = self.live_kit.as_ref() {
matches!(
*live_kit.room.status().borrow(),
- live_kit_client::ConnectionState::Connected { .. }
+ livekit_client_macos::ConnectionState::Connected { .. }
)
} else {
false
@@ -113,7 +113,7 @@ impl Room {
cx: &mut ModelContext<Self>,
) -> Self {
let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
- let room = live_kit_client::Room::new();
+ let room = livekit_client_macos::Room::new();
let mut status = room.status();
// Consume the initial status of the room.
let _ = status.try_recv();
@@ -125,7 +125,7 @@ impl Room {
break;
};
- if status == live_kit_client::ConnectionState::Disconnected {
+ if status == livekit_client_macos::ConnectionState::Disconnected {
this.update(&mut cx, |this, cx| this.leave(cx).log_err())
.ok();
break;
@@ -156,7 +156,7 @@ impl Room {
cx.spawn(|this, mut cx| async move {
connect.await?;
this.update(&mut cx, |this, cx| {
- if this.can_use_microphone() {
+ if this.can_use_microphone(cx) {
if let Some(live_kit) = &this.live_kit {
if !live_kit.muted_by_user && !live_kit.deafened {
return this.share_microphone(cx);
@@ -1317,7 +1317,7 @@ impl Room {
self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
}
- pub fn can_use_microphone(&self) -> bool {
+ pub fn can_use_microphone(&self, _cx: &AppContext) -> bool {
use proto::ChannelRole::*;
match self.local_participant.role {
Admin | Member | Talker => true,
@@ -1631,7 +1631,7 @@ impl Room {
}
#[cfg(any(test, feature = "test-support"))]
- pub fn set_display_sources(&self, sources: Vec<live_kit_client::MacOSDisplay>) {
+ pub fn set_display_sources(&self, sources: Vec<livekit_client_macos::MacOSDisplay>) {
self.live_kit
.as_ref()
.unwrap()
@@ -1641,7 +1641,7 @@ impl Room {
}
struct LiveKitRoom {
- room: Arc<live_kit_client::Room>,
+ room: Arc<livekit_client_macos::Room>,
screen_track: LocalTrack,
microphone_track: LocalTrack,
/// Tracks whether we're currently in a muted state due to auto-mute from deafening or manual mute performed by user.
@@ -5,9 +5,9 @@ HTTP_PORT = 8080
API_TOKEN = "secret"
INVITE_LINK_PREFIX = "http://localhost:3000/invites/"
ZED_ENVIRONMENT = "development"
-LIVE_KIT_SERVER = "http://localhost:7880"
-LIVE_KIT_KEY = "devkey"
-LIVE_KIT_SECRET = "secret"
+LIVEKIT_SERVER = "http://localhost:7880"
+LIVEKIT_KEY = "devkey"
+LIVEKIT_SECRET = "secret"
BLOB_STORE_ACCESS_KEY = "the-blob-store-access-key"
BLOB_STORE_SECRET_KEY = "the-blob-store-secret-key"
BLOB_STORE_BUCKET = "the-extensions-bucket"
@@ -40,7 +40,7 @@ google_ai.workspace = true
hex.workspace = true
http_client.workspace = true
jsonwebtoken.workspace = true
-live_kit_server.workspace = true
+livekit_server.workspace = true
log.workspace = true
nanoid.workspace = true
open_ai.workspace = true
@@ -77,6 +77,12 @@ tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "re
util.workspace = true
uuid.workspace = true
+[target.'cfg(target_os = "macos")'.dependencies]
+livekit_client_macos = { workspace = true, features = ["test-support"] }
+
+[target.'cfg(not(target_os = "macos"))'.dependencies]
+livekit_client = { workspace = true, features = ["test-support"] }
+
[dev-dependencies]
assistant = { workspace = true, features = ["test-support"] }
assistant_tool.workspace = true
@@ -101,7 +107,6 @@ hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
-live_kit_client = { workspace = true, features = ["test-support"] }
lsp = { workspace = true, features = ["test-support"] }
menu.workspace = true
multi_buffer = { workspace = true, features = ["test-support"] }
@@ -125,5 +130,11 @@ util.workspace = true
workspace = { workspace = true, features = ["test-support"] }
worktree = { workspace = true, features = ["test-support"] }
+[target.'cfg(target_os = "macos")'.dev-dependencies]
+livekit_client_macos = { workspace = true, features = ["test-support"] }
+
+[target.'cfg(not(target_os = "macos"))'.dev-dependencies]
+livekit_client = { workspace = true, features = ["test-support"] }
+
[package.metadata.cargo-machete]
ignored = ["async-stripe"]
@@ -109,17 +109,17 @@ spec:
secretKeyRef:
name: zed-client
key: checksum-seed
- - name: LIVE_KIT_SERVER
+ - name: LIVEKIT_SERVER
valueFrom:
secretKeyRef:
name: livekit
key: server
- - name: LIVE_KIT_KEY
+ - name: LIVEKIT_KEY
valueFrom:
secretKeyRef:
name: livekit
key: key
- - name: LIVE_KIT_SECRET
+ - name: LIVEKIT_SECRET
valueFrom:
secretKeyRef:
name: livekit
@@ -154,9 +154,9 @@ impl Database {
}
let role = role.unwrap();
- let live_kit_room = format!("channel-{}", nanoid::nanoid!(30));
+ let livekit_room = format!("channel-{}", nanoid::nanoid!(30));
let room_id = self
- .get_or_create_channel_room(channel_id, &live_kit_room, &tx)
+ .get_or_create_channel_room(channel_id, &livekit_room, &tx)
.await?;
self.join_channel_room_internal(room_id, user_id, connection, role, &tx)
@@ -896,7 +896,7 @@ impl Database {
pub(crate) async fn get_or_create_channel_room(
&self,
channel_id: ChannelId,
- live_kit_room: &str,
+ livekit_room: &str,
tx: &DatabaseTransaction,
) -> Result<RoomId> {
let room = room::Entity::find()
@@ -909,7 +909,7 @@ impl Database {
} else {
let result = room::Entity::insert(room::ActiveModel {
channel_id: ActiveValue::Set(Some(channel_id)),
- live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
+ live_kit_room: ActiveValue::Set(livekit_room.to_string()),
..Default::default()
})
.exec(tx)
@@ -103,11 +103,11 @@ impl Database {
&self,
user_id: UserId,
connection: ConnectionId,
- live_kit_room: &str,
+ livekit_room: &str,
) -> Result<proto::Room> {
self.transaction(|tx| async move {
let room = room::ActiveModel {
- live_kit_room: ActiveValue::set(live_kit_room.into()),
+ live_kit_room: ActiveValue::set(livekit_room.into()),
..Default::default()
}
.insert(&*tx)
@@ -1316,7 +1316,7 @@ impl Database {
channel,
proto::Room {
id: db_room.id.to_proto(),
- live_kit_room: db_room.live_kit_room,
+ livekit_room: db_room.live_kit_room,
participants: participants.into_values().collect(),
pending_participants,
followers,
@@ -156,9 +156,9 @@ pub struct Config {
pub clickhouse_password: Option<String>,
pub clickhouse_database: Option<String>,
pub invite_link_prefix: String,
- pub live_kit_server: Option<String>,
- pub live_kit_key: Option<String>,
- pub live_kit_secret: Option<String>,
+ pub livekit_server: Option<String>,
+ pub livekit_key: Option<String>,
+ pub livekit_secret: Option<String>,
pub llm_database_url: Option<String>,
pub llm_database_max_connections: Option<u32>,
pub llm_database_migrations_path: Option<PathBuf>,
@@ -210,9 +210,9 @@ impl Config {
database_max_connections: 0,
api_token: "".into(),
invite_link_prefix: "".into(),
- live_kit_server: None,
- live_kit_key: None,
- live_kit_secret: None,
+ livekit_server: None,
+ livekit_key: None,
+ livekit_secret: None,
llm_database_url: None,
llm_database_max_connections: None,
llm_database_migrations_path: None,
@@ -277,7 +277,7 @@ impl ServiceMode {
pub struct AppState {
pub db: Arc<Database>,
pub llm_db: Option<Arc<LlmDatabase>>,
- pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
+ pub livekit_client: Option<Arc<dyn livekit_server::api::Client>>,
pub blob_store_client: Option<aws_sdk_s3::Client>,
pub stripe_client: Option<Arc<stripe::Client>>,
pub stripe_billing: Option<Arc<StripeBilling>>,
@@ -309,17 +309,17 @@ impl AppState {
None
};
- let live_kit_client = if let Some(((server, key), secret)) = config
- .live_kit_server
+ let livekit_client = if let Some(((server, key), secret)) = config
+ .livekit_server
.as_ref()
- .zip(config.live_kit_key.as_ref())
- .zip(config.live_kit_secret.as_ref())
+ .zip(config.livekit_key.as_ref())
+ .zip(config.livekit_secret.as_ref())
{
- Some(Arc::new(live_kit_server::api::LiveKitClient::new(
+ Some(Arc::new(livekit_server::api::LiveKitClient::new(
server.clone(),
key.clone(),
secret.clone(),
- )) as Arc<dyn live_kit_server::api::Client>)
+ )) as Arc<dyn livekit_server::api::Client>)
} else {
None
};
@@ -329,7 +329,7 @@ impl AppState {
let this = Self {
db: db.clone(),
llm_db,
- live_kit_client,
+ livekit_client,
blob_store_client: build_blob_store_client(&config).await.log_err(),
stripe_billing: stripe_client
.clone()
@@ -419,7 +419,7 @@ impl Server {
let peer = self.peer.clone();
let timeout = self.app_state.executor.sleep(CLEANUP_TIMEOUT);
let pool = self.connection_pool.clone();
- let live_kit_client = self.app_state.live_kit_client.clone();
+ let livekit_client = self.app_state.livekit_client.clone();
let span = info_span!("start server");
self.app_state.executor.spawn_detached(
@@ -464,8 +464,8 @@ impl Server {
for room_id in room_ids {
let mut contacts_to_update = HashSet::default();
let mut canceled_calls_to_user_ids = Vec::new();
- let mut live_kit_room = String::new();
- let mut delete_live_kit_room = false;
+ let mut livekit_room = String::new();
+ let mut delete_livekit_room = false;
if let Some(mut refreshed_room) = app_state
.db
@@ -488,8 +488,8 @@ impl Server {
.extend(refreshed_room.canceled_calls_to_user_ids.iter().copied());
canceled_calls_to_user_ids =
mem::take(&mut refreshed_room.canceled_calls_to_user_ids);
- live_kit_room = mem::take(&mut refreshed_room.room.live_kit_room);
- delete_live_kit_room = refreshed_room.room.participants.is_empty();
+ livekit_room = mem::take(&mut refreshed_room.room.livekit_room);
+ delete_livekit_room = refreshed_room.room.participants.is_empty();
}
{
@@ -540,9 +540,9 @@ impl Server {
}
}
- if let Some(live_kit) = live_kit_client.as_ref() {
- if delete_live_kit_room {
- live_kit.delete_room(live_kit_room).await.trace_err();
+ if let Some(live_kit) = livekit_client.as_ref() {
+ if delete_livekit_room {
+ live_kit.delete_room(livekit_room).await.trace_err();
}
}
}
@@ -1211,15 +1211,15 @@ async fn create_room(
response: Response<proto::CreateRoom>,
session: Session,
) -> Result<()> {
- let live_kit_room = nanoid::nanoid!(30);
+ let livekit_room = nanoid::nanoid!(30);
let live_kit_connection_info = util::maybe!(async {
- let live_kit = session.app_state.live_kit_client.as_ref();
+ let live_kit = session.app_state.livekit_client.as_ref();
let live_kit = live_kit?;
let user_id = session.user_id().to_string();
let token = live_kit
- .room_token(&live_kit_room, &user_id.to_string())
+ .room_token(&livekit_room, &user_id.to_string())
.trace_err()?;
Some(proto::LiveKitConnectionInfo {
@@ -1233,7 +1233,7 @@ async fn create_room(
let room = session
.db()
.await
- .create_room(session.user_id(), session.connection_id, &live_kit_room)
+ .create_room(session.user_id(), session.connection_id, &livekit_room)
.await?;
response.send(proto::CreateRoomResponse {
@@ -1285,22 +1285,22 @@ async fn join_room(
.trace_err();
}
- let live_kit_connection_info =
- if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
- live_kit
- .room_token(
- &joined_room.room.live_kit_room,
- &session.user_id().to_string(),
- )
- .trace_err()
- .map(|token| proto::LiveKitConnectionInfo {
- server_url: live_kit.url().into(),
- token,
- can_publish: true,
- })
- } else {
- None
- };
+ let live_kit_connection_info = if let Some(live_kit) = session.app_state.livekit_client.as_ref()
+ {
+ live_kit
+ .room_token(
+ &joined_room.room.livekit_room,
+ &session.user_id().to_string(),
+ )
+ .trace_err()
+ .map(|token| proto::LiveKitConnectionInfo {
+ server_url: live_kit.url().into(),
+ token,
+ can_publish: true,
+ })
+ } else {
+ None
+ };
response.send(proto::JoinRoomResponse {
room: Some(joined_room.room),
@@ -1507,7 +1507,7 @@ async fn set_room_participant_role(
let user_id = UserId::from_proto(request.user_id);
let role = ChannelRole::from(request.role());
- let (live_kit_room, can_publish) = {
+ let (livekit_room, can_publish) = {
let room = session
.db()
.await
@@ -1519,18 +1519,18 @@ async fn set_room_participant_role(
)
.await?;
- let live_kit_room = room.live_kit_room.clone();
+ let livekit_room = room.livekit_room.clone();
let can_publish = ChannelRole::from(request.role()).can_use_microphone();
room_updated(&room, &session.peer);
- (live_kit_room, can_publish)
+ (livekit_room, can_publish)
};
- if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
+ if let Some(live_kit) = session.app_state.livekit_client.as_ref() {
live_kit
.update_participant(
- live_kit_room.clone(),
+ livekit_room.clone(),
request.user_id.to_string(),
- live_kit_server::proto::ParticipantPermission {
+ livekit_server::proto::ParticipantPermission {
can_subscribe: true,
can_publish,
can_publish_data: can_publish,
@@ -3092,7 +3092,7 @@ async fn join_channel_internal(
let live_kit_connection_info =
session
.app_state
- .live_kit_client
+ .livekit_client
.as_ref()
.and_then(|live_kit| {
let (can_publish, token) = if role == ChannelRole::Guest {
@@ -3100,7 +3100,7 @@ async fn join_channel_internal(
false,
live_kit
.guest_token(
- &joined_room.room.live_kit_room,
+ &joined_room.room.livekit_room,
&session.user_id().to_string(),
)
.trace_err()?,
@@ -3110,7 +3110,7 @@ async fn join_channel_internal(
true,
live_kit
.room_token(
- &joined_room.room.live_kit_room,
+ &joined_room.room.livekit_room,
&session.user_id().to_string(),
)
.trace_err()?,
@@ -4314,8 +4314,8 @@ async fn leave_room_for_session(session: &Session, connection_id: ConnectionId)
let room_id;
let canceled_calls_to_user_ids;
- let live_kit_room;
- let delete_live_kit_room;
+ let livekit_room;
+ let delete_livekit_room;
let room;
let channel;
@@ -4328,8 +4328,8 @@ async fn leave_room_for_session(session: &Session, connection_id: ConnectionId)
room_id = RoomId::from_proto(left_room.room.id);
canceled_calls_to_user_ids = mem::take(&mut left_room.canceled_calls_to_user_ids);
- live_kit_room = mem::take(&mut left_room.room.live_kit_room);
- delete_live_kit_room = left_room.deleted;
+ livekit_room = mem::take(&mut left_room.room.livekit_room);
+ delete_livekit_room = left_room.deleted;
room = mem::take(&mut left_room.room);
channel = mem::take(&mut left_room.channel);
@@ -4369,14 +4369,14 @@ async fn leave_room_for_session(session: &Session, connection_id: ConnectionId)
update_user_contacts(contact_user_id, session).await?;
}
- if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
+ if let Some(live_kit) = session.app_state.livekit_client.as_ref() {
live_kit
- .remove_participant(live_kit_room.clone(), session.user_id().to_string())
+ .remove_participant(livekit_room.clone(), session.user_id().to_string())
.await
.trace_err();
- if delete_live_kit_room {
- live_kit.delete_room(live_kit_room).await.trace_err();
+ if delete_livekit_room {
+ live_kit.delete_room(livekit_room).await.trace_err();
}
}
@@ -1,3 +1,6 @@
+// todo(windows): Actually run the tests
+#![cfg(not(target_os = "windows"))]
+
use std::sync::Arc;
use call::Room;
@@ -107,7 +107,9 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
});
assert!(project_b.read_with(cx_b, |project, cx| project.is_read_only(cx)));
assert!(editor_b.update(cx_b, |e, cx| e.read_only(cx)));
- assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| !room.can_use_microphone(cx)));
+ });
assert!(room_b
.update(cx_b, |room, cx| room.share_microphone(cx))
.await
@@ -133,7 +135,9 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
assert!(editor_b.update(cx_b, |editor, cx| !editor.read_only(cx)));
// B sees themselves as muted, and can unmute.
- assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| room.can_use_microphone(cx)));
+ });
room_b.read_with(cx_b, |room, _| assert!(room.is_muted()));
room_b.update(cx_b, |room, cx| room.toggle_mute(cx));
cx_a.run_until_parked();
@@ -226,7 +230,9 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
let room_b = cx_b
.read(ActiveCall::global)
.update(cx_b, |call, _| call.room().unwrap().clone());
- assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| !room.can_use_microphone(cx)));
+ });
// A tries to grant write access to B, but cannot because B has not
// yet signed the zed CLA.
@@ -244,7 +250,9 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
.unwrap_err();
cx_a.run_until_parked();
assert!(room_b.read_with(cx_b, |room, _| !room.can_share_projects()));
- assert!(room_b.read_with(cx_b, |room, _| !room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| !room.can_use_microphone(cx)));
+ });
// A tries to grant write access to B, but cannot because B has not
// yet signed the zed CLA.
@@ -262,7 +270,9 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
.unwrap();
cx_a.run_until_parked();
assert!(room_b.read_with(cx_b, |room, _| !room.can_share_projects()));
- assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| room.can_use_microphone(cx)));
+ });
// User B signs the zed CLA.
server
@@ -287,5 +297,7 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
.unwrap();
cx_a.run_until_parked();
assert!(room_b.read_with(cx_b, |room, _| room.can_share_projects()));
- assert!(room_b.read_with(cx_b, |room, _| room.can_use_microphone()));
+ cx_b.update(|cx_b| {
+ assert!(room_b.read_with(cx_b, |room, cx| room.can_use_microphone(cx)));
+ });
}
@@ -1,5 +1,5 @@
#![allow(clippy::reversed_empty_ranges)]
-use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
+use crate::tests::TestServer;
use call::{ActiveCall, ParticipantLocation};
use client::ChannelId;
use collab_ui::{
@@ -12,17 +12,11 @@ use gpui::{
View, VisualContext, VisualTestContext,
};
use language::Capability;
-use live_kit_client::MacOSDisplay;
use project::WorktreeSettings;
use rpc::proto::PeerId;
use serde_json::json;
use settings::SettingsStore;
-use workspace::{
- dock::{test::TestPanel, DockPosition},
- item::{test::TestItem, ItemHandle as _},
- shared_screen::SharedScreen,
- SplitDirection, Workspace,
-};
+use workspace::{item::ItemHandle as _, SplitDirection, Workspace};
use super::TestClient;
@@ -428,106 +422,118 @@ async fn test_basic_following(
editor_a1.item_id()
);
- // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
- let display = MacOSDisplay::new();
- active_call_b
- .update(cx_b, |call, cx| call.set_location(None, cx))
- .await
- .unwrap();
- active_call_b
- .update(cx_b, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
+    // TODO: Re-enable this test once we can replace our Swift LiveKit SDK with the Rust SDK
+ #[cfg(not(target_os = "macos"))]
+ {
+ use crate::rpc::RECONNECT_TIMEOUT;
+ use gpui::TestScreenCaptureSource;
+ use workspace::{
+ dock::{test::TestPanel, DockPosition},
+ item::test::TestItem,
+ shared_screen::SharedScreen,
+ };
+
+ // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
+ let display = TestScreenCaptureSource::new();
+ active_call_b
+ .update(cx_b, |call, cx| call.set_location(None, cx))
+ .await
+ .unwrap();
+ cx_b.set_screen_capture_sources(vec![display]);
+ active_call_b
+ .update(cx_b, |call, cx| {
+ call.room()
+ .unwrap()
+ .update(cx, |room, cx| room.share_screen(cx))
})
- })
- .await
- .unwrap();
- executor.run_until_parked();
- let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
- workspace
- .active_item(cx)
- .expect("no active item")
- .downcast::<SharedScreen>()
- .expect("active item isn't a shared screen")
- });
+ .await
+ .unwrap(); // This is what breaks
+ executor.run_until_parked();
+ let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
+ workspace
+ .active_item(cx)
+ .expect("no active item")
+ .downcast::<SharedScreen>()
+ .expect("active item isn't a shared screen")
+ });
- // Client B activates Zed again, which causes the previous editor to become focused again.
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- workspace_a.update(cx_a, |workspace, cx| {
- assert_eq!(
- workspace.active_item(cx).unwrap().item_id(),
- editor_a1.item_id()
- )
- });
+ // Client B activates Zed again, which causes the previous editor to become focused again.
+ active_call_b
+ .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+ .await
+ .unwrap();
+ executor.run_until_parked();
+ workspace_a.update(cx_a, |workspace, cx| {
+ assert_eq!(
+ workspace.active_item(cx).unwrap().item_id(),
+ editor_a1.item_id()
+ )
+ });
- // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.activate_item(&multibuffer_editor_b, true, true, cx)
- });
- executor.run_until_parked();
- workspace_a.update(cx_a, |workspace, cx| {
+ // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
+ workspace_b.update(cx_b, |workspace, cx| {
+ workspace.activate_item(&multibuffer_editor_b, true, true, cx)
+ });
+ executor.run_until_parked();
+ workspace_a.update(cx_a, |workspace, cx| {
+ assert_eq!(
+ workspace.active_item(cx).unwrap().item_id(),
+ multibuffer_editor_a.item_id()
+ )
+ });
+
+ // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
+ let panel = cx_b.new_view(|cx| TestPanel::new(DockPosition::Left, cx));
+ workspace_b.update(cx_b, |workspace, cx| {
+ workspace.add_panel(panel, cx);
+ workspace.toggle_panel_focus::<TestPanel>(cx);
+ });
+ executor.run_until_parked();
assert_eq!(
- workspace.active_item(cx).unwrap().item_id(),
- multibuffer_editor_a.item_id()
- )
- });
+ workspace_a.update(cx_a, |workspace, cx| workspace
+ .active_item(cx)
+ .unwrap()
+ .item_id()),
+ shared_screen.item_id()
+ );
- // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
- let panel = cx_b.new_view(|cx| TestPanel::new(DockPosition::Left, cx));
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.add_panel(panel, cx);
- workspace.toggle_panel_focus::<TestPanel>(cx);
- });
- executor.run_until_parked();
- assert_eq!(
- workspace_a.update(cx_a, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .item_id()),
- shared_screen.item_id()
- );
+ // Toggling the focus back to the pane causes client A to return to the multibuffer.
+ workspace_b.update(cx_b, |workspace, cx| {
+ workspace.toggle_panel_focus::<TestPanel>(cx);
+ });
+ executor.run_until_parked();
+ workspace_a.update(cx_a, |workspace, cx| {
+ assert_eq!(
+ workspace.active_item(cx).unwrap().item_id(),
+ multibuffer_editor_a.item_id()
+ )
+ });
- // Toggling the focus back to the pane causes client A to return to the multibuffer.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.toggle_panel_focus::<TestPanel>(cx);
- });
- executor.run_until_parked();
- workspace_a.update(cx_a, |workspace, cx| {
+ // Client B activates an item that doesn't implement following,
+ // so the previously-opened screen-sharing item gets activated.
+ let unfollowable_item = cx_b.new_view(TestItem::new);
+ workspace_b.update(cx_b, |workspace, cx| {
+ workspace.active_pane().update(cx, |pane, cx| {
+ pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
+ })
+ });
+ executor.run_until_parked();
assert_eq!(
- workspace.active_item(cx).unwrap().item_id(),
- multibuffer_editor_a.item_id()
- )
- });
-
- // Client B activates an item that doesn't implement following,
- // so the previously-opened screen-sharing item gets activated.
- let unfollowable_item = cx_b.new_view(TestItem::new);
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
- })
- });
- executor.run_until_parked();
- assert_eq!(
- workspace_a.update(cx_a, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .item_id()),
- shared_screen.item_id()
- );
+ workspace_a.update(cx_a, |workspace, cx| workspace
+ .active_item(cx)
+ .unwrap()
+ .item_id()),
+ shared_screen.item_id()
+ );
- // Following interrupts when client B disconnects.
- client_b.disconnect(&cx_b.to_async());
- executor.advance_clock(RECONNECT_TIMEOUT);
- assert_eq!(
- workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
- None
- );
+ // Following interrupts when client B disconnects.
+ client_b.disconnect(&cx_b.to_async());
+ executor.advance_clock(RECONNECT_TIMEOUT);
+ assert_eq!(
+ workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
+ None
+ );
+ }
}
#[gpui::test]
@@ -25,7 +25,6 @@ use language::{
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter,
Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
};
-use live_kit_client::MacOSDisplay;
use lsp::LanguageServerId;
use parking_lot::Mutex;
use project::lsp_store::FormatTarget;
@@ -241,56 +240,60 @@ async fn test_basic_calls(
}
);
- // User A shares their screen
- let display = MacOSDisplay::new();
- let events_b = active_call_events(cx_b);
- let events_c = active_call_events(cx_c);
- active_call_a
- .update(cx_a, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
+    // TODO: Re-enable this test once we can replace our Swift LiveKit SDK with the Rust SDK
+ #[cfg(not(target_os = "macos"))]
+ {
+ // User A shares their screen
+ let display = gpui::TestScreenCaptureSource::new();
+ let events_b = active_call_events(cx_b);
+ let events_c = active_call_events(cx_c);
+ cx_a.set_screen_capture_sources(vec![display]);
+ active_call_a
+ .update(cx_a, |call, cx| {
+ call.room()
+ .unwrap()
+ .update(cx, |room, cx| room.share_screen(cx))
})
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
+ .await
+ .unwrap();
- // User B observes the remote screen sharing track.
- assert_eq!(events_b.borrow().len(), 1);
- let event_b = events_b.borrow().first().unwrap().clone();
- if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b {
- assert_eq!(participant_id, client_a.peer_id().unwrap());
+ executor.run_until_parked();
- room_b.read_with(cx_b, |room, _| {
- assert_eq!(
- room.remote_participants()[&client_a.user_id().unwrap()]
- .video_tracks
- .len(),
- 1
- );
- });
- } else {
- panic!("unexpected event")
- }
+ // User B observes the remote screen sharing track.
+ assert_eq!(events_b.borrow().len(), 1);
+ let event_b = events_b.borrow().first().unwrap().clone();
+ if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b {
+ assert_eq!(participant_id, client_a.peer_id().unwrap());
- // User C observes the remote screen sharing track.
- assert_eq!(events_c.borrow().len(), 1);
- let event_c = events_c.borrow().first().unwrap().clone();
- if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c {
- assert_eq!(participant_id, client_a.peer_id().unwrap());
+ room_b.read_with(cx_b, |room, _| {
+ assert_eq!(
+ room.remote_participants()[&client_a.user_id().unwrap()]
+ .video_tracks
+ .len(),
+ 1
+ );
+ });
+ } else {
+ panic!("unexpected event")
+ }
- room_c.read_with(cx_c, |room, _| {
- assert_eq!(
- room.remote_participants()[&client_a.user_id().unwrap()]
- .video_tracks
- .len(),
- 1
- );
- });
- } else {
- panic!("unexpected event")
+ // User C observes the remote screen sharing track.
+ assert_eq!(events_c.borrow().len(), 1);
+ let event_c = events_c.borrow().first().unwrap().clone();
+ if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c {
+ assert_eq!(participant_id, client_a.peer_id().unwrap());
+
+ room_c.read_with(cx_c, |room, _| {
+ assert_eq!(
+ room.remote_participants()[&client_a.user_id().unwrap()]
+ .video_tracks
+ .len(),
+ 1
+ );
+ });
+ } else {
+ panic!("unexpected event")
+ }
}
// User A leaves the room.
@@ -329,7 +332,7 @@ async fn test_basic_calls(
// to automatically leave the room. User C leaves the room as well because
// nobody else is in there.
server
- .test_live_kit_server
+ .test_livekit_server
.disconnect_client(client_b.user_id().unwrap().to_string())
.await;
executor.run_until_parked();
@@ -844,7 +847,7 @@ async fn test_client_disconnecting_from_room(
// User B gets disconnected from the LiveKit server, which causes it
// to automatically leave the room.
server
- .test_live_kit_server
+ .test_livekit_server
.disconnect_client(client_b.user_id().unwrap().to_string())
.await;
executor.run_until_parked();
@@ -1943,7 +1946,7 @@ async fn test_mute_deafen(
room_a.read_with(cx_a, |room, _| assert!(!room.is_muted()));
room_b.read_with(cx_b, |room, _| assert!(!room.is_muted()));
- // Users A and B are both muted.
+ // Users A and B are both unmuted.
assert_eq!(
participant_audio_state(&room_a, cx_a),
&[ParticipantAudioState {
@@ -2075,7 +2078,17 @@ async fn test_mute_deafen(
audio_tracks_playing: participant
.audio_tracks
.values()
- .map(|track| track.is_playing())
+ .map({
+ #[cfg(target_os = "macos")]
+ {
+ |track| track.is_playing()
+ }
+
+ #[cfg(not(target_os = "macos"))]
+ {
+ |(track, _)| track.rtc_track().enabled()
+ }
+ })
.collect(),
})
.collect::<Vec<_>>()
@@ -6015,6 +6028,8 @@ async fn test_contact_requests(
}
}
+// TODO: Re-enable this test once we can replace our Swift LiveKit SDK with the Rust SDK
+#[cfg(not(target_os = "macos"))]
#[gpui::test(iterations = 10)]
async fn test_join_call_after_screen_was_shared(
executor: BackgroundExecutor,
@@ -6057,13 +6072,13 @@ async fn test_join_call_after_screen_was_shared(
assert_eq!(call_b.calling_user.github_login, "user_a");
// User A shares their screen
- let display = MacOSDisplay::new();
+ let display = gpui::TestScreenCaptureSource::new();
+ cx_a.set_screen_capture_sources(vec![display]);
active_call_a
.update(cx_a, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
- })
+ call.room()
+ .unwrap()
+ .update(cx, |room, cx| room.share_screen(cx))
})
.await
.unwrap();
@@ -45,9 +45,15 @@ use std::{
};
use workspace::{Workspace, WorkspaceStore};
+#[cfg(not(target_os = "macos"))]
+use livekit_client::test::TestServer as LivekitTestServer;
+
+#[cfg(target_os = "macos")]
+use livekit_client_macos::TestServer as LivekitTestServer;
+
pub struct TestServer {
pub app_state: Arc<AppState>,
- pub test_live_kit_server: Arc<live_kit_client::TestServer>,
+ pub test_livekit_server: Arc<LivekitTestServer>,
server: Arc<Server>,
next_github_user_id: i32,
connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
@@ -79,7 +85,7 @@ pub struct ContactsSummary {
impl TestServer {
pub async fn start(deterministic: BackgroundExecutor) -> Self {
- static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
+ static NEXT_LIVEKIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
let use_postgres = env::var("USE_POSTGRES").ok();
let use_postgres = use_postgres.as_deref();
@@ -88,16 +94,16 @@ impl TestServer {
} else {
TestDb::sqlite(deterministic.clone())
};
- let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
- let live_kit_server = live_kit_client::TestServer::create(
- format!("http://livekit.{}.test", live_kit_server_id),
- format!("devkey-{}", live_kit_server_id),
- format!("secret-{}", live_kit_server_id),
+ let livekit_server_id = NEXT_LIVEKIT_SERVER_ID.fetch_add(1, SeqCst);
+ let livekit_server = LivekitTestServer::create(
+ format!("http://livekit.{}.test", livekit_server_id),
+ format!("devkey-{}", livekit_server_id),
+ format!("secret-{}", livekit_server_id),
deterministic.clone(),
)
.unwrap();
let executor = Executor::Deterministic(deterministic.clone());
- let app_state = Self::build_app_state(&test_db, &live_kit_server, executor.clone()).await;
+ let app_state = Self::build_app_state(&test_db, &livekit_server, executor.clone()).await;
let epoch = app_state
.db
.create_server(&app_state.config.zed_environment)
@@ -114,7 +120,7 @@ impl TestServer {
forbid_connections: Default::default(),
next_github_user_id: 0,
_test_db: test_db,
- test_live_kit_server: live_kit_server,
+ test_livekit_server: livekit_server,
}
}
@@ -500,13 +506,13 @@ impl TestServer {
pub async fn build_app_state(
test_db: &TestDb,
- live_kit_test_server: &live_kit_client::TestServer,
+ livekit_test_server: &LivekitTestServer,
executor: Executor,
) -> Arc<AppState> {
Arc::new(AppState {
db: test_db.db().clone(),
llm_db: None,
- live_kit_client: Some(Arc::new(live_kit_test_server.create_api_client())),
+ livekit_client: Some(Arc::new(livekit_test_server.create_api_client())),
blob_store_client: None,
stripe_client: None,
stripe_billing: None,
@@ -520,9 +526,9 @@ impl TestServer {
database_max_connections: 0,
api_token: "".into(),
invite_link_prefix: "".into(),
- live_kit_server: None,
- live_kit_key: None,
- live_kit_secret: None,
+ livekit_server: None,
+ livekit_key: None,
+ livekit_secret: None,
llm_database_url: None,
llm_database_max_connections: None,
llm_database_migrations_path: None,
@@ -572,7 +578,7 @@ impl Deref for TestServer {
impl Drop for TestServer {
fn drop(&mut self) {
self.server.teardown();
- self.test_live_kit_server.teardown().unwrap();
+ self.test_livekit_server.teardown().unwrap();
}
}
@@ -474,11 +474,10 @@ impl CollabPanel {
project_id: project.id,
worktree_root_names: project.worktree_root_names.clone(),
host_user_id: participant.user.id,
- is_last: projects.peek().is_none()
- && participant.video_tracks.is_empty(),
+ is_last: projects.peek().is_none() && !participant.has_video_tracks(),
});
}
- if !participant.video_tracks.is_empty() {
+ if participant.has_video_tracks() {
self.entries.push(ListEntry::ParticipantScreen {
peer_id: Some(participant.peer_id),
is_last: true,
@@ -50,6 +50,7 @@ mod macos {
fn generate_dispatch_bindings() {
println!("cargo:rustc-link-lib=framework=System");
+ println!("cargo:rustc-link-lib=framework=ScreenCaptureKit");
println!("cargo:rerun-if-changed=src/platform/mac/dispatch.h");
let bindings = bindgen::Builder::default()
@@ -33,8 +33,8 @@ use crate::{
Entity, EventEmitter, ForegroundExecutor, Global, KeyBinding, Keymap, Keystroke, LayoutId,
Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform, PlatformDisplay, Point,
PromptBuilder, PromptHandle, PromptLevel, Render, RenderablePromptHandle, Reservation,
- SharedString, SubscriberSet, Subscription, SvgRenderer, Task, TextSystem, View, ViewContext,
- Window, WindowAppearance, WindowContext, WindowHandle, WindowId,
+ ScreenCaptureSource, SharedString, SubscriberSet, Subscription, SvgRenderer, Task, TextSystem,
+ View, ViewContext, Window, WindowAppearance, WindowContext, WindowHandle, WindowId,
};
mod async_context;
@@ -599,6 +599,13 @@ impl AppContext {
self.platform.primary_display()
}
+ /// Returns a list of available screen capture sources.
+ pub fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ self.platform.screen_capture_sources()
+ }
+
/// Returns the display with the given ID, if one exists.
pub fn find_display(&self, id: DisplayId) -> Option<Rc<dyn PlatformDisplay>> {
self.displays()
@@ -4,8 +4,8 @@ use crate::{
Element, Empty, Entity, EventEmitter, ForegroundExecutor, Global, InputEvent, Keystroke, Model,
ModelContext, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent,
MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher,
- TestPlatform, TestWindow, TextSystem, View, ViewContext, VisualContext, WindowBounds,
- WindowContext, WindowHandle, WindowOptions,
+ TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, View, ViewContext,
+ VisualContext, WindowBounds, WindowContext, WindowHandle, WindowOptions,
};
use anyhow::{anyhow, bail};
use futures::{channel::oneshot, Stream, StreamExt};
@@ -287,6 +287,12 @@ impl TestAppContext {
self.test_window(window_handle).simulate_resize(size);
}
+ /// Causes the given sources to be returned if the application queries for screen
+ /// capture sources.
+ pub fn set_screen_capture_sources(&self, sources: Vec<TestScreenCaptureSource>) {
+ self.test_platform.set_screen_capture_sources(sources);
+ }
+
/// Returns all windows open in the test.
pub fn windows(&self) -> Vec<AnyWindowHandle> {
self.app.borrow().windows().clone()
@@ -704,6 +704,11 @@ pub struct Bounds<T: Clone + Default + Debug> {
pub size: Size<T>,
}
+/// Create a bounds with the given origin and size
+pub fn bounds<T: Clone + Default + Debug>(origin: Point<T>, size: Size<T>) -> Bounds<T> {
+ Bounds { origin, size }
+}
+
impl Bounds<Pixels> {
/// Generate a centered bounds for the given display or primary display if none is provided
pub fn centered(display_id: Option<DisplayId>, size: Size<Pixels>, cx: &AppContext) -> Self {
@@ -71,6 +71,9 @@ pub(crate) use test::*;
#[cfg(target_os = "windows")]
pub(crate) use windows::*;
+#[cfg(any(test, feature = "test-support"))]
+pub use test::TestScreenCaptureSource;
+
#[cfg(target_os = "macos")]
pub(crate) fn current_platform(headless: bool) -> Rc<dyn Platform> {
Rc::new(MacPlatform::new(headless))
@@ -150,6 +153,10 @@ pub(crate) trait Platform: 'static {
None
}
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>>;
+
fn open_window(
&self,
handle: AnyWindowHandle,
@@ -229,6 +236,25 @@ pub trait PlatformDisplay: Send + Sync + Debug {
}
}
+/// A source of on-screen video content that can be captured.
+pub trait ScreenCaptureSource {
+ /// Returns the video resolution of this source.
+ fn resolution(&self) -> Result<Size<Pixels>>;
+
+    /// Starts capturing video from this source, invoking the given callback
+ /// with each frame.
+ fn stream(
+ &self,
+ frame_callback: Box<dyn Fn(ScreenCaptureFrame)>,
+ ) -> oneshot::Receiver<Result<Box<dyn ScreenCaptureStream>>>;
+}
+
+/// A video stream captured from a screen.
+pub trait ScreenCaptureStream {}
+
+/// A frame of video captured from a screen.
+pub struct ScreenCaptureFrame(pub PlatformScreenCaptureFrame);
+
/// An opaque identifier for a hardware display
#[derive(PartialEq, Eq, Hash, Copy, Clone)]
pub struct DisplayId(pub(crate) u32);
@@ -20,3 +20,5 @@ pub(crate) use text_system::*;
pub(crate) use wayland::*;
#[cfg(feature = "x11")]
pub(crate) use x11::*;
+
+pub(crate) type PlatformScreenCaptureFrame = ();
@@ -35,8 +35,8 @@ use crate::{
px, Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId,
ForegroundExecutor, Keymap, Keystroke, LinuxDispatcher, Menu, MenuItem, Modifiers, OwnedMenu,
PathPromptOptions, Pixels, Platform, PlatformDisplay, PlatformInputHandler, PlatformTextSystem,
- PlatformWindow, Point, PromptLevel, Result, SemanticVersion, SharedString, Size, Task,
- WindowAppearance, WindowOptions, WindowParams,
+ PlatformWindow, Point, PromptLevel, Result, ScreenCaptureSource, SemanticVersion, SharedString,
+ Size, Task, WindowAppearance, WindowOptions, WindowParams,
};
pub(crate) const SCROLL_LINES: f32 = 3.0;
@@ -242,6 +242,14 @@ impl<P: LinuxClient + 'static> Platform for P {
self.displays()
}
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ let (mut tx, rx) = oneshot::channel();
+ tx.send(Err(anyhow!("screen capture not implemented"))).ok();
+ rx
+ }
+
fn active_window(&self) -> Option<AnyWindowHandle> {
self.active_window()
}
@@ -4,12 +4,14 @@ mod dispatcher;
mod display;
mod display_link;
mod events;
+mod screen_capture;
#[cfg(not(feature = "macos-blade"))]
mod metal_atlas;
#[cfg(not(feature = "macos-blade"))]
pub mod metal_renderer;
+use media::core_video::CVImageBuffer;
#[cfg(not(feature = "macos-blade"))]
use metal_renderer as renderer;
@@ -49,6 +51,9 @@ pub(crate) use window::*;
#[cfg(feature = "font-kit")]
pub(crate) use text_system::*;
+/// A frame of video captured from a screen.
+pub(crate) type PlatformScreenCaptureFrame = CVImageBuffer;
+
trait BoolExt {
fn to_objc(self) -> BOOL;
}
@@ -1,14 +1,14 @@
use super::{
attributed_string::{NSAttributedString, NSMutableAttributedString},
events::key_to_native,
- BoolExt,
+ renderer, screen_capture, BoolExt,
};
use crate::{
hash, Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem,
ClipboardString, CursorStyle, ForegroundExecutor, Image, ImageFormat, Keymap, MacDispatcher,
MacDisplay, MacWindow, Menu, MenuItem, PathPromptOptions, Platform, PlatformDisplay,
- PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task, WindowAppearance,
- WindowParams,
+ PlatformTextSystem, PlatformWindow, Result, ScreenCaptureSource, SemanticVersion, Task,
+ WindowAppearance, WindowParams,
};
use anyhow::anyhow;
use block::ConcreteBlock;
@@ -58,8 +58,6 @@ use std::{
};
use strum::IntoEnumIterator;
-use super::renderer;
-
#[allow(non_upper_case_globals)]
const NSUTF8StringEncoding: NSUInteger = 4;
@@ -552,6 +550,12 @@ impl Platform for MacPlatform {
.collect()
}
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ screen_capture::get_sources()
+ }
+
fn active_window(&self) -> Option<AnyWindowHandle> {
MacWindow::active_window()
}
@@ -0,0 +1,239 @@
+use crate::{
+ platform::{ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream},
+ px, size, Pixels, Size,
+};
+use anyhow::{anyhow, Result};
+use block::ConcreteBlock;
+use cocoa::{
+ base::{id, nil, YES},
+ foundation::NSArray,
+};
+use core_foundation::base::TCFType;
+use ctor::ctor;
+use futures::channel::oneshot;
+use media::core_media::{CMSampleBuffer, CMSampleBufferRef};
+use metal::NSInteger;
+use objc::{
+ class,
+ declare::ClassDecl,
+ msg_send,
+ runtime::{Class, Object, Sel},
+ sel, sel_impl,
+};
+use std::{cell::RefCell, ffi::c_void, mem, ptr, rc::Rc};
+
+#[derive(Clone)]
+pub struct MacScreenCaptureSource {
+ sc_display: id,
+}
+
+pub struct MacScreenCaptureStream {
+ sc_stream: id,
+ sc_stream_output: id,
+}
+
+#[link(name = "ScreenCaptureKit", kind = "framework")]
+extern "C" {}
+
+static mut DELEGATE_CLASS: *const Class = ptr::null();
+static mut OUTPUT_CLASS: *const Class = ptr::null();
+const FRAME_CALLBACK_IVAR: &str = "frame_callback";
+
+#[allow(non_upper_case_globals)]
+const SCStreamOutputTypeScreen: NSInteger = 0;
+
+impl ScreenCaptureSource for MacScreenCaptureSource {
+ fn resolution(&self) -> Result<Size<Pixels>> {
+ unsafe {
+ let width: i64 = msg_send![self.sc_display, width];
+ let height: i64 = msg_send![self.sc_display, height];
+ Ok(size(px(width as f32), px(height as f32)))
+ }
+ }
+
+ fn stream(
+ &self,
+ frame_callback: Box<dyn Fn(ScreenCaptureFrame)>,
+ ) -> oneshot::Receiver<Result<Box<dyn ScreenCaptureStream>>> {
+ unsafe {
+ let stream: id = msg_send![class!(SCStream), alloc];
+ let filter: id = msg_send![class!(SCContentFilter), alloc];
+ let configuration: id = msg_send![class!(SCStreamConfiguration), alloc];
+ let delegate: id = msg_send![DELEGATE_CLASS, alloc];
+ let output: id = msg_send![OUTPUT_CLASS, alloc];
+
+ let excluded_windows = NSArray::array(nil);
+ let filter: id = msg_send![filter, initWithDisplay:self.sc_display excludingWindows:excluded_windows];
+ let configuration: id = msg_send![configuration, init];
+ let delegate: id = msg_send![delegate, init];
+ let output: id = msg_send![output, init];
+
+ output.as_mut().unwrap().set_ivar(
+ FRAME_CALLBACK_IVAR,
+ Box::into_raw(Box::new(frame_callback)) as *mut c_void,
+ );
+
+ let stream: id = msg_send![stream, initWithFilter:filter configuration:configuration delegate:delegate];
+
+ let (mut tx, rx) = oneshot::channel();
+
+ let mut error: id = nil;
+ let _: () = msg_send![stream, addStreamOutput:output type:SCStreamOutputTypeScreen sampleHandlerQueue:0 error:&mut error as *mut id];
+ if error != nil {
+ let message: id = msg_send![error, localizedDescription];
+ tx.send(Err(anyhow!("failed to add stream output {message:?}")))
+ .ok();
+ return rx;
+ }
+
+ let tx = Rc::new(RefCell::new(Some(tx)));
+ let handler = ConcreteBlock::new({
+ move |error: id| {
+ let result = if error == nil {
+ let stream = MacScreenCaptureStream {
+ sc_stream: stream,
+ sc_stream_output: output,
+ };
+ Ok(Box::new(stream) as Box<dyn ScreenCaptureStream>)
+ } else {
+ let message: id = msg_send![error, localizedDescription];
+ Err(anyhow!("failed to stop screen capture stream {message:?}"))
+ };
+ if let Some(tx) = tx.borrow_mut().take() {
+ tx.send(result).ok();
+ }
+ }
+ });
+ let handler = handler.copy();
+ let _: () = msg_send![stream, startCaptureWithCompletionHandler:handler];
+ rx
+ }
+ }
+}
+
+impl Drop for MacScreenCaptureSource {
+ fn drop(&mut self) {
+ unsafe {
+ let _: () = msg_send![self.sc_display, release];
+ }
+ }
+}
+
+impl ScreenCaptureStream for MacScreenCaptureStream {}
+
+impl Drop for MacScreenCaptureStream {
+ fn drop(&mut self) {
+ unsafe {
+ let mut error: id = nil;
+ let _: () = msg_send![self.sc_stream, removeStreamOutput:self.sc_stream_output type:SCStreamOutputTypeScreen error:&mut error as *mut _];
+ if error != nil {
+ let message: id = msg_send![error, localizedDescription];
+ log::error!("failed to add stream output {message:?}");
+ }
+
+ let handler = ConcreteBlock::new(move |error: id| {
+ if error != nil {
+ let message: id = msg_send![error, localizedDescription];
+ log::error!("failed to stop screen capture stream {message:?}");
+ }
+ });
+ let block = handler.copy();
+ let _: () = msg_send![self.sc_stream, stopCaptureWithCompletionHandler:block];
+ let _: () = msg_send![self.sc_stream, release];
+ let _: () = msg_send![self.sc_stream_output, release];
+ }
+ }
+}
+
+pub(crate) fn get_sources() -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ unsafe {
+ let (mut tx, rx) = oneshot::channel();
+ let tx = Rc::new(RefCell::new(Some(tx)));
+
+ let block = ConcreteBlock::new(move |shareable_content: id, error: id| {
+ let Some(mut tx) = tx.borrow_mut().take() else {
+ return;
+ };
+ let result = if error == nil {
+ let displays: id = msg_send![shareable_content, displays];
+ let mut result = Vec::new();
+ for i in 0..displays.count() {
+ let display = displays.objectAtIndex(i);
+ let source = MacScreenCaptureSource {
+ sc_display: msg_send![display, retain],
+ };
+ result.push(Box::new(source) as Box<dyn ScreenCaptureSource>);
+ }
+ Ok(result)
+ } else {
+ let msg: id = msg_send![error, localizedDescription];
+ Err(anyhow!("Failed to register: {:?}", msg))
+ };
+ tx.send(result).ok();
+ });
+ let block = block.copy();
+
+ let _: () = msg_send![
+ class!(SCShareableContent),
+ getShareableContentExcludingDesktopWindows:YES
+ onScreenWindowsOnly:YES
+ completionHandler:block];
+ rx
+ }
+}
+
+#[ctor]
+unsafe fn build_classes() {
+ let mut decl = ClassDecl::new("GPUIStreamDelegate", class!(NSObject)).unwrap();
+ decl.add_method(
+ sel!(outputVideoEffectDidStartForStream:),
+ output_video_effect_did_start_for_stream as extern "C" fn(&Object, Sel, id),
+ );
+ decl.add_method(
+ sel!(outputVideoEffectDidStopForStream:),
+ output_video_effect_did_stop_for_stream as extern "C" fn(&Object, Sel, id),
+ );
+ decl.add_method(
+ sel!(stream:didStopWithError:),
+ stream_did_stop_with_error as extern "C" fn(&Object, Sel, id, id),
+ );
+ DELEGATE_CLASS = decl.register();
+
+ let mut decl = ClassDecl::new("GPUIStreamOutput", class!(NSObject)).unwrap();
+ decl.add_method(
+ sel!(stream:didOutputSampleBuffer:ofType:),
+ stream_did_output_sample_buffer_of_type as extern "C" fn(&Object, Sel, id, id, NSInteger),
+ );
+ decl.add_ivar::<*mut c_void>(FRAME_CALLBACK_IVAR);
+
+ OUTPUT_CLASS = decl.register();
+}
+
+extern "C" fn output_video_effect_did_start_for_stream(_this: &Object, _: Sel, _stream: id) {}
+
+extern "C" fn output_video_effect_did_stop_for_stream(_this: &Object, _: Sel, _stream: id) {}
+
+extern "C" fn stream_did_stop_with_error(_this: &Object, _: Sel, _stream: id, _error: id) {}
+
+extern "C" fn stream_did_output_sample_buffer_of_type(
+ this: &Object,
+ _: Sel,
+ _stream: id,
+ sample_buffer: id,
+ buffer_type: NSInteger,
+) {
+ if buffer_type != SCStreamOutputTypeScreen {
+ return;
+ }
+
+ unsafe {
+ let sample_buffer = sample_buffer as CMSampleBufferRef;
+ let sample_buffer = CMSampleBuffer::wrap_under_get_rule(sample_buffer);
+ if let Some(buffer) = sample_buffer.image_buffer() {
+ let callback: Box<Box<dyn Fn(ScreenCaptureFrame)>> =
+ Box::from_raw(*this.get_ivar::<*mut c_void>(FRAME_CALLBACK_IVAR) as *mut _);
+ callback(ScreenCaptureFrame(buffer));
+ mem::forget(callback);
+ }
+ }
+}
@@ -7,3 +7,5 @@ pub(crate) use dispatcher::*;
pub(crate) use display::*;
pub(crate) use platform::*;
pub(crate) use window::*;
+
+pub use platform::TestScreenCaptureSource;
@@ -1,7 +1,7 @@
use crate::{
- AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, Keymap,
- Platform, PlatformDisplay, PlatformTextSystem, Task, TestDisplay, TestWindow, WindowAppearance,
- WindowParams,
+ px, size, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor,
+ Keymap, Platform, PlatformDisplay, PlatformTextSystem, ScreenCaptureFrame, ScreenCaptureSource,
+ ScreenCaptureStream, Task, TestDisplay, TestWindow, WindowAppearance, WindowParams,
};
use anyhow::Result;
use collections::VecDeque;
@@ -31,6 +31,7 @@ pub(crate) struct TestPlatform {
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
current_primary_item: Mutex<Option<ClipboardItem>>,
pub(crate) prompts: RefCell<TestPrompts>,
+ screen_capture_sources: RefCell<Vec<TestScreenCaptureSource>>,
pub opened_url: RefCell<Option<String>>,
pub text_system: Arc<dyn PlatformTextSystem>,
#[cfg(target_os = "windows")]
@@ -38,6 +39,31 @@ pub(crate) struct TestPlatform {
weak: Weak<Self>,
}
+#[derive(Clone)]
+/// A fake screen capture source, used for testing.
+pub struct TestScreenCaptureSource {}
+
+pub struct TestScreenCaptureStream {}
+
+impl ScreenCaptureSource for TestScreenCaptureSource {
+ fn resolution(&self) -> Result<crate::Size<crate::Pixels>> {
+ Ok(size(px(1.), px(1.)))
+ }
+
+ fn stream(
+ &self,
+ _frame_callback: Box<dyn Fn(ScreenCaptureFrame)>,
+ ) -> oneshot::Receiver<Result<Box<dyn ScreenCaptureStream>>> {
+ let (mut tx, rx) = oneshot::channel();
+ let stream = TestScreenCaptureStream {};
+ tx.send(Ok(Box::new(stream) as Box<dyn ScreenCaptureStream>))
+ .ok();
+ rx
+ }
+}
+
+impl ScreenCaptureStream for TestScreenCaptureStream {}
+
#[derive(Default)]
pub(crate) struct TestPrompts {
multiple_choice: VecDeque<oneshot::Sender<usize>>,
@@ -72,6 +98,7 @@ impl TestPlatform {
background_executor: executor,
foreground_executor,
prompts: Default::default(),
+ screen_capture_sources: Default::default(),
active_cursor: Default::default(),
active_display: Rc::new(TestDisplay::new()),
active_window: Default::default(),
@@ -114,6 +141,10 @@ impl TestPlatform {
!self.prompts.borrow().multiple_choice.is_empty()
}
+ pub(crate) fn set_screen_capture_sources(&self, sources: Vec<TestScreenCaptureSource>) {
+ *self.screen_capture_sources.borrow_mut() = sources;
+ }
+
pub(crate) fn prompt(&self, msg: &str, detail: Option<&str>) -> oneshot::Receiver<usize> {
let (tx, rx) = oneshot::channel();
self.background_executor()
@@ -202,6 +233,20 @@ impl Platform for TestPlatform {
Some(self.active_display.clone())
}
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ let (mut tx, rx) = oneshot::channel();
+ tx.send(Ok(self
+ .screen_capture_sources
+ .borrow()
+ .iter()
+ .map(|source| Box::new(source.clone()) as Box<dyn ScreenCaptureSource>)
+ .collect()))
+ .ok();
+ rx
+ }
+
fn active_window(&self) -> Option<crate::AnyWindowHandle> {
self.active_window
.borrow()
@@ -330,6 +375,13 @@ impl Platform for TestPlatform {
}
}
+impl TestScreenCaptureSource {
+ /// Create a fake screen capture source, for testing.
+ pub fn new() -> Self {
+ Self {}
+ }
+}
+
#[cfg(target_os = "windows")]
impl Drop for TestPlatform {
fn drop(&mut self) {
@@ -21,3 +21,5 @@ pub(crate) use window::*;
pub(crate) use wrapper::*;
pub(crate) use windows::Win32::Foundation::HWND;
+
+pub(crate) type PlatformScreenCaptureFrame = ();
@@ -325,6 +325,14 @@ impl Platform for WindowsPlatform {
WindowsDisplay::primary_monitor().map(|display| Rc::new(display) as Rc<dyn PlatformDisplay>)
}
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ let (mut tx, rx) = oneshot::channel();
+ tx.send(Err(anyhow!("screen capture not implemented"))).ok();
+ rx
+ }
+
fn active_window(&self) -> Option<AnyWindowHandle> {
let active_window_hwnd = unsafe { GetActiveWindow() };
self.try_get_windows_inner_from_hwnd(active_window_hwnd)
@@ -20,7 +20,7 @@ bytes.workspace = true
anyhow.workspace = true
derive_more.workspace = true
futures.workspace = true
-http = "1.1"
+http.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true
@@ -1,2 +1,2 @@
-[live_kit_client_test]
+[livekit_client_test]
rustflags = ["-C", "link-args=-ObjC"]
@@ -0,0 +1,65 @@
+[package]
+name = "livekit_client"
+version = "0.1.0"
+edition = "2021"
+description = "Logic for using LiveKit with GPUI"
+publish = false
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/livekit_client.rs"
+doctest = false
+
+[[example]]
+name = "test_app"
+
+[features]
+no-webrtc = []
+test-support = [
+ "collections/test-support",
+ "gpui/test-support",
+ "nanoid",
+]
+
+[dependencies]
+anyhow.workspace = true
+async-trait.workspace = true
+collections.workspace = true
+cpal = "0.15"
+futures.workspace = true
+gpui.workspace = true
+http_2 = { package = "http", version = "0.2.1" }
+livekit_server.workspace = true
+log.workspace = true
+media.workspace = true
+nanoid = { workspace = true, optional = true}
+parking_lot.workspace = true
+postage.workspace = true
+util.workspace = true
+http_client.workspace = true
+smallvec.workspace = true
+image.workspace = true
+
+[target.'cfg(not(target_os = "windows"))'.dependencies]
+livekit.workspace = true
+
+[target.'cfg(target_os = "macos")'.dependencies]
+core-foundation.workspace = true
+coreaudio-rs = "0.12.1"
+
+[dev-dependencies]
+collections = { workspace = true, features = ["test-support"] }
+gpui = { workspace = true, features = ["test-support"] }
+nanoid.workspace = true
+sha2.workspace = true
+simplelog.workspace = true
+
+[build-dependencies]
+serde.workspace = true
+serde_json.workspace = true
+
+[package.metadata.cargo-machete]
+ignored = ["serde_json"]
@@ -0,0 +1,442 @@
+#![cfg_attr(windows, allow(unused))]
+// TODO: For some reason the macOS build complains about the import of postage::stream::Stream,
+// but removing it causes compile errors.
+#![cfg_attr(target_os = "macos", allow(unused_imports))]
+
+use gpui::{
+ actions, bounds, div, point,
+ prelude::{FluentBuilder as _, IntoElement},
+ px, rgb, size, AsyncAppContext, Bounds, InteractiveElement, KeyBinding, Menu, MenuItem,
+ ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
+ StatefulInteractiveElement as _, Styled, Task, View, ViewContext, VisualContext, WindowBounds,
+ WindowHandle, WindowOptions,
+};
+#[cfg(not(target_os = "windows"))]
+use livekit_client::{
+ capture_local_audio_track, capture_local_video_track,
+ id::ParticipantIdentity,
+ options::{TrackPublishOptions, VideoCodec},
+ participant::{Participant, RemoteParticipant},
+ play_remote_audio_track,
+ publication::{LocalTrackPublication, RemoteTrackPublication},
+ track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource},
+ AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions,
+};
+#[cfg(not(target_os = "windows"))]
+use postage::stream::Stream;
+
+#[cfg(target_os = "windows")]
+use livekit_client::{
+ participant::{Participant, RemoteParticipant},
+ publication::{LocalTrackPublication, RemoteTrackPublication},
+ track::{LocalTrack, RemoteTrack, RemoteVideoTrack},
+ AudioStream, RemoteVideoTrackView, Room, RoomEvent,
+};
+
+use livekit_server::token::{self, VideoGrant};
+use log::LevelFilter;
+use simplelog::SimpleLogger;
+
+actions!(livekit_client, [Quit]);
+
+#[cfg(windows)]
+fn main() {}
+
+#[cfg(not(windows))]
+fn main() {
+ SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
+
+ gpui::App::new().run(|cx| {
+ livekit_client::init(
+ cx.background_executor().dispatcher.clone(),
+ cx.http_client(),
+ );
+
+ #[cfg(any(test, feature = "test-support"))]
+ println!("USING TEST LIVEKIT");
+
+ #[cfg(not(any(test, feature = "test-support")))]
+ println!("USING REAL LIVEKIT");
+
+ cx.activate(true);
+ cx.on_action(quit);
+ cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
+ cx.set_menus(vec![Menu {
+ name: "Zed".into(),
+ items: vec![MenuItem::Action {
+ name: "Quit".into(),
+ action: Box::new(Quit),
+ os_action: None,
+ }],
+ }]);
+
+ let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
+ let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
+ let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into());
+ let height = px(800.);
+ let width = px(800.);
+
+ cx.spawn(|cx| async move {
+ let mut windows = Vec::new();
+ for i in 0..2 {
+ let token = token::create(
+ &livekit_key,
+ &livekit_secret,
+ Some(&format!("test-participant-{i}")),
+ VideoGrant::to_join("test-room"),
+ )
+ .unwrap();
+
+ let bounds = bounds(point(width * i, px(0.0)), size(width, height));
+ let window =
+ LivekitWindow::new(livekit_url.as_str(), token.as_str(), bounds, cx.clone())
+ .await;
+ windows.push(window);
+ }
+ })
+ .detach();
+ });
+}
+
+fn quit(_: &Quit, cx: &mut gpui::AppContext) {
+ cx.quit();
+}
+
+struct LivekitWindow {
+ room: Room,
+ microphone_track: Option<LocalTrackPublication>,
+ screen_share_track: Option<LocalTrackPublication>,
+ microphone_stream: Option<AudioStream>,
+ screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
+ #[cfg(not(target_os = "windows"))]
+ remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
+ _events_task: Task<()>,
+}
+
+#[derive(Default)]
+struct ParticipantState {
+ audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
+ muted: bool,
+ screen_share_output_view: Option<(RemoteVideoTrack, View<RemoteVideoTrackView>)>,
+ speaking: bool,
+}
+
+#[cfg(not(windows))]
+impl LivekitWindow {
+ async fn new(
+ url: &str,
+ token: &str,
+ bounds: Bounds<Pixels>,
+ cx: AsyncAppContext,
+ ) -> WindowHandle<Self> {
+ let (room, mut events) = Room::connect(url, token, RoomOptions::default())
+ .await
+ .unwrap();
+
+ cx.update(|cx| {
+ cx.open_window(
+ WindowOptions {
+ window_bounds: Some(WindowBounds::Windowed(bounds)),
+ ..Default::default()
+ },
+ |cx| {
+ cx.new_view(|cx| {
+ let _events_task = cx.spawn(|this, mut cx| async move {
+ while let Some(event) = events.recv().await {
+ this.update(&mut cx, |this: &mut LivekitWindow, cx| {
+ this.handle_room_event(event, cx)
+ })
+ .ok();
+ }
+ });
+
+ Self {
+ room,
+ microphone_track: None,
+ microphone_stream: None,
+ screen_share_track: None,
+ screen_share_stream: None,
+ remote_participants: Vec::new(),
+ _events_task,
+ }
+ })
+ },
+ )
+ .unwrap()
+ })
+ .unwrap()
+ }
+
+ fn handle_room_event(&mut self, event: RoomEvent, cx: &mut ViewContext<Self>) {
+ eprintln!("event: {event:?}");
+
+ match event {
+ RoomEvent::TrackUnpublished {
+ publication,
+ participant,
+ } => {
+ let output = self.remote_participant(participant);
+ let unpublish_sid = publication.sid();
+ if output
+ .audio_output_stream
+ .as_ref()
+ .map_or(false, |(track, _)| track.sid() == unpublish_sid)
+ {
+ output.audio_output_stream.take();
+ }
+ if output
+ .screen_share_output_view
+ .as_ref()
+ .map_or(false, |(track, _)| track.sid() == unpublish_sid)
+ {
+ output.screen_share_output_view.take();
+ }
+ cx.notify();
+ }
+
+ RoomEvent::TrackSubscribed {
+ publication,
+ participant,
+ track,
+ } => {
+ let output = self.remote_participant(participant);
+ match track {
+ RemoteTrack::Audio(track) => {
+ output.audio_output_stream = Some((
+ publication.clone(),
+ play_remote_audio_track(&track, cx.background_executor()).unwrap(),
+ ));
+ }
+ RemoteTrack::Video(track) => {
+ output.screen_share_output_view = Some((
+ track.clone(),
+ cx.new_view(|cx| RemoteVideoTrackView::new(track, cx)),
+ ));
+ }
+ }
+ cx.notify();
+ }
+
+ RoomEvent::TrackMuted { participant, .. } => {
+ if let Participant::Remote(participant) = participant {
+ self.remote_participant(participant).muted = true;
+ cx.notify();
+ }
+ }
+
+ RoomEvent::TrackUnmuted { participant, .. } => {
+ if let Participant::Remote(participant) = participant {
+ self.remote_participant(participant).muted = false;
+ cx.notify();
+ }
+ }
+
+ RoomEvent::ActiveSpeakersChanged { speakers } => {
+ for (identity, output) in &mut self.remote_participants {
+ output.speaking = speakers.iter().any(|speaker| {
+ if let Participant::Remote(speaker) = speaker {
+ speaker.identity() == *identity
+ } else {
+ false
+ }
+ });
+ }
+ cx.notify();
+ }
+
+ _ => {}
+ }
+
+ cx.notify();
+ }
+
+ fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
+ match self
+ .remote_participants
+ .binary_search_by_key(&&participant.identity(), |row| &row.0)
+ {
+ Ok(ix) => &mut self.remote_participants[ix].1,
+ Err(ix) => {
+ self.remote_participants
+ .insert(ix, (participant.identity(), ParticipantState::default()));
+ &mut self.remote_participants[ix].1
+ }
+ }
+ }
+
+ fn toggle_mute(&mut self, cx: &mut ViewContext<Self>) {
+ if let Some(track) = &self.microphone_track {
+ if track.is_muted() {
+ track.unmute();
+ } else {
+ track.mute();
+ }
+ cx.notify();
+ } else {
+ let participant = self.room.local_participant();
+ cx.spawn(|this, mut cx| async move {
+ let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
+ let publication = participant
+ .publish_track(
+ LocalTrack::Audio(track),
+ TrackPublishOptions {
+ source: TrackSource::Microphone,
+ ..Default::default()
+ },
+ )
+ .await
+ .unwrap();
+ this.update(&mut cx, |this, cx| {
+ this.microphone_track = Some(publication);
+ this.microphone_stream = Some(stream);
+ cx.notify();
+ })
+ })
+ .detach();
+ }
+ }
+
+ fn toggle_screen_share(&mut self, cx: &mut ViewContext<Self>) {
+ if let Some(track) = self.screen_share_track.take() {
+ self.screen_share_stream.take();
+ let participant = self.room.local_participant();
+ cx.background_executor()
+ .spawn(async move {
+ participant.unpublish_track(&track.sid()).await.unwrap();
+ })
+ .detach();
+ cx.notify();
+ } else {
+ let participant = self.room.local_participant();
+ let sources = cx.screen_capture_sources();
+ cx.spawn(|this, mut cx| async move {
+ let sources = sources.await.unwrap()?;
+ let source = sources.into_iter().next().unwrap();
+ let (track, stream) = capture_local_video_track(&*source).await?;
+ let publication = participant
+ .publish_track(
+ LocalTrack::Video(track),
+ TrackPublishOptions {
+ source: TrackSource::Screenshare,
+ video_codec: VideoCodec::H264,
+ ..Default::default()
+ },
+ )
+ .await
+ .unwrap();
+ this.update(&mut cx, |this, cx| {
+ this.screen_share_track = Some(publication);
+ this.screen_share_stream = Some(stream);
+ cx.notify();
+ })
+ })
+ .detach();
+ }
+ }
+
+ fn toggle_remote_audio_for_participant(
+ &mut self,
+ identity: &ParticipantIdentity,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<()> {
+ let participant = self.remote_participants.iter().find_map(|(id, state)| {
+ if id == identity {
+ Some(state)
+ } else {
+ None
+ }
+ })?;
+ let publication = &participant.audio_output_stream.as_ref()?.0;
+ publication.set_enabled(!publication.is_enabled());
+ cx.notify();
+ Some(())
+ }
+}
+
+#[cfg(not(windows))]
+impl Render for LivekitWindow {
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ fn button() -> gpui::Div {
+ div()
+ .w(px(180.0))
+ .h(px(30.0))
+ .px_2()
+ .m_2()
+ .bg(rgb(0x8888ff))
+ }
+
+ div()
+ .bg(rgb(0xffffff))
+ .size_full()
+ .flex()
+ .flex_col()
+ .child(
+ div().bg(rgb(0xffd4a8)).flex().flex_row().children([
+ button()
+ .id("toggle-mute")
+ .child(if let Some(track) = &self.microphone_track {
+ if track.is_muted() {
+ "Unmute"
+ } else {
+ "Mute"
+ }
+ } else {
+ "Publish mic"
+ })
+ .on_click(cx.listener(|this, _, cx| this.toggle_mute(cx))),
+ button()
+ .id("toggle-screen-share")
+ .child(if self.screen_share_track.is_none() {
+ "Share screen"
+ } else {
+ "Unshare screen"
+ })
+ .on_click(cx.listener(|this, _, cx| this.toggle_screen_share(cx))),
+ ]),
+ )
+ .child(
+ div()
+ .id("remote-participants")
+ .overflow_y_scroll()
+ .flex()
+ .flex_col()
+ .flex_grow()
+ .children(self.remote_participants.iter().map(|(identity, state)| {
+ div()
+ .h(px(300.0))
+ .flex()
+ .flex_col()
+ .m_2()
+ .px_2()
+ .bg(rgb(0x8888ff))
+ .child(SharedString::from(if state.speaking {
+ format!("{} (speaking)", &identity.0)
+ } else if state.muted {
+ format!("{} (muted)", &identity.0)
+ } else {
+ identity.0.clone()
+ }))
+ .when_some(state.audio_output_stream.as_ref(), |el, state| {
+ el.child(
+ button()
+ .id(SharedString::from(identity.0.clone()))
+ .child(if state.0.is_enabled() {
+ "Deafen"
+ } else {
+ "Undeafen"
+ })
+ .on_click(cx.listener({
+ let identity = identity.clone();
+ move |this, _, cx| {
+ this.toggle_remote_audio_for_participant(
+ &identity, cx,
+ );
+ }
+ })),
+ )
+ })
+ .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
+ })),
+ )
+ }
+}
@@ -0,0 +1,661 @@
+#![cfg_attr(target_os = "windows", allow(unused))]
+
+mod remote_video_track_view;
+#[cfg(any(test, feature = "test-support", target_os = "windows"))]
+pub mod test;
+
+use anyhow::{anyhow, Context as _, Result};
+use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
+use futures::{io, Stream, StreamExt as _};
+use gpui::{
+ BackgroundExecutor, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, Task,
+};
+use parking_lot::Mutex;
+use std::{borrow::Cow, collections::VecDeque, future::Future, pin::Pin, sync::Arc, thread};
+use util::{debug_panic, ResultExt as _};
+#[cfg(not(target_os = "windows"))]
+use webrtc::{
+ audio_frame::AudioFrame,
+ audio_source::{native::NativeAudioSource, AudioSourceOptions, RtcAudioSource},
+ audio_stream::native::NativeAudioStream,
+ video_frame::{VideoBuffer, VideoFrame, VideoRotation},
+ video_source::{native::NativeVideoSource, RtcVideoSource, VideoResolution},
+ video_stream::native::NativeVideoStream,
+};
+
+#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))]
+use livekit::track::RemoteAudioTrack;
+#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))]
+pub use livekit::*;
+#[cfg(any(test, feature = "test-support", target_os = "windows"))]
+use test::track::RemoteAudioTrack;
+#[cfg(any(test, feature = "test-support", target_os = "windows"))]
+pub use test::*;
+
+pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
+
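+/// A handle that keeps an audio stream alive.
+///
+/// `Input` holds the sender that keeps the cpal capture thread running plus the task that
+/// forwards captured frames to livekit; `Output` holds the playback task. Dropping the
+/// value tears the stream down.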
+pub enum AudioStream {
+ Input {
+ _thread_handle: std::sync::mpsc::Sender<()>,
+ _transmit_task: Task<()>,
+ },
+ Output {
+ _task: Task<()>,
+ },
+}
+
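+// Adapts gpui's `PlatformDispatcher` to livekit's `Dispatcher` trait so that livekit's
+// internal futures run on gpui's executor.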
+struct Dispatcher(Arc<dyn gpui::PlatformDispatcher>);
+
+#[cfg(not(target_os = "windows"))]
+impl livekit::dispatcher::Dispatcher for Dispatcher {
+ fn dispatch(&self, runnable: livekit::dispatcher::Runnable) {
+ self.0.dispatch(runnable, None);
+ }
+
+ fn dispatch_after(
+ &self,
+ duration: std::time::Duration,
+ runnable: livekit::dispatcher::Runnable,
+ ) {
+ self.0.dispatch_after(duration, runnable);
+ }
+}
+
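+// Routes livekit's HTTP requests through Zed's `http_client` abstraction.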
+struct HttpClientAdapter(Arc<dyn http_client::HttpClient>);
+
+fn http_2_status(status: http_client::http::StatusCode) -> http_2::StatusCode {
+ http_2::StatusCode::from_u16(status.as_u16())
+ .expect("valid status code to status code conversion")
+}
+
+#[cfg(not(target_os = "windows"))]
+impl livekit::dispatcher::HttpClient for HttpClientAdapter {
+ fn get(
+ &self,
+ url: &str,
+ ) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
+ let http_client = self.0.clone();
+ let url = url.to_string();
+ Box::pin(async move {
+ let response = http_client
+ .get(&url, http_client::AsyncBody::empty(), false)
+ .await
+ .map_err(io::Error::other)?;
+ Ok(livekit::dispatcher::Response {
+ status: http_2_status(response.status()),
+ body: Box::pin(response.into_body()),
+ })
+ })
+ }
+
+ fn send_async(
+ &self,
+ request: http_2::Request<Vec<u8>>,
+ ) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
+ let http_client = self.0.clone();
+ let mut builder = http_client::http::Request::builder()
+ .method(request.method().as_str())
+ .uri(request.uri().to_string());
+
+ for (key, value) in request.headers().iter() {
+ builder = builder.header(key.as_str(), value.as_bytes());
+ }
+
+ if !request.extensions().is_empty() {
+ debug_panic!(
+ "Livekit sent an HTTP request with a protocol extension that Zed doesn't support!"
+ );
+ }
+
+ let request = builder
+ .body(http_client::AsyncBody::from_bytes(
+ request.into_body().into(),
+ ))
+ .unwrap();
+
+ Box::pin(async move {
+ let response = http_client.send(request).await.map_err(io::Error::other)?;
+ Ok(livekit::dispatcher::Response {
+ status: http_2_status(response.status()),
+ body: Box::pin(response.into_body()),
+ })
+ })
+ }
+}
+
+#[cfg(target_os = "windows")]
+pub fn init(
+ dispatcher: Arc<dyn gpui::PlatformDispatcher>,
+ http_client: Arc<dyn http_client::HttpClient>,
+) {
+}
+
+#[cfg(not(target_os = "windows"))]
+pub fn init(
+ dispatcher: Arc<dyn gpui::PlatformDispatcher>,
+ http_client: Arc<dyn http_client::HttpClient>,
+) {
+ livekit::dispatcher::set_dispatcher(Dispatcher(dispatcher));
+ livekit::dispatcher::set_http_client(HttpClientAdapter(http_client));
+}
+
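+/// Creates a local video track from a gpui `ScreenCaptureSource`: each captured frame is
+/// converted to a webrtc buffer and pushed into a `NativeVideoSource`.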
+#[cfg(not(target_os = "windows"))]
+pub async fn capture_local_video_track(
+ capture_source: &dyn ScreenCaptureSource,
+) -> Result<(track::LocalVideoTrack, Box<dyn ScreenCaptureStream>)> {
+ let resolution = capture_source.resolution()?;
+ let track_source = NativeVideoSource::new(VideoResolution {
+ width: resolution.width.0 as u32,
+ height: resolution.height.0 as u32,
+ });
+
+ let capture_stream = capture_source
+ .stream({
+ let track_source = track_source.clone();
+ Box::new(move |frame| {
+ if let Some(buffer) = video_frame_buffer_to_webrtc(frame) {
+ track_source.capture_frame(&VideoFrame {
+ rotation: VideoRotation::VideoRotation0,
+ timestamp_us: 0,
+ buffer,
+ });
+ }
+ })
+ })
+ .await??;
+
+ Ok((
+ track::LocalVideoTrack::create_video_track(
+ "screen share",
+ RtcVideoSource::Native(track_source),
+ ),
+ capture_stream,
+ ))
+}
+
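+/// Creates a local audio track by capturing the default input device on a dedicated cpal
+/// thread and forwarding the captured frames, via a channel, into a livekit
+/// `NativeAudioSource`. Under `test-support` no real device is opened.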
+#[cfg(not(target_os = "windows"))]
+pub fn capture_local_audio_track(
+ background_executor: &BackgroundExecutor,
+) -> Result<Task<(track::LocalAudioTrack, AudioStream)>> {
+ use util::maybe;
+
+ let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded();
+ let (thread_handle, thread_kill_rx) = std::sync::mpsc::channel::<()>();
+ let sample_rate;
+ let channels;
+
+ if cfg!(any(test, feature = "test-support")) {
+ sample_rate = 2;
+ channels = 1;
+ } else {
+ let (device, config) = default_device(true)?;
+ sample_rate = config.sample_rate().0;
+ channels = config.channels() as u32;
+ thread::spawn(move || {
+ maybe!({
+                if let Ok(name) = device.name() {
+                    log::info!("Using microphone: {}", name)
+                } else {
+                    log::info!("Using microphone: <unknown>");
+                }
+
+ let stream = device
+ .build_input_stream_raw(
+ &config.config(),
+ cpal::SampleFormat::I16,
+ move |data, _: &_| {
+ frame_tx
+ .unbounded_send(AudioFrame {
+ data: Cow::Owned(data.as_slice::<i16>().unwrap().to_vec()),
+ sample_rate,
+ num_channels: channels,
+ samples_per_channel: data.len() as u32 / channels,
+ })
+ .ok();
+ },
+ |err| log::error!("error capturing audio track: {:?}", err),
+ None,
+ )
+ .context("failed to build input stream")?;
+
+ stream.play()?;
+ // Keep the thread alive and holding onto the `stream`
+ thread_kill_rx.recv().ok();
+ anyhow::Ok(Some(()))
+ })
+ .log_err();
+ });
+ }
+
+ Ok(background_executor.spawn({
+ let background_executor = background_executor.clone();
+ async move {
+ let source = NativeAudioSource::new(
+ AudioSourceOptions {
+ echo_cancellation: true,
+ noise_suppression: true,
+ auto_gain_control: true,
+ },
+ sample_rate,
+ channels,
+ 100,
+ );
+ let transmit_task = background_executor.spawn({
+ let source = source.clone();
+ async move {
+ while let Some(frame) = frame_rx.next().await {
+ source.capture_frame(&frame).await.log_err();
+ }
+ }
+ });
+
+ let track = track::LocalAudioTrack::create_audio_track(
+ "microphone",
+ RtcAudioSource::Native(source),
+ );
+
+ (
+ track,
+ AudioStream::Input {
+ _thread_handle: thread_handle,
+ _transmit_task: transmit_task,
+ },
+ )
+ }
+ }))
+}
+
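+/// Plays a remote audio track on the default output device, recreating the output stream
+/// whenever the system's default output device changes.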
+#[cfg(not(target_os = "windows"))]
+pub fn play_remote_audio_track(
+ track: &RemoteAudioTrack,
+ background_executor: &BackgroundExecutor,
+) -> Result<AudioStream> {
+ let track = track.clone();
+    // We handle default-device changes ourselves on the output side: livekit has a resampler
+    // built in, so it's easy to create a new native audio stream when the device changes.
+ if cfg!(any(test, feature = "test-support")) {
+ Ok(AudioStream::Output {
+ _task: background_executor.spawn(async {}),
+ })
+ } else {
+ let mut default_change_listener = DeviceChangeListener::new(false)?;
+ let (output_device, output_config) = default_device(false)?;
+
+ let _task = background_executor.spawn({
+ let background_executor = background_executor.clone();
+ async move {
+ let (mut _receive_task, mut _thread) =
+ start_output_stream(output_config, output_device, &track, &background_executor);
+
+ while let Some(_) = default_change_listener.next().await {
+ let Some((output_device, output_config)) = get_default_output().log_err()
+ else {
+ continue;
+ };
+
+ if let Ok(name) = output_device.name() {
+ log::info!("Using speaker: {}", name)
+ } else {
+ log::info!("Using speaker: <unknown>")
+ }
+
+ (_receive_task, _thread) = start_output_stream(
+ output_config,
+ output_device,
+ &track,
+ &background_executor,
+ );
+ }
+
+ futures::future::pending::<()>().await;
+ }
+ });
+
+ Ok(AudioStream::Output { _task })
+ }
+}
+
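+/// Returns the default cpal input (`input == true`) or output device along with its
+/// default stream configuration.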
+fn default_device(input: bool) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> {
+ let device;
+ let config;
+ if input {
+ device = cpal::default_host()
+ .default_input_device()
+ .ok_or_else(|| anyhow!("no audio input device available"))?;
+ config = device
+ .default_input_config()
+ .context("failed to get default input config")?;
+ } else {
+ device = cpal::default_host()
+ .default_output_device()
+ .ok_or_else(|| anyhow!("no audio output device available"))?;
+ config = device
+ .default_output_config()
+ .context("failed to get default output config")?;
+ }
+ Ok((device, config))
+}
+
+#[cfg(not(target_os = "windows"))]
+fn get_default_output() -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> {
+ let host = cpal::default_host();
+ let output_device = host
+ .default_output_device()
+ .context("failed to read default output device")?;
+ let output_config = output_device.default_output_config()?;
+ Ok((output_device, output_config))
+}
+
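+/// Spawns a task that buffers roughly 100ms of decoded audio from the remote track and a
+/// dedicated thread that owns the (!Send) cpal output stream; dropping the returned task
+/// and sender shuts both down.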
+#[cfg(not(target_os = "windows"))]
+fn start_output_stream(
+ output_config: cpal::SupportedStreamConfig,
+ output_device: cpal::Device,
+ track: &track::RemoteAudioTrack,
+ background_executor: &BackgroundExecutor,
+) -> (Task<()>, std::sync::mpsc::Sender<()>) {
+ let buffer = Arc::new(Mutex::new(VecDeque::<i16>::new()));
+ let sample_rate = output_config.sample_rate();
+
+ let mut stream = NativeAudioStream::new(
+ track.rtc_track(),
+ sample_rate.0 as i32,
+ output_config.channels() as i32,
+ );
+
+ let receive_task = background_executor.spawn({
+ let buffer = buffer.clone();
+ async move {
+ const MS_OF_BUFFER: u32 = 100;
+ const MS_IN_SEC: u32 = 1000;
+ while let Some(frame) = stream.next().await {
+ let frame_size = frame.samples_per_channel * frame.num_channels;
+ debug_assert!(frame.data.len() == frame_size as usize);
+
+ let buffer_size =
+ ((frame.sample_rate * frame.num_channels) / MS_IN_SEC * MS_OF_BUFFER) as usize;
+
+ let mut buffer = buffer.lock();
+ let new_size = buffer.len() + frame.data.len();
+ if new_size > buffer_size {
+ let overflow = new_size - buffer_size;
+ buffer.drain(0..overflow);
+ }
+
+ buffer.extend(frame.data.iter());
+ }
+ }
+ });
+
+    // The `output_stream` needs to be on its own thread because it's !Send,
+    // and we experienced a deadlock when it was created on the main thread.
+ let (thread, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
+ thread::spawn(move || {
+ if cfg!(any(test, feature = "test-support")) {
+ // Can't play audio in tests
+ return;
+ }
+
+ let output_stream = output_device.build_output_stream(
+ &output_config.config(),
+ {
+ let buffer = buffer.clone();
+ move |data, _info| {
+ let mut buffer = buffer.lock();
+ if buffer.len() < data.len() {
+                        // Instead of partially filling a buffer, output silence. If a partial
+                        // buffer were output, this could lead to a perpetual state of
+                        // outputting partial buffers, as the buffer would never fill enough
+                        // for a full frame.
+ data.fill(0);
+ } else {
+ // SAFETY: We know that buffer has at least data.len() values in it.
+ // because we just checked
+ let mut drain = buffer.drain(..data.len());
+ data.fill_with(|| unsafe { drain.next().unwrap_unchecked() });
+ }
+ }
+ },
+ |error| log::error!("error playing audio track: {:?}", error),
+ None,
+ );
+
+ let Some(output_stream) = output_stream.log_err() else {
+ return;
+ };
+
+ output_stream.play().log_err();
+        // Block until the sender is dropped, keeping the output stream alive.
+ end_on_drop_rx.recv().ok();
+ });
+
+ (receive_task, thread)
+}
+
+#[cfg(target_os = "windows")]
+pub fn play_remote_video_track(
+ track: &track::RemoteVideoTrack,
+) -> impl Stream<Item = RemoteVideoFrame> {
+ futures::stream::empty()
+}
+
+#[cfg(not(target_os = "windows"))]
+pub fn play_remote_video_track(
+ track: &track::RemoteVideoTrack,
+) -> impl Stream<Item = RemoteVideoFrame> {
+ NativeVideoStream::new(track.rtc_track())
+ .filter_map(|frame| async move { video_frame_buffer_from_webrtc(frame.buffer) })
+}
+
+#[cfg(target_os = "macos")]
+pub type RemoteVideoFrame = media::core_video::CVImageBuffer;
+
+#[cfg(target_os = "macos")]
+fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<RemoteVideoFrame> {
+ use core_foundation::base::TCFType as _;
+ use media::core_video::CVImageBuffer;
+
+ let buffer = buffer.as_native()?;
+ let pixel_buffer = buffer.get_cv_pixel_buffer();
+ if pixel_buffer.is_null() {
+ return None;
+ }
+
+ unsafe { Some(CVImageBuffer::wrap_under_get_rule(pixel_buffer as _)) }
+}
+
+#[cfg(not(target_os = "macos"))]
+pub type RemoteVideoFrame = Arc<gpui::RenderImage>;
+
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
+fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<RemoteVideoFrame> {
+ use gpui::RenderImage;
+ use image::{Frame, RgbaImage};
+ use livekit::webrtc::prelude::VideoFormatType;
+ use smallvec::SmallVec;
+ use std::alloc::{alloc, Layout};
+
+ let width = buffer.width();
+ let height = buffer.height();
+ let stride = width * 4;
+ let byte_len = (stride * height) as usize;
+ let argb_image = unsafe {
+ // Motivation for this unsafe code is to avoid initializing the frame data, since to_argb
+ // will write all bytes anyway.
+ let start_ptr = alloc(Layout::array::<u8>(byte_len).log_err()?);
+ if start_ptr.is_null() {
+ return None;
+ }
+ let bgra_frame_slice = std::slice::from_raw_parts_mut(start_ptr, byte_len);
+ buffer.to_argb(
+ VideoFormatType::ARGB, // For some reason, this displays correctly while RGBA (the correct format) does not
+ bgra_frame_slice,
+ stride,
+ width as i32,
+ height as i32,
+ );
+ Vec::from_raw_parts(start_ptr, byte_len, byte_len)
+ };
+
+ Some(Arc::new(RenderImage::new(SmallVec::from_elem(
+ Frame::new(
+ RgbaImage::from_raw(width, height, argb_image)
+ .with_context(|| "Bug: not enough bytes allocated for image.")
+ .log_err()?,
+ ),
+ 1,
+ ))))
+}
+
+#[cfg(target_os = "macos")]
+fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
+ use core_foundation::base::TCFType as _;
+
+ let pixel_buffer = frame.0.as_concrete_TypeRef();
+ std::mem::forget(frame.0);
+ unsafe {
+ Some(webrtc::video_frame::native::NativeBuffer::from_cv_pixel_buffer(pixel_buffer as _))
+ }
+}
+
+#[cfg(not(any(target_os = "macos", target_os = "windows")))]
+fn video_frame_buffer_to_webrtc(_frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
+ None as Option<Box<dyn VideoBuffer>>
+}
+
+trait DeviceChangeListenerApi: Stream<Item = ()> + Sized {
+ fn new(input: bool) -> Result<Self>;
+}
+
+#[cfg(target_os = "macos")]
+mod macos {
+
+ use coreaudio::sys::{
+ kAudioHardwarePropertyDefaultInputDevice, kAudioHardwarePropertyDefaultOutputDevice,
+ kAudioObjectPropertyElementMaster, kAudioObjectPropertyScopeGlobal,
+ kAudioObjectSystemObject, AudioObjectAddPropertyListener, AudioObjectID,
+ AudioObjectPropertyAddress, AudioObjectRemovePropertyListener, OSStatus,
+ };
+ use futures::{channel::mpsc::UnboundedReceiver, StreamExt};
+
+ use crate::DeviceChangeListenerApi;
+
+ /// Implementation from: https://github.com/zed-industries/cpal/blob/fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50/src/host/coreaudio/macos/property_listener.rs#L15
+ pub struct CoreAudioDefaultDeviceChangeListener {
+ rx: UnboundedReceiver<()>,
+ callback: Box<PropertyListenerCallbackWrapper>,
+ input: bool,
+ }
+
+ trait _AssertSend: Send {}
+ impl _AssertSend for CoreAudioDefaultDeviceChangeListener {}
+
+ struct PropertyListenerCallbackWrapper(Box<dyn FnMut() + Send>);
+
+ unsafe extern "C" fn property_listener_handler_shim(
+ _: AudioObjectID,
+ _: u32,
+ _: *const AudioObjectPropertyAddress,
+ callback: *mut ::std::os::raw::c_void,
+ ) -> OSStatus {
+ let wrapper = callback as *mut PropertyListenerCallbackWrapper;
+ (*wrapper).0();
+ 0
+ }
+
+ impl DeviceChangeListenerApi for CoreAudioDefaultDeviceChangeListener {
+ fn new(input: bool) -> gpui::Result<Self> {
+ let (tx, rx) = futures::channel::mpsc::unbounded();
+
+ let callback = Box::new(PropertyListenerCallbackWrapper(Box::new(move || {
+ tx.unbounded_send(()).ok();
+ })));
+
+ unsafe {
+ coreaudio::Error::from_os_status(AudioObjectAddPropertyListener(
+ kAudioObjectSystemObject,
+ &AudioObjectPropertyAddress {
+ mSelector: if input {
+ kAudioHardwarePropertyDefaultInputDevice
+ } else {
+ kAudioHardwarePropertyDefaultOutputDevice
+ },
+ mScope: kAudioObjectPropertyScopeGlobal,
+ mElement: kAudioObjectPropertyElementMaster,
+ },
+ Some(property_listener_handler_shim),
+ &*callback as *const _ as *mut _,
+ ))?;
+ }
+
+ Ok(Self {
+ rx,
+ callback,
+ input,
+ })
+ }
+ }
+
+ impl Drop for CoreAudioDefaultDeviceChangeListener {
+ fn drop(&mut self) {
+ unsafe {
+ AudioObjectRemovePropertyListener(
+ kAudioObjectSystemObject,
+ &AudioObjectPropertyAddress {
+ mSelector: if self.input {
+ kAudioHardwarePropertyDefaultInputDevice
+ } else {
+ kAudioHardwarePropertyDefaultOutputDevice
+ },
+ mScope: kAudioObjectPropertyScopeGlobal,
+ mElement: kAudioObjectPropertyElementMaster,
+ },
+ Some(property_listener_handler_shim),
+ &*self.callback as *const _ as *mut _,
+ );
+ }
+ }
+ }
+
+ impl futures::Stream for CoreAudioDefaultDeviceChangeListener {
+ type Item = ();
+
+ fn poll_next(
+ mut self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>,
+ ) -> std::task::Poll<Option<Self::Item>> {
+ self.rx.poll_next_unpin(cx)
+ }
+ }
+}
+
+#[cfg(target_os = "macos")]
+type DeviceChangeListener = macos::CoreAudioDefaultDeviceChangeListener;
+
+#[cfg(not(target_os = "macos"))]
+mod noop_change_listener {
+ use std::task::Poll;
+
+ use crate::DeviceChangeListenerApi;
+
+ pub struct NoopOutputDeviceChangelistener {}
+
+ impl DeviceChangeListenerApi for NoopOutputDeviceChangelistener {
+ fn new(_input: bool) -> anyhow::Result<Self> {
+ Ok(NoopOutputDeviceChangelistener {})
+ }
+ }
+
+ impl futures::Stream for NoopOutputDeviceChangelistener {
+ type Item = ();
+
+ fn poll_next(
+ self: std::pin::Pin<&mut Self>,
+ _cx: &mut std::task::Context<'_>,
+ ) -> Poll<Option<Self::Item>> {
+ Poll::Pending
+ }
+ }
+}
+
+#[cfg(not(target_os = "macos"))]
+type DeviceChangeListener = noop_change_listener::NoopOutputDeviceChangelistener;
@@ -0,0 +1,99 @@
+use crate::track::RemoteVideoTrack;
+use anyhow::Result;
+use futures::StreamExt as _;
+use gpui::{Empty, EventEmitter, IntoElement, Render, Task, View, ViewContext, VisualContext as _};
+
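+/// A gpui view that renders the most recent frame of a remote video track and emits
+/// [`RemoteVideoTrackViewEvent::Close`] once the underlying frame stream ends.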
+pub struct RemoteVideoTrackView {
+ track: RemoteVideoTrack,
+ latest_frame: Option<crate::RemoteVideoFrame>,
+ #[cfg(not(target_os = "macos"))]
+ current_rendered_frame: Option<crate::RemoteVideoFrame>,
+ #[cfg(not(target_os = "macos"))]
+ previous_rendered_frame: Option<crate::RemoteVideoFrame>,
+ _maintain_frame: Task<Result<()>>,
+}
+
+#[derive(Debug)]
+pub enum RemoteVideoTrackViewEvent {
+ Close,
+}
+
+impl RemoteVideoTrackView {
+ pub fn new(track: RemoteVideoTrack, cx: &mut ViewContext<Self>) -> Self {
+ cx.focus_handle();
+ let frames = super::play_remote_video_track(&track);
+
+ Self {
+ track,
+ latest_frame: None,
+ _maintain_frame: cx.spawn(|this, mut cx| async move {
+ futures::pin_mut!(frames);
+ while let Some(frame) = frames.next().await {
+ this.update(&mut cx, |this, cx| {
+ this.latest_frame = Some(frame);
+ cx.notify();
+ })?;
+ }
+ this.update(&mut cx, |_this, cx| {
+ #[cfg(not(target_os = "macos"))]
+ {
+ use util::ResultExt as _;
+ if let Some(frame) = _this.previous_rendered_frame.take() {
+ cx.window_context().drop_image(frame).log_err();
+ }
+ // TODO(mgsloan): This might leak the last image of the screenshare if
+ // render is called after the screenshare ends.
+ if let Some(frame) = _this.current_rendered_frame.take() {
+ cx.window_context().drop_image(frame).log_err();
+ }
+ }
+ cx.emit(RemoteVideoTrackViewEvent::Close)
+ })?;
+ Ok(())
+ }),
+ #[cfg(not(target_os = "macos"))]
+ current_rendered_frame: None,
+ #[cfg(not(target_os = "macos"))]
+ previous_rendered_frame: None,
+ }
+ }
+
+ pub fn clone(&self, cx: &mut ViewContext<Self>) -> View<Self> {
+ cx.new_view(|cx| Self::new(self.track.clone(), cx))
+ }
+}
+
+impl EventEmitter<RemoteVideoTrackViewEvent> for RemoteVideoTrackView {}
+
+impl Render for RemoteVideoTrackView {
+ fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
+ #[cfg(target_os = "macos")]
+ if let Some(latest_frame) = &self.latest_frame {
+ use gpui::Styled as _;
+ return gpui::surface(latest_frame.clone())
+ .size_full()
+ .into_any_element();
+ }
+
+ #[cfg(not(target_os = "macos"))]
+ if let Some(latest_frame) = &self.latest_frame {
+ use gpui::Styled as _;
+ if let Some(current_rendered_frame) = self.current_rendered_frame.take() {
+ if let Some(frame) = self.previous_rendered_frame.take() {
+ // Only drop the frame if it's not also the current frame.
+ if frame.id != current_rendered_frame.id {
+ use util::ResultExt as _;
+ _cx.window_context().drop_image(frame).log_err();
+ }
+ }
+ self.previous_rendered_frame = Some(current_rendered_frame)
+ }
+ self.current_rendered_frame = Some(latest_frame.clone());
+ return gpui::img(latest_frame.clone())
+ .size_full()
+ .into_any_element();
+ }
+
+ Empty.into_any_element()
+ }
+}
@@ -0,0 +1,825 @@
+pub mod participant;
+pub mod publication;
+pub mod track;
+
+#[cfg(not(windows))]
+pub mod webrtc;
+
+#[cfg(not(windows))]
+use self::id::*;
+use self::{participant::*, publication::*, track::*};
+use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
+use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet};
+use gpui::BackgroundExecutor;
+#[cfg(not(windows))]
+use livekit::options::TrackPublishOptions;
+use livekit_server::{proto, token};
+use parking_lot::Mutex;
+use postage::{mpsc, sink::Sink};
+use std::sync::{
+ atomic::{AtomicBool, Ordering::SeqCst},
+ Arc, Weak,
+};
+
+#[cfg(not(windows))]
+pub use livekit::{id, options, ConnectionState, DisconnectReason, RoomOptions};
+
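+// In-process test servers, registered globally by URL so that `Room::connect` in tests can
+// find the server created for that URL.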
+static SERVERS: Mutex<BTreeMap<String, Arc<TestServer>>> = Mutex::new(BTreeMap::new());
+
+pub struct TestServer {
+ pub url: String,
+ pub api_key: String,
+ pub secret_key: String,
+ #[cfg(not(target_os = "windows"))]
+ rooms: Mutex<HashMap<String, TestServerRoom>>,
+ executor: BackgroundExecutor,
+}
+
+#[cfg(not(target_os = "windows"))]
+impl TestServer {
+ pub fn create(
+ url: String,
+ api_key: String,
+ secret_key: String,
+ executor: BackgroundExecutor,
+ ) -> Result<Arc<TestServer>> {
+ let mut servers = SERVERS.lock();
+ if let BTreeEntry::Vacant(e) = servers.entry(url.clone()) {
+ let server = Arc::new(TestServer {
+ url,
+ api_key,
+ secret_key,
+ rooms: Default::default(),
+ executor,
+ });
+ e.insert(server.clone());
+ Ok(server)
+ } else {
+ Err(anyhow!("a server with url {:?} already exists", url))
+ }
+ }
+
+ fn get(url: &str) -> Result<Arc<TestServer>> {
+ Ok(SERVERS
+ .lock()
+ .get(url)
+ .ok_or_else(|| anyhow!("no server found for url"))?
+ .clone())
+ }
+
+ pub fn teardown(&self) -> Result<()> {
+ SERVERS
+ .lock()
+ .remove(&self.url)
+ .ok_or_else(|| anyhow!("server with url {:?} does not exist", self.url))?;
+ Ok(())
+ }
+
+ pub fn create_api_client(&self) -> TestApiClient {
+ TestApiClient {
+ url: self.url.clone(),
+ }
+ }
+
+ pub async fn create_room(&self, room: String) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+
+ let mut server_rooms = self.rooms.lock();
+ if let Entry::Vacant(e) = server_rooms.entry(room.clone()) {
+ e.insert(Default::default());
+ Ok(())
+ } else {
+ Err(anyhow!("room {:?} already exists", room))
+ }
+ }
+
+ async fn delete_room(&self, room: String) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+
+ let mut server_rooms = self.rooms.lock();
+ server_rooms
+ .remove(&room)
+ .ok_or_else(|| anyhow!("room {:?} does not exist", room))?;
+ Ok(())
+ }
+
+ async fn join_room(&self, token: String, client_room: Room) -> Result<ParticipantIdentity> {
+ self.executor.simulate_random_delay().await;
+
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let room_name = claims.video.room.unwrap();
+ let mut server_rooms = self.rooms.lock();
+ let room = (*server_rooms).entry(room_name.to_string()).or_default();
+
+ if let Entry::Vacant(e) = room.client_rooms.entry(identity.clone()) {
+ for server_track in &room.video_tracks {
+ let track = RemoteTrack::Video(RemoteVideoTrack {
+ server_track: server_track.clone(),
+ _room: client_room.downgrade(),
+ });
+ client_room
+ .0
+ .lock()
+ .updates_tx
+ .blocking_send(RoomEvent::TrackSubscribed {
+ track: track.clone(),
+ publication: RemoteTrackPublication {
+ sid: server_track.sid.clone(),
+ room: client_room.downgrade(),
+ track,
+ },
+ participant: RemoteParticipant {
+ room: client_room.downgrade(),
+ identity: server_track.publisher_id.clone(),
+ },
+ })
+ .unwrap();
+ }
+ for server_track in &room.audio_tracks {
+ let track = RemoteTrack::Audio(RemoteAudioTrack {
+ server_track: server_track.clone(),
+ room: client_room.downgrade(),
+ });
+ client_room
+ .0
+ .lock()
+ .updates_tx
+ .blocking_send(RoomEvent::TrackSubscribed {
+ track: track.clone(),
+ publication: RemoteTrackPublication {
+ sid: server_track.sid.clone(),
+ room: client_room.downgrade(),
+ track,
+ },
+ participant: RemoteParticipant {
+ room: client_room.downgrade(),
+ identity: server_track.publisher_id.clone(),
+ },
+ })
+ .unwrap();
+ }
+ e.insert(client_room);
+ Ok(identity)
+ } else {
+ Err(anyhow!(
+ "{:?} attempted to join room {:?} twice",
+ identity,
+ room_name
+ ))
+ }
+ }
+
+ async fn leave_room(&self, token: String) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let room_name = claims.video.room.unwrap();
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ room.client_rooms.remove(&identity).ok_or_else(|| {
+ anyhow!(
+ "{:?} attempted to leave room {:?} before joining it",
+ identity,
+ room_name
+ )
+ })?;
+ Ok(())
+ }
+
+ fn remote_participants(
+ &self,
+ token: String,
+ ) -> Result<HashMap<ParticipantIdentity, RemoteParticipant>> {
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let local_identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let room_name = claims.video.room.unwrap().to_string();
+
+ if let Some(server_room) = self.rooms.lock().get(&room_name) {
+ let room = server_room
+ .client_rooms
+ .get(&local_identity)
+ .unwrap()
+ .downgrade();
+ Ok(server_room
+ .client_rooms
+ .iter()
+ .filter(|(identity, _)| *identity != &local_identity)
+ .map(|(identity, _)| {
+ (
+ identity.clone(),
+ RemoteParticipant {
+ room: room.clone(),
+ identity: identity.clone(),
+ },
+ )
+ })
+ .collect())
+ } else {
+ Ok(Default::default())
+ }
+ }
+
+ async fn remove_participant(
+ &self,
+ room_name: String,
+ identity: ParticipantIdentity,
+ ) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ room.client_rooms.remove(&identity).ok_or_else(|| {
+ anyhow!(
+ "participant {:?} did not join room {:?}",
+ identity,
+ room_name
+ )
+ })?;
+ Ok(())
+ }
+
+ async fn update_participant(
+ &self,
+ room_name: String,
+ identity: String,
+ permission: proto::ParticipantPermission,
+ ) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ room.participant_permissions
+ .insert(ParticipantIdentity(identity), permission);
+ Ok(())
+ }
+
+ pub async fn disconnect_client(&self, client_identity: String) {
+ let client_identity = ParticipantIdentity(client_identity);
+
+ self.executor.simulate_random_delay().await;
+
+ let mut server_rooms = self.rooms.lock();
+ for room in server_rooms.values_mut() {
+ if let Some(room) = room.client_rooms.remove(&client_identity) {
+ let mut room = room.0.lock();
+ room.connection_state = ConnectionState::Disconnected;
+ room.updates_tx
+ .blocking_send(RoomEvent::Disconnected {
+ reason: DisconnectReason::SignalClose,
+ })
+ .ok();
+ }
+ }
+ }
+
+ async fn publish_video_track(
+ &self,
+ token: String,
+ _local_track: LocalVideoTrack,
+ ) -> Result<TrackSid> {
+ self.executor.simulate_random_delay().await;
+
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+
+ let can_publish = room
+ .participant_permissions
+ .get(&identity)
+ .map(|permission| permission.can_publish)
+ .or(claims.video.can_publish)
+ .unwrap_or(true);
+
+ if !can_publish {
+ return Err(anyhow!("user is not allowed to publish"));
+ }
+
+ let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap();
+ let server_track = Arc::new(TestServerVideoTrack {
+ sid: sid.clone(),
+ publisher_id: identity.clone(),
+ });
+
+ room.video_tracks.push(server_track.clone());
+
+ for (room_identity, client_room) in &room.client_rooms {
+ if *room_identity != identity {
+ let track = RemoteTrack::Video(RemoteVideoTrack {
+ server_track: server_track.clone(),
+ _room: client_room.downgrade(),
+ });
+ let publication = RemoteTrackPublication {
+ sid: sid.clone(),
+ room: client_room.downgrade(),
+ track: track.clone(),
+ };
+ let participant = RemoteParticipant {
+ identity: identity.clone(),
+ room: client_room.downgrade(),
+ };
+ client_room
+ .0
+ .lock()
+ .updates_tx
+ .blocking_send(RoomEvent::TrackSubscribed {
+ track,
+ publication,
+ participant,
+ })
+ .unwrap();
+ }
+ }
+
+ Ok(sid)
+ }
+
+ async fn publish_audio_track(
+ &self,
+ token: String,
+ _local_track: &LocalAudioTrack,
+ ) -> Result<TrackSid> {
+ self.executor.simulate_random_delay().await;
+
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+
+ let can_publish = room
+ .participant_permissions
+ .get(&identity)
+ .map(|permission| permission.can_publish)
+ .or(claims.video.can_publish)
+ .unwrap_or(true);
+
+ if !can_publish {
+ return Err(anyhow!("user is not allowed to publish"));
+ }
+
+ let sid: TrackSid = format!("TR_{}", nanoid::nanoid!(17)).try_into().unwrap();
+ let server_track = Arc::new(TestServerAudioTrack {
+ sid: sid.clone(),
+ publisher_id: identity.clone(),
+ muted: AtomicBool::new(false),
+ });
+
+ room.audio_tracks.push(server_track.clone());
+
+ for (room_identity, client_room) in &room.client_rooms {
+ if *room_identity != identity {
+ let track = RemoteTrack::Audio(RemoteAudioTrack {
+ server_track: server_track.clone(),
+ room: client_room.downgrade(),
+ });
+ let publication = RemoteTrackPublication {
+ sid: sid.clone(),
+ room: client_room.downgrade(),
+ track: track.clone(),
+ };
+ let participant = RemoteParticipant {
+ identity: identity.clone(),
+ room: client_room.downgrade(),
+ };
+ client_room
+ .0
+ .lock()
+ .updates_tx
+ .blocking_send(RoomEvent::TrackSubscribed {
+ track,
+ publication,
+ participant,
+ })
+ .ok();
+ }
+ }
+
+ Ok(sid)
+ }
+
+ async fn unpublish_track(&self, _token: String, _track: &TrackSid) -> Result<()> {
+ Ok(())
+ }
+
+ fn set_track_muted(&self, token: &str, track_sid: &TrackSid, muted: bool) -> Result<()> {
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ if let Some(track) = room
+ .audio_tracks
+ .iter_mut()
+ .find(|track| track.sid == *track_sid)
+ {
+ track.muted.store(muted, SeqCst);
+ for (id, client_room) in room.client_rooms.iter() {
+ if *id != identity {
+ let participant = Participant::Remote(RemoteParticipant {
+ identity: identity.clone(),
+ room: client_room.downgrade(),
+ });
+ let track = RemoteTrack::Audio(RemoteAudioTrack {
+ server_track: track.clone(),
+ room: client_room.downgrade(),
+ });
+ let publication = TrackPublication::Remote(RemoteTrackPublication {
+ sid: track_sid.clone(),
+ room: client_room.downgrade(),
+ track,
+ });
+
+ let event = if muted {
+ RoomEvent::TrackMuted {
+ participant,
+ publication,
+ }
+ } else {
+ RoomEvent::TrackUnmuted {
+ participant,
+ publication,
+ }
+ };
+
+ client_room
+ .0
+ .lock()
+ .updates_tx
+ .blocking_send(event)
+ .unwrap();
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn is_track_muted(&self, token: &str, track_sid: &TrackSid) -> Option<bool> {
+ let claims = livekit_server::token::validate(&token, &self.secret_key).ok()?;
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms.get_mut(&*room_name)?;
+ room.audio_tracks.iter().find_map(|track| {
+ if track.sid == *track_sid {
+ Some(track.muted.load(SeqCst))
+ } else {
+ None
+ }
+ })
+ }
+
+ fn video_tracks(&self, token: String) -> Result<Vec<RemoteVideoTrack>> {
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ let client_room = room
+ .client_rooms
+ .get(&identity)
+ .ok_or_else(|| anyhow!("not a participant in room"))?;
+ Ok(room
+ .video_tracks
+ .iter()
+ .map(|track| RemoteVideoTrack {
+ server_track: track.clone(),
+ _room: client_room.downgrade(),
+ })
+ .collect())
+ }
+
+ fn audio_tracks(&self, token: String) -> Result<Vec<RemoteAudioTrack>> {
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+ let identity = ParticipantIdentity(claims.sub.unwrap().to_string());
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ let client_room = room
+ .client_rooms
+ .get(&identity)
+ .ok_or_else(|| anyhow!("not a participant in room"))?;
+ Ok(room
+ .audio_tracks
+ .iter()
+ .map(|track| RemoteAudioTrack {
+ server_track: track.clone(),
+ room: client_room.downgrade(),
+ })
+ .collect())
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+#[derive(Default, Debug)]
+struct TestServerRoom {
+ client_rooms: HashMap<ParticipantIdentity, Room>,
+ video_tracks: Vec<Arc<TestServerVideoTrack>>,
+ audio_tracks: Vec<Arc<TestServerAudioTrack>>,
+ participant_permissions: HashMap<ParticipantIdentity, proto::ParticipantPermission>,
+}
+
+#[cfg(not(target_os = "windows"))]
+#[derive(Debug)]
+struct TestServerVideoTrack {
+ sid: TrackSid,
+ publisher_id: ParticipantIdentity,
+ // frames_rx: async_broadcast::Receiver<Frame>,
+}
+
+#[cfg(not(target_os = "windows"))]
+#[derive(Debug)]
+struct TestServerAudioTrack {
+ sid: TrackSid,
+ publisher_id: ParticipantIdentity,
+ muted: AtomicBool,
+}
+
+pub struct TestApiClient {
+ url: String,
+}
+
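+/// Room events delivered by the test client; mirrors the shape of `livekit::RoomEvent`.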
+#[derive(Clone, Debug)]
+#[non_exhaustive]
+pub enum RoomEvent {
+ ParticipantConnected(RemoteParticipant),
+ ParticipantDisconnected(RemoteParticipant),
+ LocalTrackPublished {
+ publication: LocalTrackPublication,
+ track: LocalTrack,
+ participant: LocalParticipant,
+ },
+ LocalTrackUnpublished {
+ publication: LocalTrackPublication,
+ participant: LocalParticipant,
+ },
+ TrackSubscribed {
+ track: RemoteTrack,
+ publication: RemoteTrackPublication,
+ participant: RemoteParticipant,
+ },
+ TrackUnsubscribed {
+ track: RemoteTrack,
+ publication: RemoteTrackPublication,
+ participant: RemoteParticipant,
+ },
+ TrackSubscriptionFailed {
+ participant: RemoteParticipant,
+ error: String,
+ #[cfg(not(target_os = "windows"))]
+ track_sid: TrackSid,
+ },
+ TrackPublished {
+ publication: RemoteTrackPublication,
+ participant: RemoteParticipant,
+ },
+ TrackUnpublished {
+ publication: RemoteTrackPublication,
+ participant: RemoteParticipant,
+ },
+ TrackMuted {
+ participant: Participant,
+ publication: TrackPublication,
+ },
+ TrackUnmuted {
+ participant: Participant,
+ publication: TrackPublication,
+ },
+ RoomMetadataChanged {
+ old_metadata: String,
+ metadata: String,
+ },
+ ParticipantMetadataChanged {
+ participant: Participant,
+ old_metadata: String,
+ metadata: String,
+ },
+ ParticipantNameChanged {
+ participant: Participant,
+ old_name: String,
+ name: String,
+ },
+ ActiveSpeakersChanged {
+ speakers: Vec<Participant>,
+ },
+ #[cfg(not(target_os = "windows"))]
+ ConnectionStateChanged(ConnectionState),
+ Connected {
+ participants_with_tracks: Vec<(RemoteParticipant, Vec<RemoteTrackPublication>)>,
+ },
+ #[cfg(not(target_os = "windows"))]
+ Disconnected {
+ reason: DisconnectReason,
+ },
+ Reconnecting,
+ Reconnected,
+}
+
+#[cfg(not(target_os = "windows"))]
+#[async_trait]
+impl livekit_server::api::Client for TestApiClient {
+ fn url(&self) -> &str {
+ &self.url
+ }
+
+ async fn create_room(&self, name: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server.create_room(name).await?;
+ Ok(())
+ }
+
+ async fn delete_room(&self, name: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server.delete_room(name).await?;
+ Ok(())
+ }
+
+ async fn remove_participant(&self, room: String, identity: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server
+ .remove_participant(room, ParticipantIdentity(identity))
+ .await?;
+ Ok(())
+ }
+
+ async fn update_participant(
+ &self,
+ room: String,
+ identity: String,
+ permission: livekit_server::proto::ParticipantPermission,
+ ) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server
+ .update_participant(room, identity, permission)
+ .await?;
+ Ok(())
+ }
+
+ fn room_token(&self, room: &str, identity: &str) -> Result<String> {
+ let server = TestServer::get(&self.url)?;
+ token::create(
+ &server.api_key,
+ &server.secret_key,
+ Some(identity),
+ token::VideoGrant::to_join(room),
+ )
+ }
+
+ fn guest_token(&self, room: &str, identity: &str) -> Result<String> {
+ let server = TestServer::get(&self.url)?;
+ token::create(
+ &server.api_key,
+ &server.secret_key,
+ Some(identity),
+ token::VideoGrant::for_guest(room),
+ )
+ }
+}
+
+struct RoomState {
+ url: String,
+ token: String,
+ #[cfg(not(target_os = "windows"))]
+ local_identity: ParticipantIdentity,
+ #[cfg(not(target_os = "windows"))]
+ connection_state: ConnectionState,
+ #[cfg(not(target_os = "windows"))]
+ paused_audio_tracks: HashSet<TrackSid>,
+ updates_tx: mpsc::Sender<RoomEvent>,
+}
+
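+// Client-side room handle for the test server. Tracks and publications hold a `WeakRoom`
+// to avoid reference cycles back to the room state.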
+#[derive(Clone, Debug)]
+pub struct Room(Arc<Mutex<RoomState>>);
+
+#[derive(Clone, Debug)]
+pub(crate) struct WeakRoom(Weak<Mutex<RoomState>>);
+
+#[cfg(not(target_os = "windows"))]
+impl std::fmt::Debug for RoomState {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Room")
+ .field("url", &self.url)
+ .field("token", &self.token)
+ .field("local_identity", &self.local_identity)
+ .field("connection_state", &self.connection_state)
+ .field("paused_audio_tracks", &self.paused_audio_tracks)
+ .finish()
+ }
+}
+
+#[cfg(target_os = "windows")]
+impl std::fmt::Debug for RoomState {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Room")
+ .field("url", &self.url)
+ .field("token", &self.token)
+ .finish()
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl Room {
+ fn downgrade(&self) -> WeakRoom {
+ WeakRoom(Arc::downgrade(&self.0))
+ }
+
+ pub fn connection_state(&self) -> ConnectionState {
+ self.0.lock().connection_state
+ }
+
+ pub fn local_participant(&self) -> LocalParticipant {
+ let identity = self.0.lock().local_identity.clone();
+ LocalParticipant {
+ identity,
+ room: self.clone(),
+ }
+ }
+
+ pub async fn connect(
+ url: &str,
+ token: &str,
+ _options: RoomOptions,
+ ) -> Result<(Self, mpsc::Receiver<RoomEvent>)> {
+ let server = TestServer::get(&url)?;
+ let (updates_tx, updates_rx) = mpsc::channel(1024);
+ let this = Self(Arc::new(Mutex::new(RoomState {
+ local_identity: ParticipantIdentity(String::new()),
+ url: url.to_string(),
+ token: token.to_string(),
+ connection_state: ConnectionState::Disconnected,
+ paused_audio_tracks: Default::default(),
+ updates_tx,
+ })));
+
+ let identity = server
+ .join_room(token.to_string(), this.clone())
+ .await
+ .context("room join")?;
+ {
+ let mut state = this.0.lock();
+ state.local_identity = identity;
+ state.connection_state = ConnectionState::Connected;
+ }
+
+ Ok((this, updates_rx))
+ }
+
+ pub fn remote_participants(&self) -> HashMap<ParticipantIdentity, RemoteParticipant> {
+ self.test_server()
+ .remote_participants(self.0.lock().token.clone())
+ .unwrap()
+ }
+
+ fn test_server(&self) -> Arc<TestServer> {
+ TestServer::get(&self.0.lock().url).unwrap()
+ }
+
+ fn token(&self) -> String {
+ self.0.lock().token.clone()
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl Drop for RoomState {
+ fn drop(&mut self) {
+ if self.connection_state == ConnectionState::Connected {
+ if let Ok(server) = TestServer::get(&self.url) {
+ let executor = server.executor.clone();
+ let token = self.token.clone();
+ executor
+ .spawn(async move { server.leave_room(token).await.ok() })
+ .detach();
+ }
+ }
+ }
+}
+
+impl WeakRoom {
+ fn upgrade(&self) -> Option<Room> {
+ self.0.upgrade().map(Room)
+ }
+}
@@ -0,0 +1,111 @@
+use super::*;
+
+#[derive(Clone, Debug)]
+pub enum Participant {
+ Local(LocalParticipant),
+ Remote(RemoteParticipant),
+}
+
+#[derive(Clone, Debug)]
+pub struct LocalParticipant {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) identity: ParticipantIdentity,
+ pub(super) room: Room,
+}
+
+#[derive(Clone, Debug)]
+pub struct RemoteParticipant {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) identity: ParticipantIdentity,
+ pub(super) room: WeakRoom,
+}
+
+#[cfg(not(target_os = "windows"))]
+impl Participant {
+ pub fn identity(&self) -> ParticipantIdentity {
+ match self {
+ Participant::Local(participant) => participant.identity.clone(),
+ Participant::Remote(participant) => participant.identity.clone(),
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl LocalParticipant {
+ pub async fn unpublish_track(&self, track: &TrackSid) -> Result<()> {
+ self.room
+ .test_server()
+ .unpublish_track(self.room.token(), track)
+ .await
+ }
+
+ pub async fn publish_track(
+ &self,
+ track: LocalTrack,
+ _options: TrackPublishOptions,
+ ) -> Result<LocalTrackPublication> {
+ let this = self.clone();
+ let track = track.clone();
+ let server = this.room.test_server();
+ let sid = match track {
+ LocalTrack::Video(track) => {
+ server.publish_video_track(this.room.token(), track).await?
+ }
+ LocalTrack::Audio(track) => {
+ server
+ .publish_audio_track(this.room.token(), &track)
+ .await?
+ }
+ };
+ Ok(LocalTrackPublication {
+ room: self.room.downgrade(),
+ sid,
+ })
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RemoteParticipant {
+ pub fn track_publications(&self) -> HashMap<TrackSid, RemoteTrackPublication> {
+ if let Some(room) = self.room.upgrade() {
+ let server = room.test_server();
+ let audio = server
+ .audio_tracks(room.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == self.identity)
+ .map(|track| {
+ (
+ track.sid(),
+ RemoteTrackPublication {
+ sid: track.sid(),
+ room: self.room.clone(),
+ track: RemoteTrack::Audio(track),
+ },
+ )
+ });
+ let video = server
+ .video_tracks(room.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == self.identity)
+ .map(|track| {
+ (
+ track.sid(),
+ RemoteTrackPublication {
+ sid: track.sid(),
+ room: self.room.clone(),
+ track: RemoteTrack::Video(track),
+ },
+ )
+ });
+ audio.chain(video).collect()
+ } else {
+ HashMap::default()
+ }
+ }
+
+ pub fn identity(&self) -> ParticipantIdentity {
+ self.identity.clone()
+ }
+}
@@ -0,0 +1,116 @@
+use super::*;
+
+#[derive(Clone, Debug)]
+pub enum TrackPublication {
+ Local(LocalTrackPublication),
+ Remote(RemoteTrackPublication),
+}
+
+#[derive(Clone, Debug)]
+pub struct LocalTrackPublication {
+ #[cfg(not(target_os = "windows"))]
+ pub(crate) sid: TrackSid,
+ pub(crate) room: WeakRoom,
+}
+
+#[derive(Clone, Debug)]
+pub struct RemoteTrackPublication {
+ #[cfg(not(target_os = "windows"))]
+ pub(crate) sid: TrackSid,
+ pub(crate) room: WeakRoom,
+ pub(crate) track: RemoteTrack,
+}
+
+#[cfg(not(target_os = "windows"))]
+impl TrackPublication {
+ pub fn sid(&self) -> TrackSid {
+ match self {
+ TrackPublication::Local(track) => track.sid(),
+ TrackPublication::Remote(track) => track.sid(),
+ }
+ }
+
+ pub fn is_muted(&self) -> bool {
+ match self {
+ TrackPublication::Local(track) => track.is_muted(),
+ TrackPublication::Remote(track) => track.is_muted(),
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl LocalTrackPublication {
+ pub fn sid(&self) -> TrackSid {
+ self.sid.clone()
+ }
+
+ pub fn mute(&self) {
+ self.set_mute(true)
+ }
+
+ pub fn unmute(&self) {
+ self.set_mute(false)
+ }
+
+ fn set_mute(&self, mute: bool) {
+ if let Some(room) = self.room.upgrade() {
+ room.test_server()
+ .set_track_muted(&room.token(), &self.sid, mute)
+ .ok();
+ }
+ }
+
+ pub fn is_muted(&self) -> bool {
+ if let Some(room) = self.room.upgrade() {
+ room.test_server()
+ .is_track_muted(&room.token(), &self.sid)
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RemoteTrackPublication {
+ pub fn sid(&self) -> TrackSid {
+ self.sid.clone()
+ }
+
+ pub fn track(&self) -> Option<RemoteTrack> {
+ Some(self.track.clone())
+ }
+
+ pub fn kind(&self) -> TrackKind {
+ self.track.kind()
+ }
+
+ pub fn is_muted(&self) -> bool {
+ if let Some(room) = self.room.upgrade() {
+ room.test_server()
+ .is_track_muted(&room.token(), &self.sid)
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ }
+
+ pub fn is_enabled(&self) -> bool {
+ if let Some(room) = self.room.upgrade() {
+ !room.0.lock().paused_audio_tracks.contains(&self.sid)
+ } else {
+ false
+ }
+ }
+
+ pub fn set_enabled(&self, enabled: bool) {
+ if let Some(room) = self.room.upgrade() {
+ let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks;
+ if enabled {
+ paused_audio_tracks.remove(&self.sid);
+ } else {
+ paused_audio_tracks.insert(self.sid.clone());
+ }
+ }
+ }
+}
@@ -0,0 +1,201 @@
+use super::*;
+#[cfg(not(windows))]
+use webrtc::{audio_source::RtcAudioSource, video_source::RtcVideoSource};
+
+#[cfg(not(windows))]
+pub use livekit::track::{TrackKind, TrackSource};
+
+#[derive(Clone, Debug)]
+pub enum LocalTrack {
+ Audio(LocalAudioTrack),
+ Video(LocalVideoTrack),
+}
+
+#[derive(Clone, Debug)]
+pub enum RemoteTrack {
+ Audio(RemoteAudioTrack),
+ Video(RemoteVideoTrack),
+}
+
+#[derive(Clone, Debug)]
+pub struct LocalVideoTrack {}
+
+#[derive(Clone, Debug)]
+pub struct LocalAudioTrack {}
+
+#[derive(Clone, Debug)]
+pub struct RemoteVideoTrack {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) server_track: Arc<TestServerVideoTrack>,
+ pub(super) _room: WeakRoom,
+}
+
+#[derive(Clone, Debug)]
+pub struct RemoteAudioTrack {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) server_track: Arc<TestServerAudioTrack>,
+ pub(super) room: WeakRoom,
+}
+
+pub enum RtcTrack {
+ Audio(RtcAudioTrack),
+ Video(RtcVideoTrack),
+}
+
+pub struct RtcAudioTrack {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) server_track: Arc<TestServerAudioTrack>,
+ pub(super) room: WeakRoom,
+}
+
+pub struct RtcVideoTrack {
+ #[cfg(not(target_os = "windows"))]
+ pub(super) _server_track: Arc<TestServerVideoTrack>,
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RemoteTrack {
+ pub fn sid(&self) -> TrackSid {
+ match self {
+ RemoteTrack::Audio(track) => track.sid(),
+ RemoteTrack::Video(track) => track.sid(),
+ }
+ }
+
+ pub fn kind(&self) -> TrackKind {
+ match self {
+ RemoteTrack::Audio(_) => TrackKind::Audio,
+ RemoteTrack::Video(_) => TrackKind::Video,
+ }
+ }
+
+ pub fn publisher_id(&self) -> ParticipantIdentity {
+ match self {
+ RemoteTrack::Audio(track) => track.publisher_id(),
+ RemoteTrack::Video(track) => track.publisher_id(),
+ }
+ }
+
+ pub fn rtc_track(&self) -> RtcTrack {
+ match self {
+ RemoteTrack::Audio(track) => RtcTrack::Audio(track.rtc_track()),
+ RemoteTrack::Video(track) => RtcTrack::Video(track.rtc_track()),
+ }
+ }
+}
+
+#[cfg(not(windows))]
+impl LocalVideoTrack {
+ pub fn create_video_track(_name: &str, _source: RtcVideoSource) -> Self {
+ Self {}
+ }
+}
+
+#[cfg(not(windows))]
+impl LocalAudioTrack {
+ pub fn create_audio_track(_name: &str, _source: RtcAudioSource) -> Self {
+ Self {}
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RemoteAudioTrack {
+ pub fn sid(&self) -> TrackSid {
+ self.server_track.sid.clone()
+ }
+
+ pub fn publisher_id(&self) -> ParticipantIdentity {
+ self.server_track.publisher_id.clone()
+ }
+
+ pub fn start(&self) {
+ if let Some(room) = self.room.upgrade() {
+ room.0
+ .lock()
+ .paused_audio_tracks
+ .remove(&self.server_track.sid);
+ }
+ }
+
+ pub fn stop(&self) {
+ if let Some(room) = self.room.upgrade() {
+ room.0
+ .lock()
+ .paused_audio_tracks
+ .insert(self.server_track.sid.clone());
+ }
+ }
+
+ pub fn rtc_track(&self) -> RtcAudioTrack {
+ RtcAudioTrack {
+ server_track: self.server_track.clone(),
+ room: self.room.clone(),
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RemoteVideoTrack {
+ pub fn sid(&self) -> TrackSid {
+ self.server_track.sid.clone()
+ }
+
+ pub fn publisher_id(&self) -> ParticipantIdentity {
+ self.server_track.publisher_id.clone()
+ }
+
+ pub fn rtc_track(&self) -> RtcVideoTrack {
+ RtcVideoTrack {
+ _server_track: self.server_track.clone(),
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RtcTrack {
+ pub fn enabled(&self) -> bool {
+ match self {
+ RtcTrack::Audio(track) => track.enabled(),
+ RtcTrack::Video(track) => track.enabled(),
+ }
+ }
+
+ pub fn set_enabled(&self, enabled: bool) {
+ match self {
+ RtcTrack::Audio(track) => track.set_enabled(enabled),
+ RtcTrack::Video(_) => {}
+ }
+ }
+}
+
+#[cfg(not(target_os = "windows"))]
+impl RtcAudioTrack {
+ pub fn set_enabled(&self, enabled: bool) {
+ if let Some(room) = self.room.upgrade() {
+ let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks;
+ if enabled {
+ paused_audio_tracks.remove(&self.server_track.sid);
+ } else {
+ paused_audio_tracks.insert(self.server_track.sid.clone());
+ }
+ }
+ }
+
+ pub fn enabled(&self) -> bool {
+ if let Some(room) = self.room.upgrade() {
+ !room
+ .0
+ .lock()
+ .paused_audio_tracks
+ .contains(&self.server_track.sid)
+ } else {
+ false
+ }
+ }
+}
+
+impl RtcVideoTrack {
+ pub fn enabled(&self) -> bool {
+ true
+ }
+}
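A minimal sketch of how the fake RtcAudioTrack's enabled state works, assuming an audio track value named audio_track (a placeholder) backed by a live test room:

    // set_enabled(false) records the track's sid in the room's paused_audio_tracks
    // set; enabled() simply checks membership in that set.
    audio_track.set_enabled(false);
    assert!(!audio_track.enabled());
    audio_track.set_enabled(true);
    assert!(audio_track.enabled());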
@@ -0,0 +1,136 @@
+use super::track::{RtcAudioTrack, RtcVideoTrack};
+use futures::Stream;
+use livekit::webrtc as real;
+use std::{
+ pin::Pin,
+ task::{Context, Poll},
+};
+
+pub mod video_stream {
+ use super::*;
+
+ pub mod native {
+ use super::*;
+ use real::video_frame::BoxVideoFrame;
+
+ pub struct NativeVideoStream {
+ pub track: RtcVideoTrack,
+ }
+
+ impl NativeVideoStream {
+ pub fn new(track: RtcVideoTrack) -> Self {
+ Self { track }
+ }
+ }
+
+ impl Stream for NativeVideoStream {
+ type Item = BoxVideoFrame;
+
+ fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
+ Poll::Pending
+ }
+ }
+ }
+}
+
+pub mod audio_stream {
+ use super::*;
+
+ pub mod native {
+ use super::*;
+ use real::audio_frame::AudioFrame;
+
+ pub struct NativeAudioStream {
+ pub track: RtcAudioTrack,
+ }
+
+ impl NativeAudioStream {
+ pub fn new(track: RtcAudioTrack, _sample_rate: i32, _num_channels: i32) -> Self {
+ Self { track }
+ }
+ }
+
+ impl Stream for NativeAudioStream {
+ type Item = AudioFrame<'static>;
+
+ fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
+ Poll::Pending
+ }
+ }
+ }
+}
+
+pub mod audio_source {
+ use super::*;
+
+ pub use real::audio_source::AudioSourceOptions;
+
+ pub mod native {
+ use std::sync::Arc;
+
+ use super::*;
+ use real::{audio_frame::AudioFrame, RtcError};
+
+ #[derive(Clone)]
+ pub struct NativeAudioSource {
+ pub options: Arc<AudioSourceOptions>,
+ pub sample_rate: u32,
+ pub num_channels: u32,
+ }
+
+ impl NativeAudioSource {
+ pub fn new(
+ options: AudioSourceOptions,
+ sample_rate: u32,
+ num_channels: u32,
+ _queue_size_ms: u32,
+ ) -> Self {
+ Self {
+ options: Arc::new(options),
+ sample_rate,
+ num_channels,
+ }
+ }
+
+ pub async fn capture_frame(&self, _frame: &AudioFrame<'_>) -> Result<(), RtcError> {
+ Ok(())
+ }
+ }
+ }
+
+ pub enum RtcAudioSource {
+ Native(native::NativeAudioSource),
+ }
+}
+
+pub use livekit::webrtc::audio_frame;
+pub use livekit::webrtc::video_frame;
+
+pub mod video_source {
+ use super::*;
+ pub use real::video_source::VideoResolution;
+
+ pub struct RTCVideoSource;
+
+ pub mod native {
+ use super::*;
+ use real::video_frame::{VideoBuffer, VideoFrame};
+
+ #[derive(Clone)]
+ pub struct NativeVideoSource {
+ pub resolution: VideoResolution,
+ }
+
+ impl NativeVideoSource {
+ pub fn new(resolution: super::VideoResolution) -> Self {
+ Self { resolution }
+ }
+
+ pub fn capture_frame<T: AsRef<dyn VideoBuffer>>(&self, _frame: &VideoFrame<T>) {}
+ }
+ }
+
+ pub enum RtcVideoSource {
+ Native(native::NativeVideoSource),
+ }
+}
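A sketch of constructing the stub audio source above. AudioSourceOptions is re-exported from the real livekit crate, and the Default construction used here is an assumption; capture_frame is a no-op in this fake:

    let source = audio_source::native::NativeAudioSource::new(
        audio_source::AudioSourceOptions::default(), // assumed Default impl on the re-exported type
        48000, // sample rate in Hz
        2,     // number of channels
        100,   // queue size in ms (ignored by the stub)
    );
    let rtc_source = audio_source::RtcAudioSource::Native(source);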
@@ -0,0 +1,2 @@
+[livekit_client_test]
+rustflags = ["-C", "link-args=-ObjC"]
@@ -1,5 +1,5 @@
[package]
-name = "live_kit_client"
+name = "livekit_client_macos"
version = "0.1.0"
edition = "2021"
description = "Bindings to LiveKit Swift client SDK"
@@ -10,7 +10,7 @@ license = "GPL-3.0-or-later"
workspace = true
[lib]
-path = "src/live_kit_client.rs"
+path = "src/livekit_client.rs"
doctest = false
[[example]]
@@ -22,7 +22,7 @@ test-support = [
"async-trait",
"collections/test-support",
"gpui/test-support",
- "live_kit_server",
+ "livekit_server",
"nanoid",
]
@@ -33,7 +33,7 @@ async-trait = { workspace = true, optional = true }
collections = { workspace = true, optional = true }
futures.workspace = true
gpui = { workspace = true, optional = true }
-live_kit_server = { workspace = true, optional = true }
+livekit_server = { workspace = true, optional = true }
log.workspace = true
media.workspace = true
nanoid = { workspace = true, optional = true}
@@ -47,14 +47,14 @@ core-foundation.workspace = true
async-trait = { workspace = true }
collections = { workspace = true }
gpui = { workspace = true }
-live_kit_server.workspace = true
+livekit_server.workspace = true
nanoid.workspace = true
[dev-dependencies]
async-trait.workspace = true
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
-live_kit_server.workspace = true
+livekit_server.workspace = true
nanoid.workspace = true
sha2.workspace = true
simplelog.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -2,12 +2,12 @@ use std::time::Duration;
use futures::StreamExt;
use gpui::{actions, KeyBinding, Menu, MenuItem};
-use live_kit_client::{LocalAudioTrack, LocalVideoTrack, Room, RoomUpdate};
-use live_kit_server::token::{self, VideoGrant};
+use livekit_client_macos::{LocalAudioTrack, LocalVideoTrack, Room, RoomUpdate};
+use livekit_server::token::{self, VideoGrant};
use log::LevelFilter;
use simplelog::SimpleLogger;
-actions!(live_kit_client, [Quit]);
+actions!(livekit_client_macos, [Quit]);
fn main() {
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
@@ -4,7 +4,7 @@ use async_trait::async_trait;
use collections::{btree_map::Entry as BTreeEntry, hash_map::Entry, BTreeMap, HashMap, HashSet};
use futures::Stream;
use gpui::{BackgroundExecutor, SurfaceSource};
-use live_kit_server::{proto, token};
+use livekit_server::{proto, token};
use parking_lot::Mutex;
use postage::watch;
@@ -102,7 +102,7 @@ impl TestServer {
#[cfg(any(test, feature = "test-support"))]
self.executor.simulate_random_delay().await;
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let identity = claims.sub.unwrap().to_string();
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
@@ -150,7 +150,7 @@ impl TestServer {
// todo(linux): Remove this once the cross-platform LiveKit implementation is merged
#[cfg(any(test, feature = "test-support"))]
self.executor.simulate_random_delay().await;
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let identity = claims.sub.unwrap().to_string();
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
@@ -224,7 +224,7 @@ impl TestServer {
// todo(linux): Remove this once the cross-platform LiveKit implementation is merged
#[cfg(any(test, feature = "test-support"))]
self.executor.simulate_random_delay().await;
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let identity = claims.sub.unwrap().to_string();
let room_name = claims.video.room.unwrap();
@@ -280,7 +280,7 @@ impl TestServer {
#[cfg(any(test, feature = "test-support"))]
self.executor.simulate_random_delay().await;
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let identity = claims.sub.unwrap().to_string();
let room_name = claims.video.room.unwrap();
@@ -332,7 +332,7 @@ impl TestServer {
}
fn set_track_muted(&self, token: &str, track_sid: &str, muted: bool) -> Result<()> {
- let claims = live_kit_server::token::validate(token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
let identity = claims.sub.unwrap();
let mut server_rooms = self.rooms.lock();
@@ -363,7 +363,7 @@ impl TestServer {
}
fn is_track_muted(&self, token: &str, track_sid: &str) -> Option<bool> {
- let claims = live_kit_server::token::validate(token, &self.secret_key).ok()?;
+ let claims = livekit_server::token::validate(token, &self.secret_key).ok()?;
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
@@ -378,7 +378,7 @@ impl TestServer {
}
fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
let identity = claims.sub.unwrap();
@@ -401,7 +401,7 @@ impl TestServer {
}
fn audio_tracks(&self, token: String) -> Result<Vec<Arc<RemoteAudioTrack>>> {
- let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let claims = livekit_server::token::validate(&token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
let identity = claims.sub.unwrap();
@@ -455,7 +455,7 @@ pub struct TestApiClient {
}
#[async_trait]
-impl live_kit_server::api::Client for TestApiClient {
+impl livekit_server::api::Client for TestApiClient {
fn url(&self) -> &str {
&self.url
}
@@ -482,7 +482,7 @@ impl live_kit_server::api::Client for TestApiClient {
&self,
room: String,
identity: String,
- permission: live_kit_server::proto::ParticipantPermission,
+ permission: livekit_server::proto::ParticipantPermission,
) -> Result<()> {
let server = TestServer::get(&self.url)?;
server
@@ -1,5 +1,5 @@
[package]
-name = "live_kit_server"
+name = "livekit_server"
version = "0.1.0"
edition = "2021"
description = "SDK for the LiveKit server API"
@@ -10,7 +10,7 @@ license = "AGPL-3.0-or-later"
workspace = true
[lib]
-path = "src/live_kit_server.rs"
+path = "src/livekit_server.rs"
doctest = false
[dependencies]
@@ -17,6 +17,7 @@ anyhow.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation.workspace = true
+ctor.workspace = true
foreign-types = "0.5"
metal = "0.29"
objc = "0.2"
@@ -253,11 +253,14 @@ pub mod core_media {
}
}
- pub fn image_buffer(&self) -> CVImageBuffer {
+ pub fn image_buffer(&self) -> Option<CVImageBuffer> {
unsafe {
- CVImageBuffer::wrap_under_get_rule(CMSampleBufferGetImageBuffer(
- self.as_concrete_TypeRef(),
- ))
+ let ptr = CMSampleBufferGetImageBuffer(self.as_concrete_TypeRef());
+ if ptr.is_null() {
+ None
+ } else {
+ Some(CVImageBuffer::wrap_under_get_rule(ptr))
+ }
}
}
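Since image_buffer now returns an Option, callers have to handle sample buffers that carry no pixel data. A minimal sketch, where sample_buffer is assumed to be a CMSampleBuffer obtained elsewhere and process_frame is a placeholder:

    if let Some(image_buffer) = sample_buffer.image_buffer() {
        process_frame(image_buffer); // placeholder for the caller's frame handling
    } else {
        // no pixel data attached to this sample; skip it
    }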
@@ -432,7 +432,7 @@ message Room {
repeated Participant participants = 2;
repeated PendingParticipant pending_participants = 3;
repeated Follower followers = 4;
- string live_kit_room = 5;
+ string livekit_room = 5;
}
message Participant {
@@ -294,9 +294,9 @@ impl TitleBar {
let is_muted = room.is_muted();
let is_deafened = room.is_deafened().unwrap_or(false);
let is_screen_sharing = room.is_screen_sharing();
- let can_use_microphone = room.can_use_microphone();
+ let can_use_microphone = room.can_use_microphone(cx);
let can_share_projects = room.can_share_projects();
- let platform_supported = match self.platform_style {
+ let screen_sharing_supported = match self.platform_style {
PlatformStyle::Mac => true,
PlatformStyle::Linux | PlatformStyle::Windows => false,
};
@@ -363,9 +363,7 @@ impl TitleBar {
)
.tooltip(move |cx| {
Tooltip::text(
- if !platform_supported {
- "Cannot share microphone"
- } else if is_muted {
+ if is_muted {
"Unmute microphone"
} else {
"Mute microphone"
@@ -375,56 +373,45 @@ impl TitleBar {
})
.style(ButtonStyle::Subtle)
.icon_size(IconSize::Small)
- .selected(platform_supported && is_muted)
- .disabled(!platform_supported)
+ .selected(is_muted)
.selected_style(ButtonStyle::Tinted(TintColor::Negative))
.on_click(move |_, cx| {
toggle_mute(&Default::default(), cx);
})
.into_any_element(),
);
- }
- children.push(
- IconButton::new(
- "mute-sound",
- if is_deafened {
- ui::IconName::AudioOff
- } else {
- ui::IconName::AudioOn
- },
- )
- .style(ButtonStyle::Subtle)
- .selected_style(ButtonStyle::Tinted(TintColor::Negative))
- .icon_size(IconSize::Small)
- .selected(is_deafened)
- .disabled(!platform_supported)
- .tooltip(move |cx| {
- if !platform_supported {
- Tooltip::text("Cannot share microphone", cx)
- } else if can_use_microphone {
+ children.push(
+ IconButton::new(
+ "mute-sound",
+ if is_deafened {
+ ui::IconName::AudioOff
+ } else {
+ ui::IconName::AudioOn
+ },
+ )
+ .style(ButtonStyle::Subtle)
+ .selected_style(ButtonStyle::Tinted(TintColor::Negative))
+ .icon_size(IconSize::Small)
+ .selected(is_deafened)
+ .tooltip(move |cx| {
Tooltip::with_meta("Deafen Audio", None, "Mic will be muted", cx)
- } else {
- Tooltip::text("Deafen Audio", cx)
- }
- })
- .on_click(move |_, cx| toggle_deafen(&Default::default(), cx))
- .into_any_element(),
- );
+ })
+ .on_click(move |_, cx| toggle_deafen(&Default::default(), cx))
+ .into_any_element(),
+ );
+ }
- if can_share_projects {
+ if screen_sharing_supported {
children.push(
IconButton::new("screen-share", ui::IconName::Screen)
.style(ButtonStyle::Subtle)
.icon_size(IconSize::Small)
.selected(is_screen_sharing)
- .disabled(!platform_supported)
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.tooltip(move |cx| {
Tooltip::text(
- if !platform_supported {
- "Cannot share screen"
- } else if is_screen_sharing {
+ if is_screen_sharing {
"Stop Sharing Screen"
} else {
"Share Screen"
@@ -24,6 +24,8 @@ test-support = [
"gpui/test-support",
"fs/test-support",
]
+livekit-macos = ["call/livekit-macos"]
+livekit-cross-platform = ["call/livekit-cross-platform"]
[dependencies]
anyhow.workspace = true
@@ -1,126 +1,282 @@
-use crate::{
- item::{Item, ItemEvent},
- ItemNavHistory, WorkspaceId,
-};
-use anyhow::Result;
-use call::participant::{Frame, RemoteVideoTrack};
-use client::{proto::PeerId, User};
-use futures::StreamExt;
-use gpui::{
- div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
- ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext,
- WindowContext,
-};
-use std::sync::{Arc, Weak};
-use ui::{prelude::*, Icon, IconName};
-
-pub enum Event {
- Close,
-}
+#[cfg(any(
+ all(
+ target_os = "macos",
+ feature = "livekit-cross-platform",
+ not(feature = "livekit-macos"),
+ ),
+ all(not(target_os = "macos"), feature = "livekit-cross-platform"),
+))]
+mod cross_platform {
+ use crate::{
+ item::{Item, ItemEvent},
+ ItemNavHistory, WorkspaceId,
+ };
+ use call::{RemoteVideoTrack, RemoteVideoTrackView};
+ use client::{proto::PeerId, User};
+ use gpui::{
+ div, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
+ ParentElement, Render, SharedString, Styled, View, ViewContext, VisualContext,
+ WindowContext,
+ };
+ use std::sync::Arc;
+ use ui::{prelude::*, Icon, IconName};
-pub struct SharedScreen {
- track: Weak<RemoteVideoTrack>,
- frame: Option<Frame>,
- pub peer_id: PeerId,
- user: Arc<User>,
- nav_history: Option<ItemNavHistory>,
- _maintain_frame: Task<Result<()>>,
- focus: FocusHandle,
-}
+ pub enum Event {
+ Close,
+ }
-impl SharedScreen {
- pub fn new(
- track: &Arc<RemoteVideoTrack>,
- peer_id: PeerId,
+ pub struct SharedScreen {
+ pub peer_id: PeerId,
user: Arc<User>,
- cx: &mut ViewContext<Self>,
- ) -> Self {
- cx.focus_handle();
- let mut frames = track.frames();
- Self {
- track: Arc::downgrade(track),
- frame: None,
- peer_id,
- user,
- nav_history: Default::default(),
- _maintain_frame: cx.spawn(|this, mut cx| async move {
- while let Some(frame) = frames.next().await {
- this.update(&mut cx, |this, cx| {
- this.frame = Some(frame);
- cx.notify();
- })?;
- }
- this.update(&mut cx, |_, cx| cx.emit(Event::Close))?;
- Ok(())
- }),
- focus: cx.focus_handle(),
+ nav_history: Option<ItemNavHistory>,
+ view: View<RemoteVideoTrackView>,
+ focus: FocusHandle,
+ }
+
+ impl SharedScreen {
+ pub fn new(
+ track: RemoteVideoTrack,
+ peer_id: PeerId,
+ user: Arc<User>,
+ cx: &mut ViewContext<Self>,
+ ) -> Self {
+ let view = cx.new_view(|cx| RemoteVideoTrackView::new(track.clone(), cx));
+ cx.subscribe(&view, |_, _, ev, cx| match ev {
+ call::RemoteVideoTrackViewEvent::Close => cx.emit(Event::Close),
+ })
+ .detach();
+ Self {
+ view,
+ peer_id,
+ user,
+ nav_history: Default::default(),
+ focus: cx.focus_handle(),
+ }
}
}
-}
-impl EventEmitter<Event> for SharedScreen {}
+ impl EventEmitter<Event> for SharedScreen {}
-impl FocusableView for SharedScreen {
- fn focus_handle(&self, _: &AppContext) -> FocusHandle {
- self.focus.clone()
+ impl FocusableView for SharedScreen {
+ fn focus_handle(&self, _: &AppContext) -> FocusHandle {
+ self.focus.clone()
+ }
}
-}
-impl Render for SharedScreen {
- fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
- div()
- .bg(cx.theme().colors().editor_background)
- .track_focus(&self.focus)
- .key_context("SharedScreen")
- .size_full()
- .children(
- self.frame
- .as_ref()
- .map(|frame| surface(frame.image()).size_full()),
- )
+ impl Render for SharedScreen {
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ div()
+ .bg(cx.theme().colors().editor_background)
+ .track_focus(&self.focus)
+ .key_context("SharedScreen")
+ .size_full()
+ .child(self.view.clone())
+ }
}
-}
-impl Item for SharedScreen {
- type Event = Event;
+ impl Item for SharedScreen {
+ type Event = Event;
- fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
- Some(format!("{}'s screen", self.user.github_login).into())
- }
+ fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
+ Some(format!("{}'s screen", self.user.github_login).into())
+ }
- fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
- if let Some(nav_history) = self.nav_history.as_mut() {
- nav_history.push::<()>(None, cx);
+ fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
+ if let Some(nav_history) = self.nav_history.as_mut() {
+ nav_history.push::<()>(None, cx);
+ }
}
- }
- fn tab_icon(&self, _cx: &WindowContext) -> Option<Icon> {
- Some(Icon::new(IconName::Screen))
+ fn tab_icon(&self, _cx: &WindowContext) -> Option<Icon> {
+ Some(Icon::new(IconName::Screen))
+ }
+
+ fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
+ Some(format!("{}'s screen", self.user.github_login).into())
+ }
+
+ fn telemetry_event_text(&self) -> Option<&'static str> {
+ None
+ }
+
+ fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext<Self>) {
+ self.nav_history = Some(history);
+ }
+
+ fn clone_on_split(
+ &self,
+ _workspace_id: Option<WorkspaceId>,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<View<Self>> {
+ Some(cx.new_view(|cx| Self {
+ view: self.view.update(cx, |view, cx| view.clone(cx)),
+ peer_id: self.peer_id,
+ user: self.user.clone(),
+ nav_history: Default::default(),
+ focus: cx.focus_handle(),
+ }))
+ }
+
+ fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) {
+ match event {
+ Event::Close => f(ItemEvent::CloseItem),
+ }
+ }
}
+}
- fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
- Some(format!("{}'s screen", self.user.github_login).into())
+#[cfg(any(
+ all(
+ target_os = "macos",
+ feature = "livekit-cross-platform",
+ not(feature = "livekit-macos"),
+ ),
+ all(not(target_os = "macos"), feature = "livekit-cross-platform"),
+))]
+pub use cross_platform::*;
+
+#[cfg(any(
+ all(target_os = "macos", feature = "livekit-macos"),
+ all(
+ not(target_os = "macos"),
+ feature = "livekit-macos",
+ not(feature = "livekit-cross-platform")
+ )
+))]
+mod macos {
+ use crate::{
+ item::{Item, ItemEvent},
+ ItemNavHistory, WorkspaceId,
+ };
+ use anyhow::Result;
+ use call::participant::{Frame, RemoteVideoTrack};
+ use client::{proto::PeerId, User};
+ use futures::StreamExt;
+ use gpui::{
+ div, surface, AppContext, EventEmitter, FocusHandle, FocusableView, InteractiveElement,
+ ParentElement, Render, SharedString, Styled, Task, View, ViewContext, VisualContext,
+ WindowContext,
+ };
+ use std::sync::{Arc, Weak};
+ use ui::{prelude::*, Icon, IconName};
+
+ pub enum Event {
+ Close,
}
- fn telemetry_event_text(&self) -> Option<&'static str> {
- None
+ pub struct SharedScreen {
+ track: Weak<RemoteVideoTrack>,
+ frame: Option<Frame>,
+ pub peer_id: PeerId,
+ user: Arc<User>,
+ nav_history: Option<ItemNavHistory>,
+ _maintain_frame: Task<Result<()>>,
+ focus: FocusHandle,
}
- fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext<Self>) {
- self.nav_history = Some(history);
+ impl SharedScreen {
+ pub fn new(
+ track: Arc<RemoteVideoTrack>,
+ peer_id: PeerId,
+ user: Arc<User>,
+ cx: &mut ViewContext<Self>,
+ ) -> Self {
+ cx.focus_handle();
+ let mut frames = track.frames();
+ Self {
+ track: Arc::downgrade(&track),
+ frame: None,
+ peer_id,
+ user,
+ nav_history: Default::default(),
+ _maintain_frame: cx.spawn(|this, mut cx| async move {
+ while let Some(frame) = frames.next().await {
+ this.update(&mut cx, |this, cx| {
+ this.frame = Some(frame);
+ cx.notify();
+ })?;
+ }
+ this.update(&mut cx, |_, cx| cx.emit(Event::Close))?;
+ Ok(())
+ }),
+ focus: cx.focus_handle(),
+ }
+ }
}
- fn clone_on_split(
- &self,
- _workspace_id: Option<WorkspaceId>,
- cx: &mut ViewContext<Self>,
- ) -> Option<View<Self>> {
- let track = self.track.upgrade()?;
- Some(cx.new_view(|cx| Self::new(&track, self.peer_id, self.user.clone(), cx)))
+ impl EventEmitter<Event> for SharedScreen {}
+
+ impl FocusableView for SharedScreen {
+ fn focus_handle(&self, _: &AppContext) -> FocusHandle {
+ self.focus.clone()
+ }
}
+ impl Render for SharedScreen {
+ fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
+ div()
+ .bg(cx.theme().colors().editor_background)
+ .track_focus(&self.focus)
+ .key_context("SharedScreen")
+ .size_full()
+ .children(
+ self.frame
+ .as_ref()
+ .map(|frame| surface(frame.image()).size_full()),
+ )
+ }
+ }
+
+ impl Item for SharedScreen {
+ type Event = Event;
+
+ fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
+ Some(format!("{}'s screen", self.user.github_login).into())
+ }
+
+ fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
+ if let Some(nav_history) = self.nav_history.as_mut() {
+ nav_history.push::<()>(None, cx);
+ }
+ }
+
+ fn tab_icon(&self, _cx: &WindowContext) -> Option<Icon> {
+ Some(Icon::new(IconName::Screen))
+ }
+
+ fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
+ Some(format!("{}'s screen", self.user.github_login).into())
+ }
+
+ fn telemetry_event_text(&self) -> Option<&'static str> {
+ None
+ }
+
+ fn set_nav_history(&mut self, history: ItemNavHistory, _: &mut ViewContext<Self>) {
+ self.nav_history = Some(history);
+ }
- fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) {
- match event {
- Event::Close => f(ItemEvent::CloseItem),
+ fn clone_on_split(
+ &self,
+ _workspace_id: Option<WorkspaceId>,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<View<Self>> {
+ let track = self.track.upgrade()?;
+ Some(cx.new_view(|cx| Self::new(track, self.peer_id, self.user.clone(), cx)))
+ }
+
+ fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) {
+ match event {
+ Event::Close => f(ItemEvent::CloseItem),
+ }
}
}
}
+
+#[cfg(any(
+ all(target_os = "macos", feature = "livekit-macos"),
+ all(
+ not(target_os = "macos"),
+ feature = "livekit-macos",
+ not(feature = "livekit-cross-platform")
+ )
+))]
+pub use macos::*;
@@ -3944,6 +3944,17 @@ impl Workspace {
None
}
+ #[cfg(target_os = "windows")]
+ fn shared_screen_for_peer(
+ &self,
+ _peer_id: PeerId,
+ _pane: &View<Pane>,
+ _cx: &mut WindowContext,
+ ) -> Option<View<SharedScreen>> {
+ None
+ }
+
+ #[cfg(not(target_os = "windows"))]
fn shared_screen_for_peer(
&self,
peer_id: PeerId,
@@ -3962,7 +3973,7 @@ impl Workspace {
}
}
- Some(cx.new_view(|cx| SharedScreen::new(&track, peer_id, user.clone(), cx)))
+ Some(cx.new_view(|cx| SharedScreen::new(track, peer_id, user.clone(), cx)))
}
pub fn on_window_activation_changed(&mut self, cx: &mut ViewContext<Self>) {
@@ -126,6 +126,12 @@ welcome.workspace = true
workspace.workspace = true
zed_actions.workspace = true
+[target.'cfg(target_os = "macos")'.dependencies]
+workspace = { workspace = true, features = ["livekit-macos"] }
+
+[target.'cfg(not(target_os = "macos"))'.dependencies]
+workspace = { workspace = true, features = ["livekit-cross-platform"] }
+
[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true
@@ -13,6 +13,14 @@ fn main() {
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
}
+ if std::env::var("ZED_BUNDLE").ok().as_deref() == Some("true") {
+ // Find WebRTC.framework in the Frameworks folder when running as part of an application bundle.
+ println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/../Frameworks");
+ } else {
+ // Find WebRTC.framework as a sibling of the executable when running outside of an application bundle.
+ println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
+ }
+
// Weakly link ReplayKit to ensure Zed can be used on macOS 10.15+.
println!("cargo:rustc-link-arg=-Wl,-weak_framework,ReplayKit");
@@ -92,7 +92,7 @@ cp "${target_dir}/${target_triple}/release/cli" "${zed_dir}/bin/zed"
find_libs() {
ldd ${target_dir}/${target_triple}/release/zed |\
cut -d' ' -f3 |\
- grep -v '\<\(libstdc++.so\|libc.so\|libgcc_s.so\|libm.so\|libpthread.so\|libdl.so\)'
+ grep -v '\<\(libstdc++.so\|libc.so\|libgcc_s.so\|libm.so\|libpthread.so\|libdl.so\|libasound.so\)'
}
mkdir -p "${zed_dir}/lib"
@@ -22,7 +22,7 @@ extend-exclude = [
# Stripe IDs are flagged as typos.
"crates/collab/src/db/tests/processed_stripe_event_tests.rs",
# Not our typos.
- "crates/live_kit_server/",
+ "crates/livekit_server/",
# Vim makes heavy use of partial typing tables.
"crates/vim/",
# Editor and file finder rely on partial typing and custom in-string syntax.