Revert "Use livekit's Rust SDK instead of their swift SDK (#13343)" (#20809)

Issues found:

* audio does not work well with various set-ups using USB
* switching audio during initial join may leave the client with no audio
at all
* audio streaming is done on the main thread, beachballing certain
set-ups
* worse screenshare quality (seems that there's no dynamic scaling
anymore, compared to the Swift SDK)

This reverts commit 1235d0808e.

Release Notes:

- N/A
This commit is contained in:
Kirill Bulatov 2024-11-18 11:43:53 +02:00 committed by GitHub
parent 59a355da74
commit d92166f9f6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
48 changed files with 3599 additions and 4288 deletions

View file

@ -2,7 +2,7 @@
name = "live_kit_client"
version = "0.1.0"
edition = "2021"
description = "Logic for using LiveKit with GPUI"
description = "Bindings to LiveKit Swift client SDK"
publish = false
license = "GPL-3.0-or-later"
@ -19,37 +19,42 @@ name = "test_app"
[features]
no-webrtc = []
test-support = [
"async-trait",
"collections/test-support",
"gpui/test-support",
"live_kit_server",
"nanoid",
]
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
collections.workspace = true
cpal = "0.15"
async-broadcast = "0.7"
async-trait = { workspace = true, optional = true }
collections = { workspace = true, optional = true }
futures.workspace = true
gpui.workspace = true
http_2 = { package = "http", version = "0.2.1" }
live_kit_server.workspace = true
gpui = { workspace = true, optional = true }
live_kit_server = { workspace = true, optional = true }
log.workspace = true
media.workspace = true
nanoid = { workspace = true, optional = true}
parking_lot.workspace = true
postage.workspace = true
util.workspace = true
http_client.workspace = true
[target.'cfg(not(target_os = "windows"))'.dependencies]
livekit.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation.workspace = true
[target.'cfg(all(not(target_os = "macos")))'.dependencies]
async-trait = { workspace = true }
collections = { workspace = true }
gpui = { workspace = true }
live_kit_server.workspace = true
nanoid.workspace = true
[dev-dependencies]
async-trait.workspace = true
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
live_kit_server.workspace = true
nanoid.workspace = true
sha2.workspace = true
simplelog.workspace = true

View file

@ -0,0 +1,52 @@
{
"object": {
"pins": [
{
"package": "LiveKit",
"repositoryURL": "https://github.com/livekit/client-sdk-swift.git",
"state": {
"branch": null,
"revision": "7331b813a5ab8a95cfb81fb2b4ed10519428b9ff",
"version": "1.0.12"
}
},
{
"package": "Promises",
"repositoryURL": "https://github.com/google/promises.git",
"state": {
"branch": null,
"revision": "ec957ccddbcc710ccc64c9dcbd4c7006fcf8b73a",
"version": "2.2.0"
}
},
{
"package": "WebRTC",
"repositoryURL": "https://github.com/webrtc-sdk/Specs.git",
"state": {
"branch": null,
"revision": "2f6bab30c8df0fe59ab3e58bc99097f757f85f65",
"version": "104.5112.17"
}
},
{
"package": "swift-log",
"repositoryURL": "https://github.com/apple/swift-log.git",
"state": {
"branch": null,
"revision": "32e8d724467f8fe623624570367e3d50c5638e46",
"version": "1.5.2"
}
},
{
"package": "SwiftProtobuf",
"repositoryURL": "https://github.com/apple/swift-protobuf.git",
"state": {
"branch": null,
"revision": "ce20dc083ee485524b802669890291c0d8090170",
"version": "1.22.1"
}
}
]
},
"version": 1
}

View file

@ -0,0 +1,27 @@
// swift-tools-version: 5.5
import PackageDescription
// Static library that bridges the LiveKit Swift SDK to Rust via C entry points.
// Built by the live_kit_client crate's build script with `swift build`.
let package = Package(
    name: "LiveKitBridge",
    platforms: [
        // Must match MACOS_TARGET_VERSION used by the Rust build script.
        .macOS(.v10_15)
    ],
    products: [
        // Products define the executables and libraries a package produces, and make them visible to other packages.
        // A static library so it can be linked directly into the Rust binary.
        .library(
            name: "LiveKitBridge",
            type: .static,
            targets: ["LiveKitBridge"]),
    ],
    dependencies: [
        // Pinned exactly; the build script resolves with --disable-automatic-resolution.
        .package(url: "https://github.com/livekit/client-sdk-swift.git", .exact("1.0.12")),
    ],
    targets: [
        // Targets are the basic building blocks of a package. A target can define a module or a test suite.
        // Targets can depend on other targets in this package, and on products in packages this package depends on.
        .target(
            name: "LiveKitBridge",
            dependencies: [.product(name: "LiveKit", package: "client-sdk-swift")]),
    ]
)

View file

@ -0,0 +1,3 @@
# LiveKitBridge
Swift package that bridges the LiveKit Swift client SDK to the Rust `live_kit_client` crate through C-compatible (`@_cdecl`) entry points.

View file

@ -0,0 +1,383 @@
import Foundation
import LiveKit
import WebRTC
import ScreenCaptureKit
// Bridges LiveKit `RoomDelegate` callbacks to C function pointers supplied by
// the Rust side. `data` is an opaque pointer owned by Rust and is passed back
// verbatim as the first argument of every callback. Track and publication
// pointers handed to callbacks are unretained borrows (`passUnretained`); the
// Rust side must retain them if it holds on to them.
class LKRoomDelegate: RoomDelegate {
    var data: UnsafeRawPointer
    var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
    var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void
    var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
    var onMuteChangedFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void
    var onActiveSpeakersChanged: @convention(c) (UnsafeRawPointer, CFArray) -> Void
    var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
    var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
    var onDidPublishOrUnpublishLocalAudioTrack: @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void
    var onDidPublishOrUnpublishLocalVideoTrack: @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void

    init(
        data: UnsafeRawPointer,
        onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
        onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void,
        onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
        onMuteChangedFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void,
        // Fix: this parameter was missing `@escaping`, unlike every other
        // callback; all of them are stored in properties and must escape.
        onActiveSpeakersChanged: @escaping @convention(c) (UnsafeRawPointer, CFArray) -> Void,
        onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
        onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
        onDidPublishOrUnpublishLocalAudioTrack: @escaping @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void,
        onDidPublishOrUnpublishLocalVideoTrack: @escaping @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void
    )
    {
        self.data = data
        self.onDidDisconnect = onDidDisconnect
        self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack
        self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack
        self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
        self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
        self.onMuteChangedFromRemoteAudioTrack = onMuteChangedFromRemoteAudioTrack
        self.onActiveSpeakersChanged = onActiveSpeakersChanged
        self.onDidPublishOrUnpublishLocalAudioTrack = onDidPublishOrUnpublishLocalAudioTrack
        self.onDidPublishOrUnpublishLocalVideoTrack = onDidPublishOrUnpublishLocalVideoTrack
    }

    // Only fully-disconnected transitions are forwarded to Rust.
    func room(_ room: Room, didUpdate connectionState: ConnectionState, oldValue: ConnectionState) {
        if connectionState.isDisconnected {
            self.onDidDisconnect(self.data)
        }
    }

    func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
        if track.kind == .video {
            self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
        } else if track.kind == .audio {
            self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque(), Unmanaged.passUnretained(publication).toOpaque())
        }
    }

    // Mute state changes are only surfaced for audio publications.
    func room(_ room: Room, participant: Participant, didUpdate publication: TrackPublication, muted: Bool) {
        if publication.kind == .audio {
            self.onMuteChangedFromRemoteAudioTrack(self.data, publication.sid as CFString, muted)
        }
    }

    func room(_ room: Room, didUpdate speakers: [Participant]) {
        guard let speaker_ids = speakers.compactMap({ $0.identity as CFString }) as CFArray? else { return }
        self.onActiveSpeakersChanged(self.data, speaker_ids)
    }

    func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
        if track.kind == .video {
            self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
        } else if track.kind == .audio {
            self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString)
        }
    }

    // Local publish/unpublish both funnel through the same callback pair,
    // distinguished by the trailing Bool (true = published).
    func room(_ room: Room, localParticipant: LocalParticipant, didPublish publication: LocalTrackPublication) {
        if publication.kind == .video {
            self.onDidPublishOrUnpublishLocalVideoTrack(self.data, Unmanaged.passUnretained(publication).toOpaque(), true)
        } else if publication.kind == .audio {
            self.onDidPublishOrUnpublishLocalAudioTrack(self.data, Unmanaged.passUnretained(publication).toOpaque(), true)
        }
    }

    func room(_ room: Room, localParticipant: LocalParticipant, didUnpublish publication: LocalTrackPublication) {
        if publication.kind == .video {
            self.onDidPublishOrUnpublishLocalVideoTrack(self.data, Unmanaged.passUnretained(publication).toOpaque(), false)
        } else if publication.kind == .audio {
            self.onDidPublishOrUnpublishLocalAudioTrack(self.data, Unmanaged.passUnretained(publication).toOpaque(), false)
        }
    }
}
// Adapts LiveKit's `VideoRenderer` protocol to a pair of C callbacks.
// `onFrame` returns `false` when the Rust consumer no longer wants frames, at
// which point the renderer detaches itself from its track on the main queue.
// `onDrop` runs in `deinit` so the Rust side knows `data` is no longer used.
class LKVideoRenderer: NSObject, VideoRenderer {
    var data: UnsafeRawPointer
    var onFrame: @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool
    var onDrop: @convention(c) (UnsafeRawPointer) -> Void
    // Required by the `VideoRenderer` protocol; this bridge does not opt in
    // to adaptive streaming, so these stay at their defaults.
    var adaptiveStreamIsEnabled: Bool = false
    var adaptiveStreamSize: CGSize = .zero
    // Weak back-reference so renderer and track don't retain each other;
    // set by LKVideoTrackAddRenderer so renderFrame can detach itself.
    weak var track: VideoTrack?
    init(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) {
        self.data = data
        self.onFrame = onFrame
        self.onDrop = onDrop
    }
    deinit {
        // Notify Rust that the `data` pointer will not be called back again.
        self.onDrop(self.data)
    }
    // Protocol requirement; size changes are ignored by this bridge.
    func setSize(_ size: CGSize) {
    }
    func renderFrame(_ frame: RTCVideoFrame?) {
        let buffer = frame?.buffer as? RTCCVPixelBuffer
        if let pixelBuffer = buffer?.pixelBuffer {
            if !self.onFrame(self.data, pixelBuffer) {
                // Consumer rejected the frame: detach on the main queue
                // (renderFrame is invoked from WebRTC's capture thread).
                DispatchQueue.main.async {
                    self.track?.remove(videoRenderer: self)
                }
            }
        }
    }
}
// C entry point: allocates an `LKRoomDelegate` that forwards room events to
// the given C callbacks. Returns a +1-retained opaque pointer; the Rust side
// owns the reference and must eventually balance it with a release.
@_cdecl("LKRoomDelegateCreate")
public func LKRoomDelegateCreate(
    data: UnsafeRawPointer,
    onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
    onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void,
    onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
    onMuteChangedFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void,
    onActiveSpeakerChanged: @escaping @convention(c) (UnsafeRawPointer, CFArray) -> Void,
    onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
    onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
    onDidPublishOrUnpublishLocalAudioTrack: @escaping @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void,
    onDidPublishOrUnpublishLocalVideoTrack: @escaping @convention(c) (UnsafeRawPointer, UnsafeRawPointer, Bool) -> Void
) -> UnsafeMutableRawPointer {
    let delegate = LKRoomDelegate(
        data: data,
        onDidDisconnect: onDidDisconnect,
        onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack,
        onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack,
        onMuteChangedFromRemoteAudioTrack: onMuteChangedFromRemoteAudioTrack,
        onActiveSpeakersChanged: onActiveSpeakerChanged,
        onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
        onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack,
        onDidPublishOrUnpublishLocalAudioTrack: onDidPublishOrUnpublishLocalAudioTrack,
        onDidPublishOrUnpublishLocalVideoTrack: onDidPublishOrUnpublishLocalVideoTrack
    )
    // passRetained: ownership transfers to the caller.
    return Unmanaged.passRetained(delegate).toOpaque()
}
// C entry point: creates a `Room` wired to a previously created delegate.
// The delegate pointer is borrowed (not consumed); the returned Room pointer
// is +1 retained and owned by the Rust caller.
@_cdecl("LKRoomCreate")
public func LKRoomCreate(delegate: UnsafeRawPointer) -> UnsafeMutableRawPointer {
    let delegate = Unmanaged<LKRoomDelegate>.fromOpaque(delegate).takeUnretainedValue()
    return Unmanaged.passRetained(Room(delegate: delegate)).toOpaque()
}
// C entry point: asynchronously connects `room` to `url` with `token`.
// Invokes `callback` exactly once with `nil` on success or a localized error
// description on failure; `callback_data` is passed through untouched.
@_cdecl("LKRoomConnect")
public func LKRoomConnect(room: UnsafeRawPointer, url: CFString, token: CFString, callback: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void, callback_data: UnsafeRawPointer) {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    room.connect(url as String, token as String).then { _ in
        // Pass `nil` directly instead of the previous
        // `UnsafeRawPointer(nil) as! CFString?` force-cast, which was a
        // roundabout (and fragile) way to produce the same nil optional.
        callback(callback_data, nil)
    }.catch { error in
        callback(callback_data, error.localizedDescription as CFString)
    }
}
// C entry point: disconnects the room. The pointer is borrowed; ownership of
// the Room stays with the Rust caller.
@_cdecl("LKRoomDisconnect")
public func LKRoomDisconnect(room: UnsafeRawPointer) {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    room.disconnect()
}
// C entry point: publishes a local video track. On success the callback
// receives a +1-retained `LocalTrackPublication` pointer (owned by Rust);
// on failure it receives nil plus a localized error string.
@_cdecl("LKRoomPublishVideoTrack")
public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    let track = Unmanaged<LocalVideoTrack>.fromOpaque(track).takeUnretainedValue()
    room.localParticipant?.publishVideoTrack(track: track).then { publication in
        callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
    }.catch { error in
        callback(callback_data, nil, error.localizedDescription as CFString)
    }
    // NOTE(review): if `localParticipant` is nil the callback is never
    // invoked — presumably the Rust side only calls this after connecting.
}
// C entry point: publishes a local audio track. Mirrors
// LKRoomPublishVideoTrack: success hands the callback a +1-retained
// publication pointer; failure hands it a localized error string.
@_cdecl("LKRoomPublishAudioTrack")
public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    let track = Unmanaged<LocalAudioTrack>.fromOpaque(track).takeUnretainedValue()
    room.localParticipant?.publishAudioTrack(track: track).then { publication in
        callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
    }.catch { error in
        callback(callback_data, nil, error.localizedDescription as CFString)
    }
}
// C entry point: unpublishes a local track publication. Fire-and-forget —
// the returned promise is deliberately discarded.
@_cdecl("LKRoomUnpublishTrack")
public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    let _ = room.localParticipant?.unpublish(publication: publication)
}
// C entry point: returns the remote audio tracks published by the participant
// with the given identity, or nil if no such participant is in the room.
// Track references in the returned CFArray are borrowed, not retained.
@_cdecl("LKRoomAudioTracksForRemoteParticipant")
public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    for (_, participant) in room.remoteParticipants {
        if participant.identity == participantId as String {
            return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray?
        }
    }
    return nil;
}
// C entry point: like LKRoomAudioTracksForRemoteParticipant but returns the
// `RemoteTrackPublication`s rather than the tracks themselves. Returns nil
// when the participant is not found.
@_cdecl("LKRoomAudioTrackPublicationsForRemoteParticipant")
public func LKRoomAudioTrackPublicationsForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    for (_, participant) in room.remoteParticipants {
        if participant.identity == participantId as String {
            return participant.audioTracks.compactMap { $0 as? RemoteTrackPublication } as CFArray?
        }
    }
    return nil;
}
// C entry point: returns the remote video tracks published by the participant
// with the given identity, or nil if no such participant is in the room.
// Track references in the returned CFArray are borrowed, not retained.
@_cdecl("LKRoomVideoTracksForRemoteParticipant")
public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
    let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
    let identity = participantId as String
    guard let participant = room.remoteParticipants.values.first(where: { $0.identity == identity }) else {
        return nil
    }
    return participant.videoTracks.compactMap { $0.track as? RemoteVideoTrack } as CFArray?
}
// C entry point: creates a local microphone track with echo cancellation and
// noise suppression enabled. Returns a +1-retained pointer owned by Rust.
@_cdecl("LKLocalAudioTrackCreateTrack")
public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer {
    let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions(
        echoCancellation: true,
        noiseSuppression: true
    ))
    return Unmanaged.passRetained(track).toOpaque()
}
// C entry point: creates a screen-share video track for the given display.
// The display pointer is borrowed; the returned track pointer is +1 retained.
// Uses the `.legacy` capture method, matching LKDisplaySources below.
@_cdecl("LKCreateScreenShareTrackForDisplay")
public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
    let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
    let track = LocalVideoTrack.createMacOSScreenShareTrack(source: display, preferredMethod: .legacy)
    return Unmanaged.passRetained(track).toOpaque()
}
// C entry point: allocates an `LKVideoRenderer` wrapping the frame/drop
// callbacks. Returns a +1-retained pointer; LKVideoTrackAddRenderer later
// consumes that retain.
@_cdecl("LKVideoRendererCreate")
public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
    Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
}
// C entry point: attaches a renderer to a video track.
// `takeRetainedValue` consumes the +1 from LKVideoRendererCreate, so the
// renderer's lifetime is now governed by the track's renderer list.
@_cdecl("LKVideoTrackAddRenderer")
public func LKVideoTrackAddRenderer(track: UnsafeRawPointer, renderer: UnsafeRawPointer) {
    let track = Unmanaged<Track>.fromOpaque(track).takeUnretainedValue() as! VideoTrack
    let renderer = Unmanaged<LKVideoRenderer>.fromOpaque(renderer).takeRetainedValue()
    renderer.track = track
    track.add(videoRenderer: renderer)
}
// C entry point: returns the server-assigned id of a remote video track.
// Force-unwraps `sid`; subscribed remote tracks are expected to have one.
@_cdecl("LKRemoteVideoTrackGetSid")
public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString {
    let track = Unmanaged<RemoteVideoTrack>.fromOpaque(track).takeUnretainedValue()
    return track.sid! as CFString
}
// C entry point: returns the server-assigned id of a remote audio track.
// Force-unwraps `sid`; subscribed remote tracks are expected to have one.
@_cdecl("LKRemoteAudioTrackGetSid")
public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString {
    let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
    return track.sid! as CFString
}
// C entry point: starts playback of a remote audio track. Fire-and-forget;
// the result of `start()` is ignored.
@_cdecl("LKRemoteAudioTrackStart")
public func LKRemoteAudioTrackStart(track: UnsafeRawPointer) {
    let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
    track.start()
}
// C entry point: stops playback of a remote audio track. Fire-and-forget;
// the result of `stop()` is ignored.
@_cdecl("LKRemoteAudioTrackStop")
public func LKRemoteAudioTrackStop(track: UnsafeRawPointer) {
    let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
    track.stop()
}
// C entry point: asynchronously enumerates shareable displays. The callback
// receives either a CFArray of sources or a localized error string; `data`
// is passed through untouched as the first argument.
@_cdecl("LKDisplaySources")
public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
    MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in
        callback(data, displaySources as CFArray, nil)
    }.catch { error in
        callback(data, nil, error.localizedDescription as CFString)
    }
}
// C entry point: mutes or unmutes a local track publication. `on_complete`
// is invoked exactly once with nil on success or a localized error string on
// failure; `callback_data` is passed through untouched.
@_cdecl("LKLocalTrackPublicationSetMute")
public func LKLocalTrackPublicationSetMute(
    publication: UnsafeRawPointer,
    muted: Bool,
    on_complete: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void,
    callback_data: UnsafeRawPointer
) {
    let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    // Select the operation up front so the completion handling is written once.
    let operation = muted ? publication.mute() : publication.unmute()
    operation.then {
        on_complete(callback_data, nil)
    }.catch { error in
        on_complete(callback_data, error.localizedDescription as CFString)
    }
}
// C entry point: reports whether a local publication is currently muted.
@_cdecl("LKLocalTrackPublicationIsMuted")
public func LKLocalTrackPublicationIsMuted(
    publication: UnsafeRawPointer
) -> Bool {
    let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    return publication.muted
}
// C entry point: enables or disables delivery of a remote publication.
// `on_complete` is invoked once with nil on success or a localized error
// string on failure; `callback_data` is passed through untouched.
@_cdecl("LKRemoteTrackPublicationSetEnabled")
public func LKRemoteTrackPublicationSetEnabled(
    publication: UnsafeRawPointer,
    enabled: Bool,
    on_complete: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void,
    callback_data: UnsafeRawPointer
) {
    let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    publication.set(enabled: enabled).then {
        on_complete(callback_data, nil)
    }.catch { error in
        on_complete(callback_data, error.localizedDescription as CFString)
    }
}
// C entry point: reports whether a remote publication is currently muted.
@_cdecl("LKRemoteTrackPublicationIsMuted")
public func LKRemoteTrackPublicationIsMuted(
    publication: UnsafeRawPointer
) -> Bool {
    let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    return publication.muted
}
// C entry point: returns the server-assigned id of a remote publication.
@_cdecl("LKRemoteTrackPublicationGetSid")
public func LKRemoteTrackPublicationGetSid(
    publication: UnsafeRawPointer
) -> CFString {
    let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    return publication.sid as CFString
}
// C entry point: returns the server-assigned id of a local publication.
@_cdecl("LKLocalTrackPublicationGetSid")
public func LKLocalTrackPublicationGetSid(
    publication: UnsafeRawPointer
) -> CFString {
    let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
    return publication.sid as CFString
}

View file

@ -0,0 +1,185 @@
use serde::Deserialize;
use std::{
env,
path::{Path, PathBuf},
process::Command,
};
const SWIFT_PACKAGE_NAME: &str = "LiveKitBridge";
/// Subset of the `target` object printed by `swift -print-target-info`
/// that this build script consumes.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SwiftTargetInfo {
    pub triple: String,
    pub unversioned_triple: String,
    pub module_triple: String,
    pub swift_runtime_compatibility_version: String,
    // `rename_all = "camelCase"` would map this field to
    // `librariesRequireRpath`; swift emits `RPath`, so rename explicitly.
    #[serde(rename = "librariesRequireRPath")]
    pub libraries_require_rpath: bool,
}
/// Subset of the `paths` object printed by `swift -print-target-info`;
/// `runtime_library_paths` feeds the `rustc-link-search` directives.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SwiftPaths {
    pub runtime_library_paths: Vec<String>,
    pub runtime_library_import_paths: Vec<String>,
    pub runtime_resource_path: String,
}
/// Top-level shape of `swift -print-target-info` JSON output.
#[derive(Debug, Deserialize)]
pub struct SwiftTarget {
    pub target: SwiftTargetInfo,
    pub paths: SwiftPaths,
}
const MACOS_TARGET_VERSION: &str = "10.15.7";
fn main() {
if cfg!(all(
target_os = "macos",
not(any(test, feature = "test-support", feature = "no-webrtc")),
)) {
let swift_target = get_swift_target();
build_bridge(&swift_target);
link_swift_stdlib(&swift_target);
link_webrtc_framework(&swift_target);
// Register exported Objective-C selectors, protocols, etc when building example binaries.
println!("cargo:rustc-link-arg=-Wl,-ObjC");
}
}
/// Runs `swift build` on the bundled LiveKitBridge package for the given
/// target and emits the link-search/link-lib directives for the resulting
/// static library. Panics if the swift build fails.
fn build_bridge(swift_target: &SwiftTarget) {
    println!("cargo:rerun-if-env-changed=MACOSX_DEPLOYMENT_TARGET");
    println!("cargo:rerun-if-changed={}/Sources", SWIFT_PACKAGE_NAME);
    println!(
        "cargo:rerun-if-changed={}/Package.swift",
        SWIFT_PACKAGE_NAME
    );
    println!(
        "cargo:rerun-if-changed={}/Package.resolved",
        SWIFT_PACKAGE_NAME
    );
    let swift_package_root = swift_package_root();
    let swift_target_folder = swift_target_folder();
    let swift_cache_folder = swift_cache_folder();
    // `--disable-automatic-resolution` ensures the pinned Package.resolved is
    // used verbatim; cargo's PROFILE (debug/release) maps directly onto
    // swift's --configuration values.
    if !Command::new("swift")
        .arg("build")
        .arg("--disable-automatic-resolution")
        .args(["--configuration", &env::var("PROFILE").unwrap()])
        .args(["--triple", &swift_target.target.triple])
        .args(["--build-path".into(), swift_target_folder])
        .args(["--cache-path".into(), swift_cache_folder])
        .current_dir(&swift_package_root)
        .status()
        .unwrap()
        .success()
    {
        panic!(
            "Failed to compile swift package in {}",
            swift_package_root.display()
        );
    }
    println!(
        "cargo:rustc-link-search=native={}",
        swift_target.out_dir_path().display()
    );
    println!("cargo:rustc-link-lib=static={}", SWIFT_PACKAGE_NAME);
}
/// Adds every Swift runtime library directory reported by
/// `swift -print-target-info` to the native link search path.
fn link_swift_stdlib(swift_target: &SwiftTarget) {
    swift_target
        .paths
        .runtime_library_paths
        .iter()
        .for_each(|dir| println!("cargo:rustc-link-search=native={}", dir));
}
/// Links WebRTC.framework produced by the Swift build and copies it next to
/// the compiled binaries so tests and examples can locate it via rpath.
fn link_webrtc_framework(swift_target: &SwiftTarget) {
    let swift_out_dir_path = swift_target.out_dir_path();
    println!("cargo:rustc-link-lib=framework=WebRTC");
    println!(
        "cargo:rustc-link-search=framework={}",
        swift_out_dir_path.display()
    );
    // Find WebRTC.framework as a sibling of the executable when running tests.
    println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
    // Find WebRTC.framework in parent directory of the executable when running examples.
    println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/..");
    let source_path = swift_out_dir_path.join("WebRTC.framework");
    // OUT_DIR is .../target/<triple?>/<profile>/build/<pkg>-<hash>/out, so
    // ../../../ lands in the profile directory next to the final artifacts.
    let deps_dir_path =
        PathBuf::from(env::var("OUT_DIR").unwrap()).join("../../../deps/WebRTC.framework");
    let target_dir_path =
        PathBuf::from(env::var("OUT_DIR").unwrap()).join("../../../WebRTC.framework");
    copy_dir(&source_path, &deps_dir_path);
    copy_dir(&source_path, &target_dir_path);
}
/// Queries the Swift toolchain for target/runtime metadata by running
/// `swift -target <triple> -print-target-info` and deserializing its JSON.
/// Panics if the command or the deserialization fails.
fn get_swift_target() -> SwiftTarget {
    let mut arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
    // Cargo calls Apple Silicon "aarch64"; Swift triples call it "arm64".
    if arch == "aarch64" {
        arch = "arm64".into();
    }
    let target = format!("{}-apple-macosx{}", arch, MACOS_TARGET_VERSION);
    let swift_target_info_str = Command::new("swift")
        .args(["-target", &target, "-print-target-info"])
        .output()
        .unwrap()
        .stdout;
    serde_json::from_slice(&swift_target_info_str).unwrap()
}
/// Absolute path of the bundled Swift package (`<crate dir>/LiveKitBridge`);
/// build scripts run with the crate root as the current directory.
fn swift_package_root() -> PathBuf {
    let crate_root = env::current_dir().unwrap();
    crate_root.join(SWIFT_PACKAGE_NAME)
}
/// Scratch directory for `swift build` artifacts, placed under the workspace
/// `target/` directory (per rust target triple) so `cargo clean` removes it.
fn swift_target_folder() -> PathBuf {
    let triple = env::var("TARGET").unwrap();
    let relative = format!("../../target/{}/{}_target", triple, SWIFT_PACKAGE_NAME);
    env::current_dir().unwrap().join(relative)
}
/// Cache directory handed to `swift build --cache-path`, kept under the
/// workspace `target/` directory next to the swift build scratch folder.
fn swift_cache_folder() -> PathBuf {
    let triple = env::var("TARGET").unwrap();
    let relative = format!("../../target/{}/{}_cache", triple, SWIFT_PACKAGE_NAME);
    env::current_dir().unwrap().join(relative)
}
/// Replaces `destination` with a fresh recursive copy of `source`.
///
/// Shells out to `rm -rf` / `cp -R` because the standard library has no
/// recursive directory copy; panics if either command fails.
fn copy_dir(source: &Path, destination: &Path) {
    let removed = Command::new("rm")
        .arg("-rf")
        .arg(destination)
        .status()
        .unwrap()
        .success();
    assert!(removed, "could not remove {:?} before copying", destination);

    let copied = Command::new("cp")
        .arg("-R")
        .args([source, destination])
        .status()
        .unwrap()
        .success();
    assert!(copied, "could not copy {:?} to {:?}", source, destination);
}
impl SwiftTarget {
    /// Directory where `swift build` places the compiled static library for
    /// this target triple and the current cargo profile.
    fn out_dir_path(&self) -> PathBuf {
        swift_target_folder()
            .join(&self.target.unversioned_triple)
            .join(env::var("PROFILE").unwrap())
    }
}

View file

@ -1,53 +1,18 @@
#![cfg_attr(windows, allow(unused))]
use gpui::{
actions, bounds, div, point,
prelude::{FluentBuilder as _, IntoElement},
px, rgb, size, AsyncAppContext, Bounds, InteractiveElement, KeyBinding, Menu, MenuItem,
ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
StatefulInteractiveElement as _, Styled, Task, View, ViewContext, VisualContext, WindowBounds,
WindowHandle, WindowOptions,
};
#[cfg(not(target_os = "windows"))]
use live_kit_client::{
capture_local_audio_track, capture_local_video_track,
id::ParticipantIdentity,
options::{TrackPublishOptions, VideoCodec},
participant::{Participant, RemoteParticipant},
play_remote_audio_track,
publication::{LocalTrackPublication, RemoteTrackPublication},
track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource},
AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions,
};
#[cfg(target_os = "windows")]
use live_kit_client::{
participant::{Participant, RemoteParticipant},
publication::{LocalTrackPublication, RemoteTrackPublication},
track::{LocalTrack, RemoteTrack, RemoteVideoTrack},
AudioStream, RemoteVideoTrackView, Room, RoomEvent,
};
use std::time::Duration;
use futures::StreamExt;
use gpui::{actions, KeyBinding, Menu, MenuItem};
use live_kit_client::{LocalAudioTrack, LocalVideoTrack, Room, RoomUpdate};
use live_kit_server::token::{self, VideoGrant};
use log::LevelFilter;
use postage::stream::Stream as _;
use simplelog::SimpleLogger;
actions!(live_kit_client, [Quit]);
#[cfg(windows)]
fn main() {}
#[cfg(not(windows))]
fn main() {
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
gpui::App::new().run(|cx| {
live_kit_client::init(
cx.background_executor().dispatcher.clone(),
cx.http_client(),
);
#[cfg(any(test, feature = "test-support"))]
println!("USING TEST LIVEKIT");
@ -55,8 +20,10 @@ fn main() {
println!("USING REAL LIVEKIT");
cx.activate(true);
cx.on_action(quit);
cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
cx.set_menus(vec![Menu {
name: "Zed".into(),
items: vec![MenuItem::Action {
@ -69,26 +36,132 @@ fn main() {
let live_kit_url = std::env::var("LIVE_KIT_URL").unwrap_or("http://localhost:7880".into());
let live_kit_key = std::env::var("LIVE_KIT_KEY").unwrap_or("devkey".into());
let live_kit_secret = std::env::var("LIVE_KIT_SECRET").unwrap_or("secret".into());
let height = px(800.);
let width = px(800.);
cx.spawn(|cx| async move {
let mut windows = Vec::new();
for i in 0..3 {
let token = token::create(
&live_kit_key,
&live_kit_secret,
Some(&format!("test-participant-{i}")),
VideoGrant::to_join("test-room"),
)
.unwrap();
let user_a_token = token::create(
&live_kit_key,
&live_kit_secret,
Some("test-participant-1"),
VideoGrant::to_join("test-room"),
)
.unwrap();
let room_a = Room::new();
room_a.connect(&live_kit_url, &user_a_token).await.unwrap();
let bounds = bounds(point(width * i, px(0.0)), size(width, height));
let window =
LivekitWindow::new(live_kit_url.as_str(), token.as_str(), bounds, cx.clone())
.await;
windows.push(window);
let user2_token = token::create(
&live_kit_key,
&live_kit_secret,
Some("test-participant-2"),
VideoGrant::to_join("test-room"),
)
.unwrap();
let room_b = Room::new();
room_b.connect(&live_kit_url, &user2_token).await.unwrap();
let mut room_updates = room_b.updates();
let audio_track = LocalAudioTrack::create();
let audio_track_publication = room_a.publish_audio_track(audio_track).await.unwrap();
if let RoomUpdate::SubscribedToRemoteAudioTrack(track, _) =
room_updates.next().await.unwrap()
{
let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
assert_eq!(remote_tracks.len(), 1);
assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
assert_eq!(track.publisher_id(), "test-participant-1");
} else {
panic!("unexpected message");
}
audio_track_publication.set_mute(true).await.unwrap();
println!("waiting for mute changed!");
if let RoomUpdate::RemoteAudioTrackMuteChanged { track_id, muted } =
room_updates.next().await.unwrap()
{
let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
assert_eq!(remote_tracks[0].sid(), track_id);
assert!(muted);
} else {
panic!("unexpected message");
}
audio_track_publication.set_mute(false).await.unwrap();
if let RoomUpdate::RemoteAudioTrackMuteChanged { track_id, muted } =
room_updates.next().await.unwrap()
{
let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
assert_eq!(remote_tracks[0].sid(), track_id);
assert!(!muted);
} else {
panic!("unexpected message");
}
println!("Pausing for 5 seconds to test audio, make some noise!");
let timer = cx.background_executor().timer(Duration::from_secs(5));
timer.await;
let remote_audio_track = room_b
.remote_audio_tracks("test-participant-1")
.pop()
.unwrap();
room_a.unpublish_track(audio_track_publication);
// Clear out any active speakers changed messages
let mut next = room_updates.next().await.unwrap();
while let RoomUpdate::ActiveSpeakersChanged { speakers } = next {
println!("Speakers changed: {:?}", speakers);
next = room_updates.next().await.unwrap();
}
if let RoomUpdate::UnsubscribedFromRemoteAudioTrack {
publisher_id,
track_id,
} = next
{
assert_eq!(publisher_id, "test-participant-1");
assert_eq!(remote_audio_track.sid(), track_id);
assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0);
} else {
panic!("unexpected message");
}
let displays = room_a.display_sources().await.unwrap();
let display = displays.into_iter().next().unwrap();
let local_video_track = LocalVideoTrack::screen_share_for_display(&display);
let local_video_track_publication =
room_a.publish_video_track(local_video_track).await.unwrap();
if let RoomUpdate::SubscribedToRemoteVideoTrack(track) =
room_updates.next().await.unwrap()
{
let remote_video_tracks = room_b.remote_video_tracks("test-participant-1");
assert_eq!(remote_video_tracks.len(), 1);
assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1");
assert_eq!(track.publisher_id(), "test-participant-1");
} else {
panic!("unexpected message");
}
let remote_video_track = room_b
.remote_video_tracks("test-participant-1")
.pop()
.unwrap();
room_a.unpublish_track(local_video_track_publication);
if let RoomUpdate::UnsubscribedFromRemoteVideoTrack {
publisher_id,
track_id,
} = room_updates.next().await.unwrap()
{
assert_eq!(publisher_id, "test-participant-1");
assert_eq!(remote_video_track.sid(), track_id);
assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
} else {
panic!("unexpected message");
}
cx.update(|cx| cx.shutdown()).ok();
})
.detach();
});
@ -97,340 +170,3 @@ fn main() {
/// Handles the `Quit` action by shutting down the GPUI application.
fn quit(_: &Quit, app: &mut gpui::AppContext) {
    app.quit();
}
/// Test-app window that joins a LiveKit room and renders its participants.
struct LivekitWindow {
    room: Room,
    /// Publication of the locally captured microphone track, if publishing.
    microphone_track: Option<LocalTrackPublication>,
    /// Publication of the local screen-share track, if sharing.
    screen_share_track: Option<LocalTrackPublication>,
    /// Keeps the local microphone capture alive while published.
    microphone_stream: Option<AudioStream>,
    /// Keeps the local screen capture alive while published.
    screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
    /// Per-participant UI state, kept sorted by identity so
    /// `remote_participant` can binary-search it.
    #[cfg(not(target_os = "windows"))]
    remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
    /// Drives the room-event loop for this window's lifetime.
    _events_task: Task<()>,
}
/// UI state tracked for a single remote participant.
#[derive(Default)]
struct ParticipantState {
    /// Remote audio publication plus the stream playing it locally.
    audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
    /// Whether a mute event was last reported for this participant.
    muted: bool,
    /// Remote screen-share track and the view rendering it.
    screen_share_output_view: Option<(RemoteVideoTrack, View<RemoteVideoTrackView>)>,
    /// Whether the participant is in the current active-speakers set.
    speaking: bool,
}
#[cfg(not(windows))]
impl LivekitWindow {
    /// Connects to the room, opens a window for it, and spawns a task that
    /// feeds room events into `handle_room_event` until the channel closes.
    async fn new(
        url: &str,
        token: &str,
        bounds: Bounds<Pixels>,
        cx: AsyncAppContext,
    ) -> WindowHandle<Self> {
        let (room, mut events) = Room::connect(url, token, RoomOptions::default())
            .await
            .unwrap();

        cx.update(|cx| {
            cx.open_window(
                WindowOptions {
                    window_bounds: Some(WindowBounds::Windowed(bounds)),
                    ..Default::default()
                },
                |cx| {
                    cx.new_view(|cx| {
                        // Pump room events into the view; the loop (and task)
                        // ends when the channel closes or the view is dropped.
                        let _events_task = cx.spawn(|this, mut cx| async move {
                            while let Some(event) = events.recv().await {
                                this.update(&mut cx, |this: &mut LivekitWindow, cx| {
                                    this.handle_room_event(event, cx)
                                })
                                .ok();
                            }
                        });
                        Self {
                            room,
                            microphone_track: None,
                            microphone_stream: None,
                            screen_share_track: None,
                            screen_share_stream: None,
                            remote_participants: Vec::new(),
                            _events_task,
                        }
                    })
                },
            )
            .unwrap()
        })
        .unwrap()
    }

    /// Applies a room event to per-participant state and re-renders.
    fn handle_room_event(&mut self, event: RoomEvent, cx: &mut ViewContext<Self>) {
        eprintln!("event: {event:?}");

        match event {
            RoomEvent::TrackUnpublished {
                publication,
                participant,
            } => {
                let output = self.remote_participant(participant);
                let unpublish_sid = publication.sid();
                // Drop whichever local stream/view was bound to the
                // unpublished track, matched by SID.
                if output
                    .audio_output_stream
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.audio_output_stream.take();
                }
                if output
                    .screen_share_output_view
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.screen_share_output_view.take();
                }
                cx.notify();
            }

            RoomEvent::TrackSubscribed {
                publication,
                participant,
                track,
            } => {
                let output = self.remote_participant(participant);
                match track {
                    // Start playing remote audio immediately; keep the stream
                    // alive alongside its publication so it can be toggled.
                    RemoteTrack::Audio(track) => {
                        output.audio_output_stream =
                            Some((publication.clone(), play_remote_audio_track(&track, cx)));
                    }
                    RemoteTrack::Video(track) => {
                        output.screen_share_output_view = Some((
                            track.clone(),
                            cx.new_view(|cx| RemoteVideoTrackView::new(track, cx)),
                        ));
                    }
                }
                cx.notify();
            }

            // Only remote participants are tracked here; local mute state is
            // read directly off `microphone_track` in `render`.
            RoomEvent::TrackMuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = true;
                    cx.notify();
                }
            }

            RoomEvent::TrackUnmuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = false;
                    cx.notify();
                }
            }

            RoomEvent::ActiveSpeakersChanged { speakers } => {
                // Recompute the `speaking` flag for every known participant.
                for (identity, output) in &mut self.remote_participants {
                    output.speaking = speakers.iter().any(|speaker| {
                        if let Participant::Remote(speaker) = speaker {
                            speaker.identity() == *identity
                        } else {
                            false
                        }
                    });
                }
                cx.notify();
            }

            _ => {}
        }

        // NOTE(review): this runs for every event, which makes the per-arm
        // `cx.notify()` calls above redundant.
        cx.notify();
    }

    /// Returns the state entry for `participant`, inserting a default entry at
    /// the sorted position if it is not yet known.
    fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
        match self
            .remote_participants
            .binary_search_by_key(&&participant.identity(), |row| &row.0)
        {
            Ok(ix) => &mut self.remote_participants[ix].1,
            Err(ix) => {
                self.remote_participants
                    .insert(ix, (participant.identity(), ParticipantState::default()));
                &mut self.remote_participants[ix].1
            }
        }
    }

    /// Mutes/unmutes an already-published microphone, or captures and
    /// publishes one on first use.
    fn toggle_mute(&mut self, cx: &mut ViewContext<Self>) {
        if let Some(track) = &self.microphone_track {
            if track.is_muted() {
                track.unmute();
            } else {
                track.mute();
            }
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            cx.spawn(|this, mut cx| async move {
                // Capture must run on the app context; `??` unwraps both the
                // update result and the capture result.
                let (track, stream) = cx.update(|cx| capture_local_audio_track(cx))??;
                let publication = participant
                    .publish_track(
                        LocalTrack::Audio(track),
                        TrackPublishOptions {
                            source: TrackSource::Microphone,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(&mut cx, |this, cx| {
                    this.microphone_track = Some(publication);
                    this.microphone_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Starts sharing the first available screen, or stops an active share by
    /// unpublishing its track and dropping the capture stream.
    fn toggle_screen_share(&mut self, cx: &mut ViewContext<Self>) {
        if let Some(track) = self.screen_share_track.take() {
            // Dropping the stream stops capture; unpublishing is async.
            self.screen_share_stream.take();
            let participant = self.room.local_participant();
            cx.background_executor()
                .spawn(async move {
                    participant.unpublish_track(&track.sid()).await.unwrap();
                })
                .detach();
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            let sources = cx.screen_capture_sources();
            cx.spawn(|this, mut cx| async move {
                let sources = sources.await.unwrap()?;
                // Test app: always share the first display.
                let source = sources.into_iter().next().unwrap();
                let (track, stream) = capture_local_video_track(&*source).await?;
                let publication = participant
                    .publish_track(
                        LocalTrack::Video(track),
                        TrackPublishOptions {
                            source: TrackSource::Screenshare,
                            video_codec: VideoCodec::H264,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(&mut cx, |this, cx| {
                    this.screen_share_track = Some(publication);
                    this.screen_share_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Enables/disables the given participant's subscribed audio publication
    /// ("deafen" toggle). Returns `None` if the participant or their audio
    /// stream is unknown.
    fn toggle_remote_audio_for_participant(
        &mut self,
        identity: &ParticipantIdentity,
        cx: &mut ViewContext<Self>,
    ) -> Option<()> {
        let participant = self.remote_participants.iter().find_map(|(id, state)| {
            if id == identity {
                Some(state)
            } else {
                None
            }
        })?;
        let publication = &participant.audio_output_stream.as_ref()?.0;
        publication.set_enabled(!publication.is_enabled());
        cx.notify();
        Some(())
    }
}
#[cfg(not(windows))]
impl Render for LivekitWindow {
    /// Renders a toolbar (mic + screen-share toggles) above a scrollable list
    /// of remote participants with per-participant deafen buttons and
    /// screen-share views.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        // Shared styling for all toolbar/participant buttons.
        fn button() -> gpui::Div {
            div()
                .w(px(180.0))
                .h(px(30.0))
                .px_2()
                .m_2()
                .bg(rgb(0x8888ff))
        }

        div()
            .bg(rgb(0xffffff))
            .size_full()
            .flex()
            .flex_col()
            .child(
                // Toolbar row.
                div().bg(rgb(0xffd4a8)).flex().flex_row().children([
                    button()
                        .id("toggle-mute")
                        .child(if let Some(track) = &self.microphone_track {
                            if track.is_muted() {
                                "Unmute"
                            } else {
                                "Mute"
                            }
                        } else {
                            "Publish mic"
                        })
                        .on_click(cx.listener(|this, _, cx| this.toggle_mute(cx))),
                    button()
                        .id("toggle-screen-share")
                        .child(if self.screen_share_track.is_none() {
                            "Share screen"
                        } else {
                            "Unshare screen"
                        })
                        .on_click(cx.listener(|this, _, cx| this.toggle_screen_share(cx))),
                ]),
            )
            .child(
                // One tile per remote participant.
                div()
                    .id("remote-participants")
                    .overflow_y_scroll()
                    .flex()
                    .flex_col()
                    .flex_grow()
                    .children(self.remote_participants.iter().map(|(identity, state)| {
                        div()
                            .h(px(300.0))
                            .flex()
                            .flex_col()
                            .m_2()
                            .px_2()
                            .bg(rgb(0x8888ff))
                            .child(SharedString::from(if state.speaking {
                                format!("{} (speaking)", &identity.0)
                            } else if state.muted {
                                format!("{} (muted)", &identity.0)
                            } else {
                                identity.0.clone()
                            }))
                            // Deafen/undeafen button, only when we have the
                            // participant's audio stream. Note: the closure's
                            // `state` param shadows the outer `state` and is
                            // the (publication, stream) tuple.
                            .when_some(state.audio_output_stream.as_ref(), |el, state| {
                                el.child(
                                    button()
                                        .id(SharedString::from(identity.0.clone()))
                                        .child(if state.0.is_enabled() {
                                            "Deafen"
                                        } else {
                                            "Undeafen"
                                        })
                                        .on_click(cx.listener({
                                            let identity = identity.clone();
                                            move |this, _, cx| {
                                                this.toggle_remote_audio_for_participant(
                                                    &identity, cx,
                                                );
                                            }
                                        })),
                                )
                            })
                            .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
                    })),
            )
    }
}

View file

@ -1,387 +1,37 @@
#![cfg_attr(target_os = "windows", allow(unused))]
#![allow(clippy::arc_with_non_send_sync)]
mod remote_video_track_view;
#[cfg(any(test, feature = "test-support", target_os = "windows"))]
use std::sync::Arc;
#[cfg(all(target_os = "macos", not(any(test, feature = "test-support"))))]
pub mod prod;
#[cfg(all(target_os = "macos", not(any(test, feature = "test-support"))))]
pub use prod::*;
#[cfg(any(test, feature = "test-support", not(target_os = "macos")))]
pub mod test;
use anyhow::{anyhow, Context as _, Result};
use cpal::{
traits::{DeviceTrait, HostTrait, StreamTrait as _},
StreamConfig,
};
use futures::{io, Stream, StreamExt as _};
use gpui::{AppContext, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, Task};
use parking_lot::Mutex;
use std::{borrow::Cow, future::Future, pin::Pin, sync::Arc};
use util::{debug_panic, ResultExt as _, TryFutureExt};
#[cfg(not(target_os = "windows"))]
use webrtc::{
audio_frame::AudioFrame,
audio_source::{native::NativeAudioSource, AudioSourceOptions, RtcAudioSource},
audio_stream::native::NativeAudioStream,
video_frame::{VideoBuffer, VideoFrame, VideoRotation},
video_source::{native::NativeVideoSource, RtcVideoSource, VideoResolution},
video_stream::native::NativeVideoStream,
};
#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))]
pub use livekit::*;
#[cfg(any(test, feature = "test-support", target_os = "windows"))]
#[cfg(any(test, feature = "test-support", not(target_os = "macos")))]
pub use test::*;
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
pub type Sid = String;
pub struct AudioStream {
_tasks: [Task<Option<()>>; 2],
/// Connection status of a `Room`.
#[derive(Clone, Eq, PartialEq)]
pub enum ConnectionState {
    Disconnected,
    /// Connected, remembering the URL and token that established the session.
    Connected { url: String, token: String },
}
/// Newtype wrapping GPUI's platform dispatcher so LiveKit can schedule work on it.
struct Dispatcher(Arc<dyn gpui::PlatformDispatcher>);
#[cfg(not(target_os = "windows"))]
impl livekit::dispatcher::Dispatcher for Dispatcher {
    /// Runs a LiveKit runnable on GPUI's dispatcher. The second argument is
    /// left unset — presumably a priority/label hint; TODO confirm.
    fn dispatch(&self, runnable: livekit::dispatcher::Runnable) {
        self.0.dispatch(runnable, None);
    }

    /// Runs a LiveKit runnable after `duration` has elapsed.
    fn dispatch_after(
        &self,
        duration: std::time::Duration,
        runnable: livekit::dispatcher::Runnable,
    ) {
        self.0.dispatch_after(duration, runnable);
    }
}
/// Newtype letting LiveKit issue HTTP requests through Zed's shared HTTP client.
struct HttpClientAdapter(Arc<dyn http_client::HttpClient>);
/// Converts a status code from the `http_client` crate's `http` types into
/// the `http` 0.2 type that LiveKit's dispatcher API expects.
///
/// The `expect` cannot fire in practice: any code that was a valid status in
/// the source type is also a valid `u16` status for `http` 0.2.
fn http_2_status(status: http_client::http::StatusCode) -> http_2::StatusCode {
    let raw = status.as_u16();
    http_2::StatusCode::from_u16(raw).expect("valid status code to status code conversion")
}
#[cfg(not(target_os = "windows"))]
impl livekit::dispatcher::HttpClient for HttpClientAdapter {
    /// Issues a GET for `url` through Zed's HTTP client and adapts the
    /// response into LiveKit's expected response type.
    fn get(
        &self,
        url: &str,
    ) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
        let http_client = self.0.clone();
        let url = url.to_string();
        Box::pin(async move {
            let response = http_client
                .get(&url, http_client::AsyncBody::empty(), false)
                .await
                .map_err(io::Error::other)?;
            Ok(livekit::dispatcher::Response {
                status: http_2_status(response.status()),
                body: Box::pin(response.into_body()),
            })
        })
    }

    /// Rebuilds an `http` 0.2 request with Zed's HTTP types, sends it, and
    /// adapts the response back for LiveKit.
    fn send_async(
        &self,
        request: http_2::Request<Vec<u8>>,
    ) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
        let http_client = self.0.clone();
        // Copy method, URI, and headers across the `http` version boundary.
        let mut builder = http_client::http::Request::builder()
            .method(request.method().as_str())
            .uri(request.uri().to_string());
        for (key, value) in request.headers().iter() {
            builder = builder.header(key.as_str(), value.as_bytes());
        }

        // Request extensions have no cross-version representation; surface
        // this loudly in debug builds rather than silently dropping them.
        if !request.extensions().is_empty() {
            debug_panic!(
                "Livekit sent an HTTP request with a protocol extension that Zed doesn't support!"
            );
        }

        let request = builder
            .body(http_client::AsyncBody::from_bytes(
                request.into_body().into(),
            ))
            .unwrap();

        Box::pin(async move {
            let response = http_client.send(request).await.map_err(io::Error::other)?;
            Ok(livekit::dispatcher::Response {
                status: http_2_status(response.status()),
                body: Box::pin(response.into_body()),
            })
        })
    }
}
/// No-op on Windows: the native LiveKit SDK is not a dependency there (see
/// Cargo.toml's cfg-gated dependencies), so there is nothing to initialize.
#[cfg(target_os = "windows")]
pub fn init(
    dispatcher: Arc<dyn gpui::PlatformDispatcher>,
    http_client: Arc<dyn http_client::HttpClient>,
) {
}
/// Wires LiveKit's global hooks up to Zed's infrastructure: task scheduling
/// goes through GPUI's platform dispatcher and HTTP traffic through Zed's
/// shared HTTP client. Call once at startup, before using any LiveKit APIs.
#[cfg(not(target_os = "windows"))]
pub fn init(
    dispatcher: Arc<dyn gpui::PlatformDispatcher>,
    http_client: Arc<dyn http_client::HttpClient>,
) {
    let dispatcher = Dispatcher(dispatcher);
    let http_client = HttpClientAdapter(http_client);
    livekit::dispatcher::set_dispatcher(dispatcher);
    livekit::dispatcher::set_http_client(http_client);
}
/// Captures the given screen and exposes it as a LiveKit local video track.
///
/// Frames from the capture stream are converted into webrtc buffers and fed
/// to a native video source sized to the display's resolution. Returns the
/// track plus the capture stream; the stream must be kept alive for capture
/// to continue.
#[cfg(not(target_os = "windows"))]
pub async fn capture_local_video_track(
    capture_source: &dyn ScreenCaptureSource,
) -> Result<(track::LocalVideoTrack, Box<dyn ScreenCaptureStream>)> {
    let resolution = capture_source.resolution()?;
    let track_source = NativeVideoSource::new(VideoResolution {
        width: resolution.width.0 as u32,
        height: resolution.height.0 as u32,
    });

    let capture_stream = capture_source
        .stream({
            let track_source = track_source.clone();
            Box::new(move |frame| {
                // Frames whose buffers can't be represented as webrtc buffers
                // (non-macOS platforms) are silently dropped.
                if let Some(buffer) = video_frame_buffer_to_webrtc(frame) {
                    track_source.capture_frame(&VideoFrame {
                        rotation: VideoRotation::VideoRotation0,
                        // NOTE(review): timestamp is always 0 — presumably the
                        // SDK timestamps frames itself; confirm.
                        timestamp_us: 0,
                        buffer,
                    });
                }
            })
        })
        .await??;

    Ok((
        track::LocalVideoTrack::create_video_track(
            "screen share",
            RtcVideoSource::Native(track_source),
        ),
        capture_stream,
    ))
}
/// Captures microphone audio via cpal and exposes it as a LiveKit local
/// audio track.
///
/// Returns the track and an `AudioStream` guard whose tasks keep capture and
/// transmission running; dropping the guard stops both. Under test
/// configurations no real device is opened and a dummy 1 Hz mono format is
/// reported.
#[cfg(not(target_os = "windows"))]
pub fn capture_local_audio_track(
    cx: &mut AppContext,
) -> Result<(track::LocalAudioTrack, AudioStream)> {
    let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded();
    let sample_rate;
    let channels;
    let stream;
    if cfg!(any(test, feature = "test-support")) {
        sample_rate = 1;
        channels = 1;
        stream = None;
    } else {
        let device = cpal::default_host()
            .default_input_device()
            .ok_or_else(|| anyhow!("no audio input device available"))?;
        let config = device
            .default_input_config()
            .context("failed to get default input config")?;
        sample_rate = config.sample_rate().0;
        channels = config.channels() as u32;
        stream = Some(
            device
                .build_input_stream_raw(
                    &config.config(),
                    cpal::SampleFormat::I16,
                    move |data, _: &_| {
                        // Runs on cpal's audio thread: copy the buffer and
                        // hand it to the transmit task; drop it if the
                        // receiver is gone.
                        frame_tx
                            .unbounded_send(AudioFrame {
                                data: Cow::Owned(data.as_slice::<i16>().unwrap().to_vec()),
                                sample_rate,
                                num_channels: channels,
                                samples_per_channel: data.len() as u32 / channels,
                            })
                            .ok();
                    },
                    |err| log::error!("error capturing audio track: {:?}", err),
                    None,
                )
                .context("failed to build input stream")?,
        );
    }

    let source = NativeAudioSource::new(
        AudioSourceOptions {
            echo_cancellation: true,
            noise_suppression: true,
            auto_gain_control: false,
        },
        sample_rate,
        channels,
        // TODO livekit: Pull these out of a proto later
        100,
    );

    // The cpal stream is held (and `play()`ed) inside a foreground task that
    // never completes, so it lives until the AudioStream guard is dropped —
    // presumably because the stream type is not `Send`; TODO confirm.
    // NOTE(review): this pins stream ownership to the main thread.
    let stream_task = cx.foreground_executor().spawn(async move {
        if let Some(stream) = &stream {
            stream.play().log_err();
        }
        futures::future::pending().await
    });

    // Background task forwarding captured frames into the webrtc source.
    let transmit_task = cx.background_executor().spawn({
        let source = source.clone();
        async move {
            while let Some(frame) = frame_rx.next().await {
                source.capture_frame(&frame).await.ok();
            }
            Some(())
        }
    });

    let track =
        track::LocalAudioTrack::create_audio_track("microphone", RtcAudioSource::Native(source));

    Ok((
        track,
        AudioStream {
            _tasks: [stream_task, transmit_task],
        },
    ))
}
/// Plays a remote audio track through the default output device.
///
/// A background task pulls frames from the webrtc stream into a shared
/// buffer; a foreground task (re)builds a cpal output stream whenever the
/// incoming frame format changes and copies the latest buffer into each
/// output callback. Returns a guard whose drop cancels both tasks.
#[cfg(not(target_os = "windows"))]
pub fn play_remote_audio_track(
    track: &track::RemoteAudioTrack,
    cx: &mut AppContext,
) -> AudioStream {
    // Latest decoded frame, shared between the receive task and the cpal
    // output callback.
    let buffer = Arc::new(Mutex::new(Vec::<i16>::new()));
    let (stream_config_tx, mut stream_config_rx) = futures::channel::mpsc::unbounded();
    // TODO livekit: Pull these out of a proto later
    // NOTE(review): sample rate/channel count are hard-coded to 48 kHz mono.
    let mut stream = NativeAudioStream::new(track.rtc_track(), 48000, 1);

    let receive_task = cx.background_executor().spawn({
        let buffer = buffer.clone();
        async move {
            let mut stream_config = None;
            while let Some(frame) = stream.next().await {
                let mut buffer = buffer.lock();
                let buffer_size = frame.samples_per_channel * frame.num_channels;
                debug_assert!(frame.data.len() == buffer_size as usize);

                let frame_config = StreamConfig {
                    channels: frame.num_channels as u16,
                    sample_rate: cpal::SampleRate(frame.sample_rate),
                    buffer_size: cpal::BufferSize::Fixed(buffer_size),
                };

                // On format change, resize the shared buffer and ask the play
                // task to rebuild the output stream.
                if stream_config.as_ref().map_or(true, |c| *c != frame_config) {
                    buffer.resize(buffer_size as usize, 0);
                    stream_config = Some(frame_config.clone());
                    stream_config_tx.unbounded_send(frame_config).ok();
                }

                // Size mismatch (mid-reconfiguration) → output silence.
                if frame.data.len() == buffer.len() {
                    buffer.copy_from_slice(&frame.data);
                } else {
                    buffer.iter_mut().for_each(|x| *x = 0);
                }
            }
            Some(())
        }
    });

    let play_task = cx.foreground_executor().spawn(
        {
            let buffer = buffer.clone();
            async move {
                if cfg!(any(test, feature = "test-support")) {
                    return Err(anyhow!("can't play audio in tests"));
                }

                let device = cpal::default_host()
                    .default_output_device()
                    .ok_or_else(|| anyhow!("no audio output device available"))?;

                // Each config change replaces the output stream; dropping the
                // previous one stops it.
                let mut _output_stream = None;
                while let Some(config) = stream_config_rx.next().await {
                    _output_stream = Some(device.build_output_stream(
                        &config,
                        {
                            let buffer = buffer.clone();
                            move |data, _info| {
                                // Audio-thread callback: copy the latest frame,
                                // or silence on size mismatch.
                                let buffer = buffer.lock();
                                if data.len() == buffer.len() {
                                    data.copy_from_slice(&buffer);
                                } else {
                                    data.iter_mut().for_each(|x| *x = 0);
                                }
                            }
                        },
                        |error| log::error!("error playing audio track: {:?}", error),
                        None,
                    )?);
                }

                Ok(())
            }
        }
        .log_err(),
    );

    AudioStream {
        _tasks: [receive_task, play_task],
    }
}
/// Windows stub: remote video playback is unsupported, so yield no frames.
#[cfg(target_os = "windows")]
pub fn play_remote_video_track(
    track: &track::RemoteVideoTrack,
) -> impl Stream<Item = ScreenCaptureFrame> {
    futures::stream::empty()
}
/// Adapts a remote webrtc video track into a stream of GPUI screen-capture
/// frames, skipping frames whose buffers cannot be converted.
#[cfg(not(target_os = "windows"))]
pub fn play_remote_video_track(
    track: &track::RemoteVideoTrack,
) -> impl Stream<Item = ScreenCaptureFrame> {
    NativeVideoStream::new(track.rtc_track())
        .filter_map(|frame| async move { video_frame_buffer_from_webrtc(frame.buffer) })
}
/// Converts a webrtc video buffer into a GPUI `ScreenCaptureFrame` backed by
/// its underlying `CVImageBuffer`. Returns `None` if the buffer is not a
/// native buffer or carries no pixel buffer.
#[cfg(target_os = "macos")]
fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<ScreenCaptureFrame> {
    use core_foundation::base::TCFType as _;
    use media::core_video::CVImageBuffer;

    let buffer = buffer.as_native()?;
    let pixel_buffer = buffer.get_cv_pixel_buffer();
    if pixel_buffer.is_null() {
        return None;
    }

    // SAFETY: `pixel_buffer` is a non-null CVImageBufferRef;
    // `wrap_under_get_rule` retains it, so the returned frame holds its own
    // reference independent of `buffer`'s lifetime.
    unsafe {
        Some(ScreenCaptureFrame(CVImageBuffer::wrap_under_get_rule(
            pixel_buffer as _,
        )))
    }
}
/// Non-macOS fallback: native pixel buffers are unavailable, so all frames
/// are dropped.
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
fn video_frame_buffer_from_webrtc(_buffer: Box<dyn VideoBuffer>) -> Option<ScreenCaptureFrame> {
    None
}
/// Wraps a captured `CVPixelBuffer` in a webrtc native buffer.
///
/// The frame's CF reference is `forget`-ed so its retain count passes to the
/// webrtc buffer — presumably `from_cv_pixel_buffer` takes ownership of that
/// reference; TODO confirm to rule out a leak or over-release.
#[cfg(target_os = "macos")]
fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
    use core_foundation::base::TCFType as _;

    let pixel_buffer = frame.0.as_concrete_TypeRef();
    std::mem::forget(frame.0);
    // SAFETY: `pixel_buffer` points at a valid, still-retained CVPixelBuffer;
    // its Rust owner was forgotten above so the reference is not released here.
    unsafe {
        Some(webrtc::video_frame::native::NativeBuffer::from_cv_pixel_buffer(pixel_buffer as _))
    }
}
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
fn video_frame_buffer_to_webrtc(_frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
None as Option<Box<dyn VideoBuffer>>
/// Events broadcast to `Room::updates()` subscribers by the room's delegate
/// callbacks.
#[derive(Clone)]
pub enum RoomUpdate {
    ActiveSpeakersChanged { speakers: Vec<Sid> },
    RemoteAudioTrackMuteChanged { track_id: Sid, muted: bool },
    SubscribedToRemoteVideoTrack(Arc<RemoteVideoTrack>),
    SubscribedToRemoteAudioTrack(Arc<RemoteAudioTrack>, Arc<RemoteTrackPublication>),
    UnsubscribedFromRemoteVideoTrack { publisher_id: Sid, track_id: Sid },
    UnsubscribedFromRemoteAudioTrack { publisher_id: Sid, track_id: Sid },
    LocalAudioTrackPublished { publication: LocalTrackPublication },
    LocalAudioTrackUnpublished { publication: LocalTrackPublication },
    LocalVideoTrackPublished { publication: LocalTrackPublication },
    LocalVideoTrackUnpublished { publication: LocalTrackPublication },
}

View file

@ -0,0 +1,981 @@
use crate::{ConnectionState, RoomUpdate, Sid};
use anyhow::{anyhow, Context, Result};
use core_foundation::{
array::{CFArray, CFArrayRef},
base::{CFRelease, CFRetain, TCFType},
string::{CFString, CFStringRef},
};
use futures::{
channel::{mpsc, oneshot},
Future,
};
pub use media::core_video::CVImageBuffer;
use media::core_video::CVImageBufferRef;
use parking_lot::Mutex;
use postage::watch;
use std::{
ffi::c_void,
sync::{Arc, Weak},
};
/// Declares a transparent, copyable wrapper around a raw pointer that is
/// produced and consumed by the Swift side of the bridge.
macro_rules! pointer_type {
    ($pointer_name:ident) => {
        #[repr(transparent)]
        #[derive(Copy, Clone, Debug)]
        pub struct $pointer_name(pub *const std::ffi::c_void);
        // SAFETY: these handles are opaque tokens only ever dereferenced by
        // Swift; sending them across threads assumes the Swift objects
        // tolerate use from any thread — TODO confirm.
        unsafe impl Send for $pointer_name {}
    };
}
/// Opaque handles to objects owned by the Swift LiveKit bridge. Each is just
/// a raw pointer; lifetime management happens on the Swift/CF side.
mod swift {
    pointer_type!(Room);
    pointer_type!(LocalAudioTrack);
    pointer_type!(RemoteAudioTrack);
    pointer_type!(LocalVideoTrack);
    pointer_type!(RemoteVideoTrack);
    pointer_type!(LocalTrackPublication);
    pointer_type!(RemoteTrackPublication);
    pointer_type!(MacOSDisplay);
    pointer_type!(RoomDelegate);
}
// FFI surface implemented by the Swift side of the bridge. String/array
// values cross the boundary as Core Foundation types; the Rust side wraps
// them with `wrap_under_get_rule` (i.e. it retains them) — the Swift side is
// assumed to return +0 references; TODO confirm against the Swift sources.
// Async operations take a C callback plus a `callback_data` pointer that is
// round-tripped through `Box::into_raw`/`from_raw` on the Rust side.
extern "C" {
    // --- Delegate: one callback per room event the Swift SDK reports. ---
    fn LKRoomDelegateCreate(
        callback_data: *mut c_void,
        on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
        on_did_subscribe_to_remote_audio_track: extern "C" fn(
            callback_data: *mut c_void,
            publisher_id: CFStringRef,
            track_id: CFStringRef,
            remote_track: swift::RemoteAudioTrack,
            remote_publication: swift::RemoteTrackPublication,
        ),
        on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
            callback_data: *mut c_void,
            publisher_id: CFStringRef,
            track_id: CFStringRef,
        ),
        on_mute_changed_from_remote_audio_track: extern "C" fn(
            callback_data: *mut c_void,
            track_id: CFStringRef,
            muted: bool,
        ),
        on_active_speakers_changed: extern "C" fn(
            callback_data: *mut c_void,
            participants: CFArrayRef,
        ),
        on_did_subscribe_to_remote_video_track: extern "C" fn(
            callback_data: *mut c_void,
            publisher_id: CFStringRef,
            track_id: CFStringRef,
            remote_track: swift::RemoteVideoTrack,
        ),
        on_did_unsubscribe_from_remote_video_track: extern "C" fn(
            callback_data: *mut c_void,
            publisher_id: CFStringRef,
            track_id: CFStringRef,
        ),
        on_did_publish_or_unpublish_local_audio_track: extern "C" fn(
            callback_data: *mut c_void,
            publication: swift::LocalTrackPublication,
            is_published: bool,
        ),
        on_did_publish_or_unpublish_local_video_track: extern "C" fn(
            callback_data: *mut c_void,
            publication: swift::LocalTrackPublication,
            is_published: bool,
        ),
    ) -> swift::RoomDelegate;

    // --- Room lifecycle and publishing. ---
    fn LKRoomCreate(delegate: swift::RoomDelegate) -> swift::Room;
    fn LKRoomConnect(
        room: swift::Room,
        url: CFStringRef,
        token: CFStringRef,
        callback: extern "C" fn(*mut c_void, CFStringRef),
        callback_data: *mut c_void,
    );
    fn LKRoomDisconnect(room: swift::Room);
    fn LKRoomPublishVideoTrack(
        room: swift::Room,
        track: swift::LocalVideoTrack,
        callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
        callback_data: *mut c_void,
    );
    fn LKRoomPublishAudioTrack(
        room: swift::Room,
        track: swift::LocalAudioTrack,
        callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
        callback_data: *mut c_void,
    );
    fn LKRoomUnpublishTrack(room: swift::Room, publication: swift::LocalTrackPublication);

    // --- Remote track queries (return CFArrays of opaque handles). ---
    fn LKRoomAudioTracksForRemoteParticipant(
        room: swift::Room,
        participant_id: CFStringRef,
    ) -> CFArrayRef;
    fn LKRoomAudioTrackPublicationsForRemoteParticipant(
        room: swift::Room,
        participant_id: CFStringRef,
    ) -> CFArrayRef;
    fn LKRoomVideoTracksForRemoteParticipant(
        room: swift::Room,
        participant_id: CFStringRef,
    ) -> CFArrayRef;

    // --- Video rendering: `on_frame` returning false detaches the renderer
    //     — TODO confirm against the Swift implementation. ---
    fn LKVideoRendererCreate(
        callback_data: *mut c_void,
        on_frame: extern "C" fn(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool,
        on_drop: extern "C" fn(callback_data: *mut c_void),
    ) -> *const c_void;

    // --- Track-level accessors and controls. ---
    fn LKRemoteAudioTrackGetSid(track: swift::RemoteAudioTrack) -> CFStringRef;
    fn LKRemoteVideoTrackGetSid(track: swift::RemoteVideoTrack) -> CFStringRef;
    fn LKRemoteAudioTrackStart(track: swift::RemoteAudioTrack);
    fn LKRemoteAudioTrackStop(track: swift::RemoteAudioTrack);
    fn LKVideoTrackAddRenderer(track: swift::RemoteVideoTrack, renderer: *const c_void);

    // --- Screen-share capture sources. ---
    fn LKDisplaySources(
        callback_data: *mut c_void,
        callback: extern "C" fn(
            callback_data: *mut c_void,
            sources: CFArrayRef,
            error: CFStringRef,
        ),
    );
    fn LKCreateScreenShareTrackForDisplay(display: swift::MacOSDisplay) -> swift::LocalVideoTrack;
    fn LKLocalAudioTrackCreateTrack() -> swift::LocalAudioTrack;

    // --- Publication mute/enable state. ---
    fn LKLocalTrackPublicationSetMute(
        publication: swift::LocalTrackPublication,
        muted: bool,
        on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
        callback_data: *mut c_void,
    );
    fn LKRemoteTrackPublicationSetEnabled(
        publication: swift::RemoteTrackPublication,
        enabled: bool,
        on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
        callback_data: *mut c_void,
    );
    fn LKLocalTrackPublicationIsMuted(publication: swift::LocalTrackPublication) -> bool;
    fn LKRemoteTrackPublicationIsMuted(publication: swift::RemoteTrackPublication) -> bool;
    fn LKLocalTrackPublicationGetSid(publication: swift::LocalTrackPublication) -> CFStringRef;
    fn LKRemoteTrackPublicationGetSid(publication: swift::RemoteTrackPublication) -> CFStringRef;
}
/// Rust-side wrapper around a Swift LiveKit room.
pub struct Room {
    /// Opaque Swift room object; released in `Drop`.
    native_room: swift::Room,
    /// Watch channel broadcasting the current connection state.
    connection: Mutex<(
        watch::Sender<ConnectionState>,
        watch::Receiver<ConnectionState>,
    )>,
    /// Senders for `updates()` subscribers; closed ones are pruned lazily on
    /// the next broadcast.
    update_subscribers: Mutex<Vec<mpsc::UnboundedSender<RoomUpdate>>>,
    /// Keeps the Swift delegate (and its weak back-pointer to us) alive.
    _delegate: RoomDelegate,
}
impl Room {
    /// Creates a disconnected room. `Arc::new_cyclic` lets the delegate hold
    /// a `Weak<Room>` back-pointer so Swift callbacks can reach this room
    /// without keeping it alive.
    pub fn new() -> Arc<Self> {
        Arc::new_cyclic(|weak_room| {
            let delegate = RoomDelegate::new(weak_room.clone());
            Self {
                native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
                connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
                update_subscribers: Default::default(),
                _delegate: delegate,
            }
        })
    }

    /// Returns a watch receiver reflecting connection-state changes.
    pub fn status(&self) -> watch::Receiver<ConnectionState> {
        self.connection.lock().1.clone()
    }

    /// Starts connecting; the returned future resolves once Swift reports
    /// success or failure, recording the url/token on success.
    pub fn connect(self: &Arc<Self>, url: &str, token: &str) -> impl Future<Output = Result<()>> {
        let url = CFString::new(url);
        let token = CFString::new(token);
        let (did_connect, tx, rx) = Self::build_done_callback();
        unsafe {
            LKRoomConnect(
                self.native_room,
                url.as_concrete_TypeRef(),
                token.as_concrete_TypeRef(),
                did_connect,
                tx,
            )
        }

        let this = self.clone();
        let url = url.to_string();
        let token = token.to_string();
        async move {
            rx.await.unwrap().context("error connecting to room")?;
            *this.connection.lock().0.borrow_mut() = ConnectionState::Connected { url, token };
            Ok(())
        }
    }

    /// Delegate hook: marks the room disconnected for `status()` watchers.
    fn did_disconnect(&self) {
        *self.connection.lock().0.borrow_mut() = ConnectionState::Disconnected;
    }

    /// Asynchronously lists displays available for screen sharing.
    pub fn display_sources(self: &Arc<Self>) -> impl Future<Output = Result<Vec<MacOSDisplay>>> {
        extern "C" fn callback(tx: *mut c_void, sources: CFArrayRef, error: CFStringRef) {
            // SAFETY: `tx` was produced by `Box::into_raw` below and Swift
            // invokes this callback exactly once.
            unsafe {
                let tx = Box::from_raw(tx as *mut oneshot::Sender<Result<Vec<MacOSDisplay>>>);
                if sources.is_null() {
                    let _ = tx.send(Err(anyhow!("{}", CFString::wrap_under_get_rule(error))));
                } else {
                    let sources = CFArray::wrap_under_get_rule(sources)
                        .into_iter()
                        .map(|source| MacOSDisplay::new(swift::MacOSDisplay(*source)))
                        .collect();
                    let _ = tx.send(Ok(sources));
                }
            }
        }

        let (tx, rx) = oneshot::channel();

        unsafe {
            LKDisplaySources(Box::into_raw(Box::new(tx)) as *mut _, callback);
        }

        async move { rx.await.unwrap() }
    }

    /// Publishes a local video (screen-share) track to the room.
    pub fn publish_video_track(
        self: &Arc<Self>,
        track: LocalVideoTrack,
    ) -> impl Future<Output = Result<LocalTrackPublication>> {
        let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
        extern "C" fn callback(
            tx: *mut c_void,
            publication: swift::LocalTrackPublication,
            error: CFStringRef,
        ) {
            // SAFETY: `tx` was produced by `Box::into_raw` below and Swift
            // invokes this callback exactly once.
            let tx =
                unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
            if error.is_null() {
                let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
            } else {
                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
                let _ = tx.send(Err(anyhow!(error)));
            }
        }
        unsafe {
            LKRoomPublishVideoTrack(
                self.native_room,
                track.0,
                callback,
                Box::into_raw(Box::new(tx)) as *mut c_void,
            );
        }
        async { rx.await.unwrap().context("error publishing video track") }
    }

    /// Publishes a local audio (microphone) track to the room.
    pub fn publish_audio_track(
        self: &Arc<Self>,
        track: LocalAudioTrack,
    ) -> impl Future<Output = Result<LocalTrackPublication>> {
        let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
        extern "C" fn callback(
            tx: *mut c_void,
            publication: swift::LocalTrackPublication,
            error: CFStringRef,
        ) {
            // SAFETY: `tx` was produced by `Box::into_raw` below and Swift
            // invokes this callback exactly once.
            let tx =
                unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
            if error.is_null() {
                let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
            } else {
                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
                let _ = tx.send(Err(anyhow!(error)));
            }
        }
        unsafe {
            LKRoomPublishAudioTrack(
                self.native_room,
                track.0,
                callback,
                Box::into_raw(Box::new(tx)) as *mut c_void,
            );
        }
        async { rx.await.unwrap().context("error publishing audio track") }
    }

    /// Unpublishes a previously published local track (fire-and-forget).
    pub fn unpublish_track(&self, publication: LocalTrackPublication) {
        unsafe {
            LKRoomUnpublishTrack(self.native_room, publication.0);
        }
    }

    /// Returns the remote video tracks currently published by `participant_id`.
    pub fn remote_video_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
        unsafe {
            let tracks = LKRoomVideoTracksForRemoteParticipant(
                self.native_room,
                CFString::new(participant_id).as_concrete_TypeRef(),
            );

            if tracks.is_null() {
                Vec::new()
            } else {
                let tracks = CFArray::wrap_under_get_rule(tracks);
                tracks
                    .into_iter()
                    .map(|native_track| {
                        let native_track = swift::RemoteVideoTrack(*native_track);
                        let id =
                            CFString::wrap_under_get_rule(LKRemoteVideoTrackGetSid(native_track))
                                .to_string();
                        Arc::new(RemoteVideoTrack::new(
                            native_track,
                            id,
                            participant_id.into(),
                        ))
                    })
                    .collect()
            }
        }
    }

    /// Returns the remote audio tracks currently published by `participant_id`.
    pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
        unsafe {
            let tracks = LKRoomAudioTracksForRemoteParticipant(
                self.native_room,
                CFString::new(participant_id).as_concrete_TypeRef(),
            );

            if tracks.is_null() {
                Vec::new()
            } else {
                let tracks = CFArray::wrap_under_get_rule(tracks);
                tracks
                    .into_iter()
                    .map(|native_track| {
                        let native_track = swift::RemoteAudioTrack(*native_track);
                        let id =
                            CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
                                .to_string();
                        Arc::new(RemoteAudioTrack::new(
                            native_track,
                            id,
                            participant_id.into(),
                        ))
                    })
                    .collect()
            }
        }
    }

    /// Returns the audio track publications of `participant_id`.
    pub fn remote_audio_track_publications(
        &self,
        participant_id: &str,
    ) -> Vec<Arc<RemoteTrackPublication>> {
        unsafe {
            let tracks = LKRoomAudioTrackPublicationsForRemoteParticipant(
                self.native_room,
                CFString::new(participant_id).as_concrete_TypeRef(),
            );

            if tracks.is_null() {
                Vec::new()
            } else {
                let tracks = CFArray::wrap_under_get_rule(tracks);
                tracks
                    .into_iter()
                    .map(|native_track_publication| {
                        let native_track_publication =
                            swift::RemoteTrackPublication(*native_track_publication);
                        Arc::new(RemoteTrackPublication::new(native_track_publication))
                    })
                    .collect()
            }
        }
    }

    /// Subscribes to room updates. Each subscriber gets its own unbounded
    /// channel; dropped receivers are pruned on the next broadcast.
    pub fn updates(&self) -> mpsc::UnboundedReceiver<RoomUpdate> {
        let (tx, rx) = mpsc::unbounded();
        self.update_subscribers.lock().push(tx);
        rx
    }

    // The following `did_*`/`*_changed` methods are invoked by the delegate
    // callbacks; each broadcasts a `RoomUpdate` and drops closed subscribers.

    fn did_subscribe_to_remote_audio_track(
        &self,
        track: RemoteAudioTrack,
        publication: RemoteTrackPublication,
    ) {
        let track = Arc::new(track);
        let publication = Arc::new(publication);
        self.update_subscribers.lock().retain(|tx| {
            tx.unbounded_send(RoomUpdate::SubscribedToRemoteAudioTrack(
                track.clone(),
                publication.clone(),
            ))
            .is_ok()
        });
    }

    fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
        self.update_subscribers.lock().retain(|tx| {
            tx.unbounded_send(RoomUpdate::UnsubscribedFromRemoteAudioTrack {
                publisher_id: publisher_id.clone(),
                track_id: track_id.clone(),
            })
            .is_ok()
        });
    }

    fn mute_changed_from_remote_audio_track(&self, track_id: String, muted: bool) {
        self.update_subscribers.lock().retain(|tx| {
            tx.unbounded_send(RoomUpdate::RemoteAudioTrackMuteChanged {
                track_id: track_id.clone(),
                muted,
            })
            .is_ok()
        });
    }

    fn active_speakers_changed(&self, speakers: Vec<String>) {
        self.update_subscribers.lock().retain(move |tx| {
            tx.unbounded_send(RoomUpdate::ActiveSpeakersChanged {
                speakers: speakers.clone(),
            })
            .is_ok()
        });
    }

    fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
        let track = Arc::new(track);
        self.update_subscribers.lock().retain(|tx| {
            tx.unbounded_send(RoomUpdate::SubscribedToRemoteVideoTrack(track.clone()))
                .is_ok()
        });
    }

    fn did_unsubscribe_from_remote_video_track(&self, publisher_id: String, track_id: String) {
        self.update_subscribers.lock().retain(|tx| {
            tx.unbounded_send(RoomUpdate::UnsubscribedFromRemoteVideoTrack {
                publisher_id: publisher_id.clone(),
                track_id: track_id.clone(),
            })
            .is_ok()
        });
    }

    /// Builds the (callback fn, boxed-sender pointer, receiver) triple used by
    /// FFI calls whose completion reports only an optional error string.
    fn build_done_callback() -> (
        extern "C" fn(*mut c_void, CFStringRef),
        *mut c_void,
        oneshot::Receiver<Result<()>>,
    ) {
        let (tx, rx) = oneshot::channel();
        extern "C" fn done_callback(tx: *mut c_void, error: CFStringRef) {
            // SAFETY: `tx` is the `Box::into_raw` pointer created below and
            // the callback is invoked exactly once.
            let tx = unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<()>>) };
            if error.is_null() {
                let _ = tx.send(Ok(()));
            } else {
                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
                let _ = tx.send(Err(anyhow!(error)));
            }
        }
        (
            done_callback,
            Box::into_raw(Box::new(tx)) as *mut c_void,
            rx,
        )
    }

    /// Mirrors the test implementation's API; must never be called in
    /// production builds.
    pub fn set_display_sources(&self, _: Vec<MacOSDisplay>) {
        unreachable!("This is a test-only function")
    }
}
impl Drop for Room {
    fn drop(&mut self) {
        // SAFETY: `native_room` was created by `LKRoomCreate` and is released
        // exactly once here, after disconnecting from the room.
        unsafe {
            LKRoomDisconnect(self.native_room);
            CFRelease(self.native_room.0);
        }
    }
}
/// Pairs the Swift delegate object with the raw `Weak<Room>` handed to it as
/// callback data.
struct RoomDelegate {
    native_delegate: swift::RoomDelegate,
    /// `Weak::<Room>::into_raw` pointer; delegate callbacks temporarily
    /// reconstruct the `Weak` to reach the room, then leak it again.
    weak_room: *mut c_void,
}
impl RoomDelegate {
fn new(weak_room: Weak<Room>) -> Self {
let weak_room = weak_room.into_raw() as *mut c_void;
let native_delegate = unsafe {
LKRoomDelegateCreate(
weak_room,
Self::on_did_disconnect,
Self::on_did_subscribe_to_remote_audio_track,
Self::on_did_unsubscribe_from_remote_audio_track,
Self::on_mute_change_from_remote_audio_track,
Self::on_active_speakers_changed,
Self::on_did_subscribe_to_remote_video_track,
Self::on_did_unsubscribe_from_remote_video_track,
Self::on_did_publish_or_unpublish_local_audio_track,
Self::on_did_publish_or_unpublish_local_video_track,
)
};
Self {
native_delegate,
weak_room,
}
}
extern "C" fn on_did_disconnect(room: *mut c_void) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
room.did_disconnect();
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_subscribe_to_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
track: swift::RemoteAudioTrack,
publication: swift::RemoteTrackPublication,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
let track = RemoteAudioTrack::new(track, track_id, publisher_id);
let publication = RemoteTrackPublication::new(publication);
if let Some(room) = room.upgrade() {
room.did_subscribe_to_remote_audio_track(track, publication);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_unsubscribe_from_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_mute_change_from_remote_audio_track(
room: *mut c_void,
track_id: CFStringRef,
muted: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.mute_changed_from_remote_audio_track(track_id, muted);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_active_speakers_changed(room: *mut c_void, participants: CFArrayRef) {
if participants.is_null() {
return;
}
let room = unsafe { Weak::from_raw(room as *mut Room) };
let speakers = unsafe {
CFArray::wrap_under_get_rule(participants)
.into_iter()
.map(
|speaker: core_foundation::base::ItemRef<'_, *const c_void>| {
CFString::wrap_under_get_rule(*speaker as CFStringRef).to_string()
},
)
.collect()
};
if let Some(room) = room.upgrade() {
room.active_speakers_changed(speakers);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_subscribe_to_remote_video_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
track: swift::RemoteVideoTrack,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
let track = RemoteVideoTrack::new(track, track_id, publisher_id);
if let Some(room) = room.upgrade() {
room.did_subscribe_to_remote_video_track(track);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_unsubscribe_from_remote_video_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.did_unsubscribe_from_remote_video_track(publisher_id, track_id);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_publish_or_unpublish_local_audio_track(
room: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
let publication = LocalTrackPublication::new(publication);
let update = if is_published {
RoomUpdate::LocalAudioTrackPublished { publication }
} else {
RoomUpdate::LocalAudioTrackUnpublished { publication }
};
room.update_subscribers
.lock()
.retain(|tx| tx.unbounded_send(update.clone()).is_ok());
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_publish_or_unpublish_local_video_track(
room: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
let publication = LocalTrackPublication::new(publication);
let update = if is_published {
RoomUpdate::LocalVideoTrackPublished { publication }
} else {
RoomUpdate::LocalVideoTrackUnpublished { publication }
};
room.update_subscribers
.lock()
.retain(|tx| tx.unbounded_send(update.clone()).is_ok());
}
let _ = Weak::into_raw(room);
}
}
impl Drop for RoomDelegate {
fn drop(&mut self) {
unsafe {
CFRelease(self.native_delegate.0);
let _ = Weak::from_raw(self.weak_room as *mut Room);
}
}
}
/// A local audio track created through the Swift LiveKit SDK.
pub struct LocalAudioTrack(swift::LocalAudioTrack);

impl LocalAudioTrack {
    /// Creates a new native local audio track via the Swift SDK.
    pub fn create() -> Self {
        Self(unsafe { LKLocalAudioTrackCreateTrack() })
    }
}

impl Drop for LocalAudioTrack {
    fn drop(&mut self) {
        // SAFETY: `create` returned an owned native object; release our
        // reference when this wrapper is dropped.
        unsafe { CFRelease(self.0 .0) }
    }
}
/// A local video track created through the Swift LiveKit SDK.
pub struct LocalVideoTrack(swift::LocalVideoTrack);

impl LocalVideoTrack {
    /// Creates a screen-share track capturing the given macOS display.
    pub fn screen_share_for_display(display: &MacOSDisplay) -> Self {
        Self(unsafe { LKCreateScreenShareTrackForDisplay(display.0) })
    }
}

impl Drop for LocalVideoTrack {
    fn drop(&mut self) {
        // SAFETY: the creation call returned an owned native object; release
        // our reference when this wrapper is dropped.
        unsafe { CFRelease(self.0 .0) }
    }
}
/// Handle to one of our own published tracks, wrapping the Swift SDK object.
pub struct LocalTrackPublication(swift::LocalTrackPublication);

impl LocalTrackPublication {
    /// Wraps a native publication handle, retaining it for this value's lifetime.
    pub fn new(native_track_publication: swift::LocalTrackPublication) -> Self {
        // SAFETY: the handle is valid where it is passed to us (a delegate
        // callback); retaining extends its lifetime beyond that scope.
        unsafe {
            CFRetain(native_track_publication.0);
        }
        Self(native_track_publication)
    }

    /// The publication's sid, fetched from the native object.
    pub fn sid(&self) -> String {
        unsafe { CFString::wrap_under_get_rule(LKLocalTrackPublicationGetSid(self.0)).to_string() }
    }

    /// Asks the native SDK to mute or unmute this publication. The returned
    /// future resolves once the SDK reports completion; it panics if the
    /// native side drops the callback without invoking it (assumed not to
    /// happen — confirm against the Swift shim).
    pub fn set_mute(&self, muted: bool) -> impl Future<Output = Result<()>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        // Invoked once by the native side; a null error string means success.
        extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
            // SAFETY: `callback_data` is the boxed sender leaked below, and this
            // callback runs at most once, so reclaiming the box here is sound.
            let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
            if error.is_null() {
                tx.send(Ok(())).ok();
            } else {
                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
                tx.send(Err(anyhow!(error))).ok();
            }
        }
        unsafe {
            LKLocalTrackPublicationSetMute(
                self.0,
                muted,
                complete_callback,
                Box::into_raw(Box::new(tx)) as *mut c_void,
            )
        }
        async move { rx.await.unwrap() }
    }

    /// Current mute state as reported by the native SDK.
    pub fn is_muted(&self) -> bool {
        unsafe { LKLocalTrackPublicationIsMuted(self.0) }
    }
}

impl Clone for LocalTrackPublication {
    fn clone(&self) -> Self {
        // SAFETY: each clone owns one retain count, balanced in `drop`.
        unsafe {
            CFRetain(self.0 .0);
        }
        Self(self.0)
    }
}

impl Drop for LocalTrackPublication {
    fn drop(&mut self) {
        // SAFETY: balances the retain taken in `new`/`clone`.
        unsafe { CFRelease(self.0 .0) }
    }
}
/// Handle to a remote participant's track publication, wrapping the Swift SDK
/// object.
pub struct RemoteTrackPublication(swift::RemoteTrackPublication);

impl RemoteTrackPublication {
    /// Wraps a native publication handle, retaining it for this value's lifetime.
    pub fn new(native_track_publication: swift::RemoteTrackPublication) -> Self {
        // SAFETY: the handle is valid where it is passed to us (a delegate
        // callback); retaining extends its lifetime beyond that scope.
        unsafe {
            CFRetain(native_track_publication.0);
        }
        Self(native_track_publication)
    }

    /// The publication's sid, fetched from the native object.
    pub fn sid(&self) -> String {
        unsafe { CFString::wrap_under_get_rule(LKRemoteTrackPublicationGetSid(self.0)).to_string() }
    }

    /// Current mute state as reported by the native SDK.
    pub fn is_muted(&self) -> bool {
        unsafe { LKRemoteTrackPublicationIsMuted(self.0) }
    }

    /// Asks the native SDK to enable or disable receiving this publication.
    /// The returned future resolves once the SDK reports completion; it panics
    /// if the native side drops the callback without invoking it (assumed not
    /// to happen — confirm against the Swift shim).
    pub fn set_enabled(&self, enabled: bool) -> impl Future<Output = Result<()>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        // Invoked once by the native side; a null error string means success.
        extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
            // SAFETY: `callback_data` is the boxed sender leaked below, and this
            // callback runs at most once, so reclaiming the box here is sound.
            let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
            if error.is_null() {
                tx.send(Ok(())).ok();
            } else {
                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
                tx.send(Err(anyhow!(error))).ok();
            }
        }
        unsafe {
            LKRemoteTrackPublicationSetEnabled(
                self.0,
                enabled,
                complete_callback,
                Box::into_raw(Box::new(tx)) as *mut c_void,
            )
        }
        async move { rx.await.unwrap() }
    }
}

impl Drop for RemoteTrackPublication {
    fn drop(&mut self) {
        // SAFETY: balances the retain taken in `new`.
        unsafe { CFRelease(self.0 .0) }
    }
}
/// A remote participant's audio track, subscribed through the Swift SDK.
#[derive(Debug)]
pub struct RemoteAudioTrack {
    // Retained native handle; see the note in `Drop` about releasing it.
    native_track: swift::RemoteAudioTrack,
    sid: Sid,
    publisher_id: String,
}

impl RemoteAudioTrack {
    /// Wraps a native track handle, retaining it for this value's lifetime.
    fn new(native_track: swift::RemoteAudioTrack, sid: Sid, publisher_id: String) -> Self {
        // SAFETY: the handle comes from a delegate callback and is valid there;
        // retaining extends its lifetime beyond the callback.
        unsafe {
            CFRetain(native_track.0);
        }
        Self {
            native_track,
            sid,
            publisher_id,
        }
    }

    /// The track's sid.
    pub fn sid(&self) -> &str {
        &self.sid
    }

    /// Identity of the participant that published this track.
    pub fn publisher_id(&self) -> &str {
        &self.publisher_id
    }

    /// Starts this track via the native SDK.
    pub fn start(&self) {
        unsafe { LKRemoteAudioTrackStart(self.native_track) }
    }

    /// Stops this track via the native SDK.
    pub fn stop(&self) {
        unsafe { LKRemoteAudioTrackStop(self.native_track) }
    }
}

impl Drop for RemoteAudioTrack {
    fn drop(&mut self) {
        // The retain taken in `new` is deliberately NOT balanced here.
        // todo: uncomment this `CFRelease`, unless we find that it was causing
        // the crash in the `livekit.multicast` thread.
        //
        // unsafe { CFRelease(self.native_track.0) }
        let _ = self.native_track;
    }
}
/// A remote participant's video track, subscribed through the Swift SDK.
#[derive(Debug)]
pub struct RemoteVideoTrack {
    native_track: swift::RemoteVideoTrack,
    sid: Sid,
    publisher_id: String,
}

impl RemoteVideoTrack {
    /// Wraps a native track handle, retaining it for this value's lifetime.
    fn new(native_track: swift::RemoteVideoTrack, sid: Sid, publisher_id: String) -> Self {
        // SAFETY: the handle comes from a delegate callback and is valid there;
        // retaining extends its lifetime beyond the callback.
        unsafe {
            CFRetain(native_track.0);
        }
        Self {
            native_track,
            sid,
            publisher_id,
        }
    }

    /// The track's sid.
    pub fn sid(&self) -> &str {
        &self.sid
    }

    /// Identity of the participant that published this track.
    pub fn publisher_id(&self) -> &str {
        &self.publisher_id
    }

    /// Attaches a renderer to the native track and returns a broadcast stream
    /// of decoded frames.
    ///
    /// Backpressure: the channel buffers up to 64 frames; when it is full the
    /// incoming frame is dropped (with a warning) but delivery continues. When
    /// every receiver is gone the callback returns `false`, which is presumably
    /// the native side's signal to stop delivering frames — confirm against the
    /// Swift shim.
    pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
        extern "C" fn on_frame(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool {
            // SAFETY: `callback_data` is the boxed sender created below; it is
            // reconstituted only for the duration of this call and leaked again
            // via `Box::into_raw`, so `on_drop` keeps sole ownership.
            unsafe {
                let tx = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
                let buffer = CVImageBuffer::wrap_under_get_rule(frame);
                let result = tx.try_broadcast(Frame(buffer));
                let _ = Box::into_raw(tx);
                match result {
                    Ok(_) => true,
                    Err(async_broadcast::TrySendError::Closed(_))
                    | Err(async_broadcast::TrySendError::Inactive(_)) => {
                        log::warn!("no active receiver for frame");
                        false
                    }
                    Err(async_broadcast::TrySendError::Full(_)) => {
                        log::warn!("skipping frame as receiver is not keeping up");
                        true
                    }
                }
            }
        }
        // Called by the native side when the renderer is torn down; reclaims
        // and drops the boxed sender, closing the channel.
        extern "C" fn on_drop(callback_data: *mut c_void) {
            unsafe {
                let _ = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
            }
        }
        let (tx, rx) = async_broadcast::broadcast(64);
        unsafe {
            let renderer = LKVideoRendererCreate(
                Box::into_raw(Box::new(tx)) as *mut c_void,
                on_frame,
                on_drop,
            );
            LKVideoTrackAddRenderer(self.native_track, renderer);
            rx
        }
    }
}

impl Drop for RemoteVideoTrack {
    fn drop(&mut self) {
        // SAFETY: balances the `CFRetain` performed in `new`.
        unsafe { CFRelease(self.native_track.0) }
    }
}
/// A capturable macOS display handle obtained from the Swift SDK.
pub struct MacOSDisplay(swift::MacOSDisplay);

impl MacOSDisplay {
    /// Wraps and retains a native display pointer.
    fn new(ptr: swift::MacOSDisplay) -> Self {
        // SAFETY: `ptr` is a valid native object where it is passed to us;
        // retain it for this value's lifetime.
        unsafe {
            CFRetain(ptr.0);
        }
        Self(ptr)
    }
}

impl Drop for MacOSDisplay {
    fn drop(&mut self) {
        // SAFETY: balances the retain taken in `new`.
        unsafe { CFRelease(self.0 .0) }
    }
}
/// A single video frame backed by a Core Video image buffer.
#[derive(Clone)]
pub struct Frame(CVImageBuffer);

impl Frame {
    /// Frame width in pixels.
    pub fn width(&self) -> usize {
        let Frame(buffer) = self;
        buffer.width()
    }

    /// Frame height in pixels.
    pub fn height(&self) -> usize {
        let Frame(buffer) = self;
        buffer.height()
    }

    /// A clone of the underlying image buffer.
    pub fn image(&self) -> CVImageBuffer {
        let Frame(buffer) = self;
        buffer.clone()
    }
}

View file

@ -1,61 +0,0 @@
use crate::track::RemoteVideoTrack;
use anyhow::Result;
use futures::StreamExt as _;
use gpui::{
Empty, EventEmitter, IntoElement, Render, ScreenCaptureFrame, Task, View, ViewContext,
VisualContext as _,
};
/// A gpui view that renders the frames of a remote LiveKit video track.
pub struct RemoteVideoTrackView {
    track: RemoteVideoTrack,
    // Most recently received frame; `None` until the first frame arrives.
    frame: Option<ScreenCaptureFrame>,
    // Background task that pumps the track's frame stream into `frame`.
    _maintain_frame: Task<Result<()>>,
}

/// Events emitted by [`RemoteVideoTrackView`].
#[derive(Debug)]
pub enum RemoteVideoTrackViewEvent {
    /// The underlying frame stream ended.
    Close,
}
impl RemoteVideoTrackView {
    /// Creates a view for `track` and spawns a task that re-renders on every
    /// incoming frame, emitting [`RemoteVideoTrackViewEvent::Close`] once the
    /// frame stream ends.
    pub fn new(track: RemoteVideoTrack, cx: &mut ViewContext<Self>) -> Self {
        cx.focus_handle();
        let frames = super::play_remote_video_track(&track);
        Self {
            track,
            frame: None,
            _maintain_frame: cx.spawn(|this, mut cx| async move {
                futures::pin_mut!(frames);
                while let Some(frame) = frames.next().await {
                    // Store the latest frame and schedule a re-render; bail out
                    // (via `?`) if the view has been released.
                    this.update(&mut cx, |this, cx| {
                        this.frame = Some(frame);
                        cx.notify();
                    })?;
                }
                this.update(&mut cx, |_, cx| cx.emit(RemoteVideoTrackViewEvent::Close))?;
                Ok(())
            }),
        }
    }

    /// Creates a fresh view over the same track; each view maintains its own
    /// frame subscription.
    pub fn clone(&self, cx: &mut ViewContext<Self>) -> View<Self> {
        cx.new_view(|cx| Self::new(self.track.clone(), cx))
    }
}
impl EventEmitter<RemoteVideoTrackViewEvent> for RemoteVideoTrackView {}

impl Render for RemoteVideoTrackView {
    /// Draws the latest frame as a full-size surface on macOS; renders nothing
    /// on other platforms or before the first frame arrives.
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        #[cfg(target_os = "macos")]
        if let Some(frame) = &self.frame {
            use gpui::Styled as _;
            return gpui::surface(frame.0.clone())
                .size_full()
                .into_any_element();
        }
        Empty.into_any_element()
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,111 +0,0 @@
use super::*;
/// Test double for a room participant, mirroring the real SDK's split between
/// the local participant and remote ones.
#[derive(Clone, Debug)]
pub enum Participant {
    Local(LocalParticipant),
    Remote(RemoteParticipant),
}

/// The participant representing this client; holds a strong room handle.
#[derive(Clone, Debug)]
pub struct LocalParticipant {
    #[cfg(not(target_os = "windows"))]
    pub(super) identity: ParticipantIdentity,
    pub(super) room: Room,
}

/// A participant other than this client; holds only a weak room handle so it
/// cannot keep the room alive.
#[derive(Clone, Debug)]
pub struct RemoteParticipant {
    #[cfg(not(target_os = "windows"))]
    pub(super) identity: ParticipantIdentity,
    pub(super) room: WeakRoom,
}
#[cfg(not(target_os = "windows"))]
impl Participant {
    /// The identity of the wrapped participant, local or remote.
    pub fn identity(&self) -> ParticipantIdentity {
        let identity = match self {
            Participant::Local(local) => &local.identity,
            Participant::Remote(remote) => &remote.identity,
        };
        identity.clone()
    }
}
#[cfg(not(target_os = "windows"))]
impl LocalParticipant {
    /// Unpublishes one of this participant's tracks on the test server.
    pub async fn unpublish_track(&self, track: &TrackSid) -> Result<()> {
        self.room
            .test_server()
            .unpublish_track(self.room.token(), track)
            .await
    }

    /// Publishes a local audio or video track to the test server and returns a
    /// publication handle holding a weak reference back to the room.
    /// `_options` exists for API parity with the real SDK and is ignored.
    pub async fn publish_track(
        &self,
        track: LocalTrack,
        _options: TrackPublishOptions,
    ) -> Result<LocalTrackPublication> {
        let this = self.clone();
        let track = track.clone();
        let server = this.room.test_server();
        let sid = match track {
            LocalTrack::Video(track) => {
                server.publish_video_track(this.room.token(), track).await?
            }
            LocalTrack::Audio(track) => {
                server
                    .publish_audio_track(this.room.token(), &track)
                    .await?
            }
        };
        Ok(LocalTrackPublication {
            room: self.room.downgrade(),
            sid,
        })
    }
}
#[cfg(not(target_os = "windows"))]
impl RemoteParticipant {
    /// All track publications this participant has published, keyed by track
    /// sid. Returns an empty map once the room has been dropped.
    pub fn track_publications(&self) -> HashMap<TrackSid, RemoteTrackPublication> {
        let Some(room) = self.room.upgrade() else {
            return HashMap::default();
        };
        let server = room.test_server();
        let mut publications = HashMap::default();
        for track in server.audio_tracks(room.token()).unwrap() {
            if track.publisher_id() == self.identity {
                publications.insert(
                    track.sid(),
                    RemoteTrackPublication {
                        sid: track.sid(),
                        room: self.room.clone(),
                        track: RemoteTrack::Audio(track),
                    },
                );
            }
        }
        for track in server.video_tracks(room.token()).unwrap() {
            if track.publisher_id() == self.identity {
                publications.insert(
                    track.sid(),
                    RemoteTrackPublication {
                        sid: track.sid(),
                        room: self.room.clone(),
                        track: RemoteTrack::Video(track),
                    },
                );
            }
        }
        publications
    }

    /// This participant's identity.
    pub fn identity(&self) -> ParticipantIdentity {
        self.identity.clone()
    }
}

View file

@ -1,116 +0,0 @@
use super::*;
/// Test double for a track publication, local or remote.
#[derive(Clone, Debug)]
pub enum TrackPublication {
    Local(LocalTrackPublication),
    Remote(RemoteTrackPublication),
}

/// A publication of one of our own tracks; holds only a weak room handle.
#[derive(Clone, Debug)]
pub struct LocalTrackPublication {
    #[cfg(not(target_os = "windows"))]
    pub(crate) sid: TrackSid,
    pub(crate) room: WeakRoom,
}

/// A publication of a remote participant's track; holds only a weak room
/// handle plus the published track itself.
#[derive(Clone, Debug)]
pub struct RemoteTrackPublication {
    #[cfg(not(target_os = "windows"))]
    pub(crate) sid: TrackSid,
    pub(crate) room: WeakRoom,
    pub(crate) track: RemoteTrack,
}
#[cfg(not(target_os = "windows"))]
impl TrackPublication {
pub fn sid(&self) -> TrackSid {
match self {
TrackPublication::Local(track) => track.sid(),
TrackPublication::Remote(track) => track.sid(),
}
}
pub fn is_muted(&self) -> bool {
match self {
TrackPublication::Local(track) => track.is_muted(),
TrackPublication::Remote(track) => track.is_muted(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl LocalTrackPublication {
    /// The sid of this publication.
    pub fn sid(&self) -> TrackSid {
        self.sid.clone()
    }

    /// Marks this track as muted on the test server.
    pub fn mute(&self) {
        self.set_mute(true)
    }

    /// Marks this track as unmuted on the test server.
    pub fn unmute(&self) {
        self.set_mute(false)
    }

    // Best-effort: silently does nothing once the room is gone or the server
    // rejects the request, matching the real SDK's fire-and-forget behavior.
    fn set_mute(&self, mute: bool) {
        let Some(room) = self.room.upgrade() else {
            return;
        };
        room.test_server()
            .set_track_muted(&room.token(), &self.sid, mute)
            .ok();
    }

    /// Whether the test server currently reports this track as muted; `false`
    /// once the room is gone.
    pub fn is_muted(&self) -> bool {
        self.room.upgrade().map_or(false, |room| {
            room.test_server()
                .is_track_muted(&room.token(), &self.sid)
                .unwrap_or(false)
        })
    }
}
#[cfg(not(target_os = "windows"))]
impl RemoteTrackPublication {
    /// The sid of this publication.
    pub fn sid(&self) -> TrackSid {
        self.sid.clone()
    }

    /// The published track; always present in the test double.
    pub fn track(&self) -> Option<RemoteTrack> {
        Some(self.track.clone())
    }

    /// Whether this publishes an audio or a video track.
    pub fn kind(&self) -> TrackKind {
        self.track.kind()
    }

    /// Whether the test server currently reports this track as muted; `false`
    /// once the room is gone.
    pub fn is_muted(&self) -> bool {
        self.room.upgrade().map_or(false, |room| {
            room.test_server()
                .is_track_muted(&room.token(), &self.sid)
                .unwrap_or(false)
        })
    }

    /// Whether this track is enabled, i.e. not in the room's paused set;
    /// `false` once the room is gone.
    pub fn is_enabled(&self) -> bool {
        self.room.upgrade().map_or(false, |room| {
            !room.0.lock().paused_audio_tracks.contains(&self.sid)
        })
    }

    /// Adds or removes this track from the room's paused set; does nothing once
    /// the room is gone.
    pub fn set_enabled(&self, enabled: bool) {
        let Some(room) = self.room.upgrade() else {
            return;
        };
        let mut state = room.0.lock();
        if enabled {
            state.paused_audio_tracks.remove(&self.sid);
        } else {
            state.paused_audio_tracks.insert(self.sid.clone());
        }
    }
}

View file

@ -1,201 +0,0 @@
use super::*;
#[cfg(not(windows))]
use webrtc::{audio_source::RtcAudioSource, video_source::RtcVideoSource};
#[cfg(not(windows))]
pub use livekit::track::{TrackKind, TrackSource};
/// Test double for a locally published track.
#[derive(Clone, Debug)]
pub enum LocalTrack {
    Audio(LocalAudioTrack),
    Video(LocalVideoTrack),
}

/// Test double for a remotely published track.
#[derive(Clone, Debug)]
pub enum RemoteTrack {
    Audio(RemoteAudioTrack),
    Video(RemoteVideoTrack),
}

/// Inert stand-in for a local video track; carries no state.
#[derive(Clone, Debug)]
pub struct LocalVideoTrack {}

/// Inert stand-in for a local audio track; carries no state.
#[derive(Clone, Debug)]
pub struct LocalAudioTrack {}

/// A remote video track backed by the in-process test server.
#[derive(Clone, Debug)]
pub struct RemoteVideoTrack {
    #[cfg(not(target_os = "windows"))]
    pub(super) server_track: Arc<TestServerVideoTrack>,
    pub(super) _room: WeakRoom,
}

/// A remote audio track backed by the in-process test server.
#[derive(Clone, Debug)]
pub struct RemoteAudioTrack {
    #[cfg(not(target_os = "windows"))]
    pub(super) server_track: Arc<TestServerAudioTrack>,
    pub(super) room: WeakRoom,
}

/// Test double for an RTC-level track, audio or video.
pub enum RtcTrack {
    Audio(RtcAudioTrack),
    Video(RtcVideoTrack),
}

/// RTC-level view of a server audio track; enable/disable state lives in the
/// room's paused set.
pub struct RtcAudioTrack {
    #[cfg(not(target_os = "windows"))]
    pub(super) server_track: Arc<TestServerAudioTrack>,
    pub(super) room: WeakRoom,
}

/// RTC-level view of a server video track; always enabled in the test double.
pub struct RtcVideoTrack {
    #[cfg(not(target_os = "windows"))]
    pub(super) _server_track: Arc<TestServerVideoTrack>,
}
#[cfg(not(target_os = "windows"))]
impl RemoteTrack {
    /// The sid of the underlying track.
    pub fn sid(&self) -> TrackSid {
        match self {
            Self::Audio(audio) => audio.sid(),
            Self::Video(video) => video.sid(),
        }
    }

    /// Whether this wraps an audio or a video track.
    pub fn kind(&self) -> TrackKind {
        if matches!(self, Self::Audio(_)) {
            TrackKind::Audio
        } else {
            TrackKind::Video
        }
    }

    /// Identity of the participant that published this track.
    pub fn publisher_id(&self) -> ParticipantIdentity {
        match self {
            Self::Audio(audio) => audio.publisher_id(),
            Self::Video(video) => video.publisher_id(),
        }
    }

    /// The fake RTC-level track for this remote track.
    pub fn rtc_track(&self) -> RtcTrack {
        match self {
            Self::Audio(audio) => RtcTrack::Audio(audio.rtc_track()),
            Self::Video(video) => RtcTrack::Video(video.rtc_track()),
        }
    }
}
#[cfg(not(windows))]
impl LocalVideoTrack {
    /// Test stand-in for the real SDK constructor; name and source are ignored.
    pub fn create_video_track(_name: &str, _source: RtcVideoSource) -> Self {
        Self {}
    }
}

#[cfg(not(windows))]
impl LocalAudioTrack {
    /// Test stand-in for the real SDK constructor; name and source are ignored.
    pub fn create_audio_track(_name: &str, _source: RtcAudioSource) -> Self {
        Self {}
    }
}
#[cfg(not(target_os = "windows"))]
impl RemoteAudioTrack {
pub fn sid(&self) -> TrackSid {
self.server_track.sid.clone()
}
pub fn publisher_id(&self) -> ParticipantIdentity {
self.server_track.publisher_id.clone()
}
pub fn start(&self) {
if let Some(room) = self.room.upgrade() {
room.0
.lock()
.paused_audio_tracks
.remove(&self.server_track.sid);
}
}
pub fn stop(&self) {
if let Some(room) = self.room.upgrade() {
room.0
.lock()
.paused_audio_tracks
.insert(self.server_track.sid.clone());
}
}
pub fn rtc_track(&self) -> RtcAudioTrack {
RtcAudioTrack {
server_track: self.server_track.clone(),
room: self.room.clone(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl RemoteVideoTrack {
pub fn sid(&self) -> TrackSid {
self.server_track.sid.clone()
}
pub fn publisher_id(&self) -> ParticipantIdentity {
self.server_track.publisher_id.clone()
}
pub fn rtc_track(&self) -> RtcVideoTrack {
RtcVideoTrack {
_server_track: self.server_track.clone(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl RtcTrack {
pub fn enabled(&self) -> bool {
match self {
RtcTrack::Audio(track) => track.enabled(),
RtcTrack::Video(track) => track.enabled(),
}
}
pub fn set_enabled(&self, enabled: bool) {
match self {
RtcTrack::Audio(track) => track.set_enabled(enabled),
RtcTrack::Video(_) => {}
}
}
}
#[cfg(not(target_os = "windows"))]
impl RtcAudioTrack {
    /// Adds or removes this track from the room's paused set; does nothing once
    /// the room is gone.
    pub fn set_enabled(&self, enabled: bool) {
        let Some(room) = self.room.upgrade() else {
            return;
        };
        let mut state = room.0.lock();
        if enabled {
            state.paused_audio_tracks.remove(&self.server_track.sid);
        } else {
            state
                .paused_audio_tracks
                .insert(self.server_track.sid.clone());
        }
    }

    /// Whether this track is enabled, i.e. not in the room's paused set;
    /// `false` once the room is gone.
    pub fn enabled(&self) -> bool {
        self.room.upgrade().map_or(false, |room| {
            !room
                .0
                .lock()
                .paused_audio_tracks
                .contains(&self.server_track.sid)
        })
    }
}
impl RtcVideoTrack {
    /// Fake video tracks are always reported as enabled; only audio tracks can
    /// be paused in the test server.
    pub fn enabled(&self) -> bool {
        true
    }
}

View file

@ -1,136 +0,0 @@
use super::track::{RtcAudioTrack, RtcVideoTrack};
use futures::Stream;
use livekit::webrtc as real;
use std::{
pin::Pin,
task::{Context, Poll},
};
/// Inert stand-in for `livekit::webrtc`'s video stream API.
pub mod video_stream {
    use super::*;

    pub mod native {
        use super::*;
        use real::video_frame::BoxVideoFrame;

        /// Test double for the native video frame stream; never yields a frame.
        pub struct NativeVideoStream {
            pub track: RtcVideoTrack,
        }

        impl NativeVideoStream {
            pub fn new(track: RtcVideoTrack) -> Self {
                Self { track }
            }
        }

        impl Stream for NativeVideoStream {
            type Item = BoxVideoFrame;

            // Always pending: the fake never produces frames. NOTE(review): no
            // waker is registered, so a task awaiting this stream is never
            // woken — acceptable only because tests don't drive it to completion.
            fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
                Poll::Pending
            }
        }
    }
}
/// Inert stand-in for `livekit::webrtc`'s audio stream API.
pub mod audio_stream {
    use super::*;

    pub mod native {
        use super::*;
        use real::audio_frame::AudioFrame;

        /// Test double for the native audio frame stream; never yields a frame.
        pub struct NativeAudioStream {
            pub track: RtcAudioTrack,
        }

        impl NativeAudioStream {
            /// Sample rate and channel count are accepted for signature parity
            /// with the real SDK but ignored.
            pub fn new(track: RtcAudioTrack, _sample_rate: i32, _num_channels: i32) -> Self {
                Self { track }
            }
        }

        impl Stream for NativeAudioStream {
            type Item = AudioFrame<'static>;

            // Always pending: the fake never produces audio. NOTE(review): no
            // waker is registered, so a task awaiting this stream is never woken.
            fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
                Poll::Pending
            }
        }
    }
}
/// Inert stand-in for `livekit::webrtc`'s audio source API.
pub mod audio_source {
    use super::*;

    pub use real::audio_source::AudioSourceOptions;

    pub mod native {
        use std::sync::Arc;

        use super::*;
        use real::{audio_frame::AudioFrame, RtcError};

        /// Test double for a native audio source; records its configuration and
        /// discards every captured frame.
        #[derive(Clone)]
        pub struct NativeAudioSource {
            pub options: Arc<AudioSourceOptions>,
            pub sample_rate: u32,
            pub num_channels: u32,
        }

        impl NativeAudioSource {
            /// `_queue_size_ms` exists for signature parity with the real SDK
            /// and has no effect here.
            pub fn new(
                options: AudioSourceOptions,
                sample_rate: u32,
                num_channels: u32,
                _queue_size_ms: u32,
            ) -> Self {
                Self {
                    options: Arc::new(options),
                    sample_rate,
                    num_channels,
                }
            }

            /// No-op: captured frames are dropped in the test environment.
            pub async fn capture_frame(&self, _frame: &AudioFrame<'_>) -> Result<(), RtcError> {
                Ok(())
            }
        }
    }

    pub enum RtcAudioSource {
        Native(native::NativeAudioSource),
    }
}
pub use livekit::webrtc::audio_frame;
pub use livekit::webrtc::video_frame;
/// Inert stand-in for `livekit::webrtc`'s video source API.
pub mod video_source {
    use super::*;

    pub use real::video_source::VideoResolution;

    pub struct RTCVideoSource;

    pub mod native {
        use super::*;
        use real::video_frame::{VideoBuffer, VideoFrame};

        /// Test double for a native video source; records the resolution and
        /// discards every captured frame.
        #[derive(Clone)]
        pub struct NativeVideoSource {
            pub resolution: VideoResolution,
        }

        impl NativeVideoSource {
            pub fn new(resolution: super::VideoResolution) -> Self {
                Self { resolution }
            }

            /// No-op: captured frames are dropped in the test environment.
            pub fn capture_frame<T: AsRef<dyn VideoBuffer>>(&self, _frame: &VideoFrame<T>) {}
        }
    }

    pub enum RtcVideoSource {
        Native(native::NativeVideoSource),
    }
}