Use LiveKit's Rust SDK instead of their Swift SDK (#13343)

See https://github.com/livekit/rust-sdks/pull/355

Todo:

* [x] make `call` / `live_kit_client` crates use the livekit rust sdk (see the wiring sketch after this list)
* [x] create a fake version of livekit rust API for integration tests
* [x] capture local audio
* [x] play remote audio
* [x] capture local video tracks
* [x] play remote video tracks
* [x] tests passing
* bugs
    * [x] deafening does not work (https://github.com/livekit/rust-sdks/issues/359)
    * [x] mute and speaking status are not replicated properly (https://github.com/livekit/rust-sdks/issues/358)
    * [x] **linux** - crash due to a symbol conflict between WebRTC's BoringSSL and libcurl's OpenSSL (https://github.com/livekit/rust-sdks/issues/89)
    * [x] **linux** - libwebrtc-sys adds undesired dependencies on `libGL` and `libXext`
    * [x] **windows** - linker error, possibly related to the C++ standard library (https://github.com/livekit/rust-sdks/issues/364)
        ```
        libwebrtc_sys-54978c6ad5066a35.rlib(video_frame.obj) : error LNK2038:
        mismatch detected for 'RuntimeLibrary': value 'MT_StaticRelease' doesn't
        match value 'MD_DynamicRelease' in
        libtree_sitter_yaml-df6b0adf8f009e8f.rlib(2e40c9e35e9506f4-scanner.o)
        ```
    * [x] audio problems
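
The first checklist item boils down to running the Rust SDK on Zed's own executor and HTTP stack rather than on a Swift runtime. A minimal sketch of the wiring, using the `init` function added in this diff (the startup hook shown here is assumed, not part of the change):

```rust
use std::sync::Arc;

// Hypothetical startup hook; the real call site lives in Zed's app setup code.
fn wire_up_livekit(
    dispatcher: Arc<dyn gpui::PlatformDispatcher>,
    http_client: Arc<dyn http_client::HttpClient>,
) {
    // Routes the SDK's async work onto gpui's dispatcher and reuses Zed's HTTP
    // client instead of letting the SDK bring its own runtime and client.
    live_kit_client::init(dispatcher, http_client);
}
```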

Release Notes:

- Switched from the Swift LiveKit SDK to the Rust LiveKit SDK 🦀

---------

Co-authored-by: Mikayla Maki <mikayla@zed.dev>
Co-authored-by: Conrad Irwin <conrad@zed.dev>
Co-authored-by: Kirill Bulatov <kirill@zed.dev>
Co-authored-by: Michael Sloan <michael@zed.dev>
Commit 1235d0808e (parent 6ff69faf37)
Max Brunsfeld, 2024-11-15 13:18:50 -08:00, committed by GitHub
48 changed files with 3212 additions and 2805 deletions

@@ -1,37 +1,387 @@
#![allow(clippy::arc_with_non_send_sync)]
#![cfg_attr(target_os = "windows", allow(unused))]
use std::sync::Arc;
#[cfg(all(target_os = "macos", not(any(test, feature = "test-support"))))]
pub mod prod;
#[cfg(all(target_os = "macos", not(any(test, feature = "test-support"))))]
pub use prod::*;
#[cfg(any(test, feature = "test-support", not(target_os = "macos")))]
mod remote_video_track_view;
#[cfg(any(test, feature = "test-support", target_os = "windows"))]
pub mod test;
#[cfg(any(test, feature = "test-support", not(target_os = "macos")))]
use anyhow::{anyhow, Context as _, Result};
use cpal::{
traits::{DeviceTrait, HostTrait, StreamTrait as _},
StreamConfig,
};
use futures::{io, Stream, StreamExt as _};
use gpui::{AppContext, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, Task};
use parking_lot::Mutex;
use std::{borrow::Cow, future::Future, pin::Pin, sync::Arc};
use util::{debug_panic, ResultExt as _, TryFutureExt};
#[cfg(not(target_os = "windows"))]
use webrtc::{
audio_frame::AudioFrame,
audio_source::{native::NativeAudioSource, AudioSourceOptions, RtcAudioSource},
audio_stream::native::NativeAudioStream,
video_frame::{VideoBuffer, VideoFrame, VideoRotation},
video_source::{native::NativeVideoSource, RtcVideoSource, VideoResolution},
video_stream::native::NativeVideoStream,
};
#[cfg(all(not(any(test, feature = "test-support")), not(target_os = "windows")))]
pub use livekit::*;
#[cfg(any(test, feature = "test-support", target_os = "windows"))]
pub use test::*;
pub type Sid = String;
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
#[derive(Clone, Eq, PartialEq)]
pub enum ConnectionState {
Disconnected,
Connected { url: String, token: String },
}
pub struct AudioStream {
_tasks: [Task<Option<()>>; 2],
}
#[derive(Clone)]
pub enum RoomUpdate {
ActiveSpeakersChanged { speakers: Vec<Sid> },
RemoteAudioTrackMuteChanged { track_id: Sid, muted: bool },
SubscribedToRemoteVideoTrack(Arc<RemoteVideoTrack>),
SubscribedToRemoteAudioTrack(Arc<RemoteAudioTrack>, Arc<RemoteTrackPublication>),
UnsubscribedFromRemoteVideoTrack { publisher_id: Sid, track_id: Sid },
UnsubscribedFromRemoteAudioTrack { publisher_id: Sid, track_id: Sid },
LocalAudioTrackPublished { publication: LocalTrackPublication },
LocalAudioTrackUnpublished { publication: LocalTrackPublication },
LocalVideoTrackPublished { publication: LocalTrackPublication },
LocalVideoTrackUnpublished { publication: LocalTrackPublication },
}
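// Routes LiveKit's async runnables onto gpui's platform dispatcher so the SDK runs on Zed's executor.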
struct Dispatcher(Arc<dyn gpui::PlatformDispatcher>);
#[cfg(not(target_os = "windows"))]
impl livekit::dispatcher::Dispatcher for Dispatcher {
fn dispatch(&self, runnable: livekit::dispatcher::Runnable) {
self.0.dispatch(runnable, None);
}
fn dispatch_after(
&self,
duration: std::time::Duration,
runnable: livekit::dispatcher::Runnable,
) {
self.0.dispatch_after(duration, runnable);
}
}
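// Lets the LiveKit SDK issue HTTP requests through Zed's `http_client` crate rather than its own client.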
struct HttpClientAdapter(Arc<dyn http_client::HttpClient>);
fn http_2_status(status: http_client::http::StatusCode) -> http_2::StatusCode {
http_2::StatusCode::from_u16(status.as_u16())
.expect("valid status code to status code conversion")
}
#[cfg(not(target_os = "windows"))]
impl livekit::dispatcher::HttpClient for HttpClientAdapter {
fn get(
&self,
url: &str,
) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
let http_client = self.0.clone();
let url = url.to_string();
Box::pin(async move {
let response = http_client
.get(&url, http_client::AsyncBody::empty(), false)
.await
.map_err(io::Error::other)?;
Ok(livekit::dispatcher::Response {
status: http_2_status(response.status()),
body: Box::pin(response.into_body()),
})
})
}
fn send_async(
&self,
request: http_2::Request<Vec<u8>>,
) -> Pin<Box<dyn Future<Output = io::Result<livekit::dispatcher::Response>> + Send>> {
let http_client = self.0.clone();
let mut builder = http_client::http::Request::builder()
.method(request.method().as_str())
.uri(request.uri().to_string());
for (key, value) in request.headers().iter() {
builder = builder.header(key.as_str(), value.as_bytes());
}
if !request.extensions().is_empty() {
debug_panic!(
"Livekit sent an HTTP request with a protocol extension that Zed doesn't support!"
);
}
let request = builder
.body(http_client::AsyncBody::from_bytes(
request.into_body().into(),
))
.unwrap();
Box::pin(async move {
let response = http_client.send(request).await.map_err(io::Error::other)?;
Ok(livekit::dispatcher::Response {
status: http_2_status(response.status()),
body: Box::pin(response.into_body()),
})
})
}
}
#[cfg(target_os = "windows")]
pub fn init(
dispatcher: Arc<dyn gpui::PlatformDispatcher>,
http_client: Arc<dyn http_client::HttpClient>,
) {
}
#[cfg(not(target_os = "windows"))]
pub fn init(
dispatcher: Arc<dyn gpui::PlatformDispatcher>,
http_client: Arc<dyn http_client::HttpClient>,
) {
livekit::dispatcher::set_dispatcher(Dispatcher(dispatcher));
livekit::dispatcher::set_http_client(HttpClientAdapter(http_client));
}
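// Wraps a gpui screen capture source in a LiveKit local video track, forwarding each captured
// frame into a NativeVideoSource.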
#[cfg(not(target_os = "windows"))]
pub async fn capture_local_video_track(
capture_source: &dyn ScreenCaptureSource,
) -> Result<(track::LocalVideoTrack, Box<dyn ScreenCaptureStream>)> {
let resolution = capture_source.resolution()?;
let track_source = NativeVideoSource::new(VideoResolution {
width: resolution.width.0 as u32,
height: resolution.height.0 as u32,
});
let capture_stream = capture_source
.stream({
let track_source = track_source.clone();
Box::new(move |frame| {
if let Some(buffer) = video_frame_buffer_to_webrtc(frame) {
track_source.capture_frame(&VideoFrame {
rotation: VideoRotation::VideoRotation0,
timestamp_us: 0,
buffer,
});
}
})
})
.await??;
Ok((
track::LocalVideoTrack::create_video_track(
"screen share",
RtcVideoSource::Native(track_source),
),
capture_stream,
))
}
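// Captures microphone input with cpal and forwards the samples to a LiveKit NativeAudioSource.
// Under test configurations no real input device is opened.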
#[cfg(not(target_os = "windows"))]
pub fn capture_local_audio_track(
cx: &mut AppContext,
) -> Result<(track::LocalAudioTrack, AudioStream)> {
let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded();
let sample_rate;
let channels;
let stream;
if cfg!(any(test, feature = "test-support")) {
sample_rate = 1;
channels = 1;
stream = None;
} else {
let device = cpal::default_host()
.default_input_device()
.ok_or_else(|| anyhow!("no audio input device available"))?;
let config = device
.default_input_config()
.context("failed to get default input config")?;
sample_rate = config.sample_rate().0;
channels = config.channels() as u32;
stream = Some(
device
.build_input_stream_raw(
&config.config(),
cpal::SampleFormat::I16,
move |data, _: &_| {
frame_tx
.unbounded_send(AudioFrame {
data: Cow::Owned(data.as_slice::<i16>().unwrap().to_vec()),
sample_rate,
num_channels: channels,
samples_per_channel: data.len() as u32 / channels,
})
.ok();
},
|err| log::error!("error capturing audio track: {:?}", err),
None,
)
.context("failed to build input stream")?,
);
}
let source = NativeAudioSource::new(
AudioSourceOptions {
echo_cancellation: true,
noise_suppression: true,
auto_gain_control: false,
},
sample_rate,
channels,
// TODO livekit: Pull these out of a proto later
100,
);
let stream_task = cx.foreground_executor().spawn(async move {
if let Some(stream) = &stream {
stream.play().log_err();
}
futures::future::pending().await
});
let transmit_task = cx.background_executor().spawn({
let source = source.clone();
async move {
while let Some(frame) = frame_rx.next().await {
source.capture_frame(&frame).await.ok();
}
Some(())
}
});
let track =
track::LocalAudioTrack::create_audio_track("microphone", RtcAudioSource::Native(source));
Ok((
track,
AudioStream {
_tasks: [stream_task, transmit_task],
},
))
}
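// Plays a remote audio track through the default cpal output device, keeping the latest frame in
// a shared buffer and rebuilding the output stream whenever the frame format changes.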
#[cfg(not(target_os = "windows"))]
pub fn play_remote_audio_track(
track: &track::RemoteAudioTrack,
cx: &mut AppContext,
) -> AudioStream {
let buffer = Arc::new(Mutex::new(Vec::<i16>::new()));
let (stream_config_tx, mut stream_config_rx) = futures::channel::mpsc::unbounded();
// TODO livekit: Pull these out of a proto later
let mut stream = NativeAudioStream::new(track.rtc_track(), 48000, 1);
let receive_task = cx.background_executor().spawn({
let buffer = buffer.clone();
async move {
let mut stream_config = None;
while let Some(frame) = stream.next().await {
let mut buffer = buffer.lock();
let buffer_size = frame.samples_per_channel * frame.num_channels;
debug_assert!(frame.data.len() == buffer_size as usize);
let frame_config = StreamConfig {
channels: frame.num_channels as u16,
sample_rate: cpal::SampleRate(frame.sample_rate),
buffer_size: cpal::BufferSize::Fixed(buffer_size),
};
if stream_config.as_ref().map_or(true, |c| *c != frame_config) {
buffer.resize(buffer_size as usize, 0);
stream_config = Some(frame_config.clone());
stream_config_tx.unbounded_send(frame_config).ok();
}
if frame.data.len() == buffer.len() {
buffer.copy_from_slice(&frame.data);
} else {
buffer.iter_mut().for_each(|x| *x = 0);
}
}
Some(())
}
});
let play_task = cx.foreground_executor().spawn(
{
let buffer = buffer.clone();
async move {
if cfg!(any(test, feature = "test-support")) {
return Err(anyhow!("can't play audio in tests"));
}
let device = cpal::default_host()
.default_output_device()
.ok_or_else(|| anyhow!("no audio output device available"))?;
let mut _output_stream = None;
while let Some(config) = stream_config_rx.next().await {
_output_stream = Some(device.build_output_stream(
&config,
{
let buffer = buffer.clone();
move |data, _info| {
let buffer = buffer.lock();
if data.len() == buffer.len() {
data.copy_from_slice(&buffer);
} else {
data.iter_mut().for_each(|x| *x = 0);
}
}
},
|error| log::error!("error playing audio track: {:?}", error),
None,
)?);
}
Ok(())
}
}
.log_err(),
);
AudioStream {
_tasks: [receive_task, play_task],
}
}
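// Converts a remote video track into a stream of gpui ScreenCaptureFrames; the Windows variant
// is a stub that yields no frames.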
#[cfg(target_os = "windows")]
pub fn play_remote_video_track(
track: &track::RemoteVideoTrack,
) -> impl Stream<Item = ScreenCaptureFrame> {
futures::stream::empty()
}
#[cfg(not(target_os = "windows"))]
pub fn play_remote_video_track(
track: &track::RemoteVideoTrack,
) -> impl Stream<Item = ScreenCaptureFrame> {
NativeVideoStream::new(track.rtc_track())
.filter_map(|frame| async move { video_frame_buffer_from_webrtc(frame.buffer) })
}
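// On macOS, remote WebRTC frames wrap CVPixelBuffers that can be handed to gpui directly;
// the fallback for other platforms returns None and the frame is dropped.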
#[cfg(target_os = "macos")]
fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<ScreenCaptureFrame> {
use core_foundation::base::TCFType as _;
use media::core_video::CVImageBuffer;
let buffer = buffer.as_native()?;
let pixel_buffer = buffer.get_cv_pixel_buffer();
if pixel_buffer.is_null() {
return None;
}
unsafe {
Some(ScreenCaptureFrame(CVImageBuffer::wrap_under_get_rule(
pixel_buffer as _,
)))
}
}
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
fn video_frame_buffer_from_webrtc(_buffer: Box<dyn VideoBuffer>) -> Option<ScreenCaptureFrame> {
None
}
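// Reverse direction, used for screen sharing: wraps a captured CVPixelBuffer as a WebRTC video
// buffer on macOS; other platforms return None.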
#[cfg(target_os = "macos")]
fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
use core_foundation::base::TCFType as _;
let pixel_buffer = frame.0.as_concrete_TypeRef();
std::mem::forget(frame.0);
unsafe {
Some(webrtc::video_frame::native::NativeBuffer::from_cv_pixel_buffer(pixel_buffer as _))
}
}
#[cfg(not(any(target_os = "macos", target_os = "windows")))]
fn video_frame_buffer_to_webrtc(_frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
None as Option<Box<dyn VideoBuffer>>
}

@@ -1,981 +0,0 @@
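// This file is deleted by the change: it contained the `extern "C"` declarations into the Swift
// LiveKit bridge and the Rust wrappers (Room, tracks, publications, delegate) built on top of them.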
use crate::{ConnectionState, RoomUpdate, Sid};
use anyhow::{anyhow, Context, Result};
use core_foundation::{
array::{CFArray, CFArrayRef},
base::{CFRelease, CFRetain, TCFType},
string::{CFString, CFStringRef},
};
use futures::{
channel::{mpsc, oneshot},
Future,
};
pub use media::core_video::CVImageBuffer;
use media::core_video::CVImageBufferRef;
use parking_lot::Mutex;
use postage::watch;
use std::{
ffi::c_void,
sync::{Arc, Weak},
};
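// Declares a transparent, Send-able wrapper around a raw pointer to an object owned by the Swift bridge.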
macro_rules! pointer_type {
($pointer_name:ident) => {
#[repr(transparent)]
#[derive(Copy, Clone, Debug)]
pub struct $pointer_name(pub *const std::ffi::c_void);
unsafe impl Send for $pointer_name {}
};
}
mod swift {
pointer_type!(Room);
pointer_type!(LocalAudioTrack);
pointer_type!(RemoteAudioTrack);
pointer_type!(LocalVideoTrack);
pointer_type!(RemoteVideoTrack);
pointer_type!(LocalTrackPublication);
pointer_type!(RemoteTrackPublication);
pointer_type!(MacOSDisplay);
pointer_type!(RoomDelegate);
}
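// Functions implemented on the Swift side of the bridge, along with the callbacks it invokes.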
extern "C" {
fn LKRoomDelegateCreate(
callback_data: *mut c_void,
on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
on_did_subscribe_to_remote_audio_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
remote_track: swift::RemoteAudioTrack,
remote_publication: swift::RemoteTrackPublication,
),
on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
),
on_mute_changed_from_remote_audio_track: extern "C" fn(
callback_data: *mut c_void,
track_id: CFStringRef,
muted: bool,
),
on_active_speakers_changed: extern "C" fn(
callback_data: *mut c_void,
participants: CFArrayRef,
),
on_did_subscribe_to_remote_video_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
remote_track: swift::RemoteVideoTrack,
),
on_did_unsubscribe_from_remote_video_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
),
on_did_publish_or_unpublish_local_audio_track: extern "C" fn(
callback_data: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
),
on_did_publish_or_unpublish_local_video_track: extern "C" fn(
callback_data: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
),
) -> swift::RoomDelegate;
fn LKRoomCreate(delegate: swift::RoomDelegate) -> swift::Room;
fn LKRoomConnect(
room: swift::Room,
url: CFStringRef,
token: CFStringRef,
callback: extern "C" fn(*mut c_void, CFStringRef),
callback_data: *mut c_void,
);
fn LKRoomDisconnect(room: swift::Room);
fn LKRoomPublishVideoTrack(
room: swift::Room,
track: swift::LocalVideoTrack,
callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
callback_data: *mut c_void,
);
fn LKRoomPublishAudioTrack(
room: swift::Room,
track: swift::LocalAudioTrack,
callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
callback_data: *mut c_void,
);
fn LKRoomUnpublishTrack(room: swift::Room, publication: swift::LocalTrackPublication);
fn LKRoomAudioTracksForRemoteParticipant(
room: swift::Room,
participant_id: CFStringRef,
) -> CFArrayRef;
fn LKRoomAudioTrackPublicationsForRemoteParticipant(
room: swift::Room,
participant_id: CFStringRef,
) -> CFArrayRef;
fn LKRoomVideoTracksForRemoteParticipant(
room: swift::Room,
participant_id: CFStringRef,
) -> CFArrayRef;
fn LKVideoRendererCreate(
callback_data: *mut c_void,
on_frame: extern "C" fn(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool,
on_drop: extern "C" fn(callback_data: *mut c_void),
) -> *const c_void;
fn LKRemoteAudioTrackGetSid(track: swift::RemoteAudioTrack) -> CFStringRef;
fn LKRemoteVideoTrackGetSid(track: swift::RemoteVideoTrack) -> CFStringRef;
fn LKRemoteAudioTrackStart(track: swift::RemoteAudioTrack);
fn LKRemoteAudioTrackStop(track: swift::RemoteAudioTrack);
fn LKVideoTrackAddRenderer(track: swift::RemoteVideoTrack, renderer: *const c_void);
fn LKDisplaySources(
callback_data: *mut c_void,
callback: extern "C" fn(
callback_data: *mut c_void,
sources: CFArrayRef,
error: CFStringRef,
),
);
fn LKCreateScreenShareTrackForDisplay(display: swift::MacOSDisplay) -> swift::LocalVideoTrack;
fn LKLocalAudioTrackCreateTrack() -> swift::LocalAudioTrack;
fn LKLocalTrackPublicationSetMute(
publication: swift::LocalTrackPublication,
muted: bool,
on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
callback_data: *mut c_void,
);
fn LKRemoteTrackPublicationSetEnabled(
publication: swift::RemoteTrackPublication,
enabled: bool,
on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
callback_data: *mut c_void,
);
fn LKLocalTrackPublicationIsMuted(publication: swift::LocalTrackPublication) -> bool;
fn LKRemoteTrackPublicationIsMuted(publication: swift::RemoteTrackPublication) -> bool;
fn LKLocalTrackPublicationGetSid(publication: swift::LocalTrackPublication) -> CFStringRef;
fn LKRemoteTrackPublicationGetSid(publication: swift::RemoteTrackPublication) -> CFStringRef;
}
pub struct Room {
native_room: swift::Room,
connection: Mutex<(
watch::Sender<ConnectionState>,
watch::Receiver<ConnectionState>,
)>,
update_subscribers: Mutex<Vec<mpsc::UnboundedSender<RoomUpdate>>>,
_delegate: RoomDelegate,
}
impl Room {
pub fn new() -> Arc<Self> {
Arc::new_cyclic(|weak_room| {
let delegate = RoomDelegate::new(weak_room.clone());
Self {
native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
update_subscribers: Default::default(),
_delegate: delegate,
}
})
}
pub fn status(&self) -> watch::Receiver<ConnectionState> {
self.connection.lock().1.clone()
}
pub fn connect(self: &Arc<Self>, url: &str, token: &str) -> impl Future<Output = Result<()>> {
let url = CFString::new(url);
let token = CFString::new(token);
let (did_connect, tx, rx) = Self::build_done_callback();
unsafe {
LKRoomConnect(
self.native_room,
url.as_concrete_TypeRef(),
token.as_concrete_TypeRef(),
did_connect,
tx,
)
}
let this = self.clone();
let url = url.to_string();
let token = token.to_string();
async move {
rx.await.unwrap().context("error connecting to room")?;
*this.connection.lock().0.borrow_mut() = ConnectionState::Connected { url, token };
Ok(())
}
}
fn did_disconnect(&self) {
*self.connection.lock().0.borrow_mut() = ConnectionState::Disconnected;
}
pub fn display_sources(self: &Arc<Self>) -> impl Future<Output = Result<Vec<MacOSDisplay>>> {
extern "C" fn callback(tx: *mut c_void, sources: CFArrayRef, error: CFStringRef) {
unsafe {
let tx = Box::from_raw(tx as *mut oneshot::Sender<Result<Vec<MacOSDisplay>>>);
if sources.is_null() {
let _ = tx.send(Err(anyhow!("{}", CFString::wrap_under_get_rule(error))));
} else {
let sources = CFArray::wrap_under_get_rule(sources)
.into_iter()
.map(|source| MacOSDisplay::new(swift::MacOSDisplay(*source)))
.collect();
let _ = tx.send(Ok(sources));
}
}
}
let (tx, rx) = oneshot::channel();
unsafe {
LKDisplaySources(Box::into_raw(Box::new(tx)) as *mut _, callback);
}
async move { rx.await.unwrap() }
}
pub fn publish_video_track(
self: &Arc<Self>,
track: LocalVideoTrack,
) -> impl Future<Output = Result<LocalTrackPublication>> {
let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
extern "C" fn callback(
tx: *mut c_void,
publication: swift::LocalTrackPublication,
error: CFStringRef,
) {
let tx =
unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
if error.is_null() {
let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
let _ = tx.send(Err(anyhow!(error)));
}
}
unsafe {
LKRoomPublishVideoTrack(
self.native_room,
track.0,
callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
);
}
async { rx.await.unwrap().context("error publishing video track") }
}
pub fn publish_audio_track(
self: &Arc<Self>,
track: LocalAudioTrack,
) -> impl Future<Output = Result<LocalTrackPublication>> {
let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
extern "C" fn callback(
tx: *mut c_void,
publication: swift::LocalTrackPublication,
error: CFStringRef,
) {
let tx =
unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
if error.is_null() {
let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
let _ = tx.send(Err(anyhow!(error)));
}
}
unsafe {
LKRoomPublishAudioTrack(
self.native_room,
track.0,
callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
);
}
async { rx.await.unwrap().context("error publishing audio track") }
}
pub fn unpublish_track(&self, publication: LocalTrackPublication) {
unsafe {
LKRoomUnpublishTrack(self.native_room, publication.0);
}
}
pub fn remote_video_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
unsafe {
let tracks = LKRoomVideoTracksForRemoteParticipant(
self.native_room,
CFString::new(participant_id).as_concrete_TypeRef(),
);
if tracks.is_null() {
Vec::new()
} else {
let tracks = CFArray::wrap_under_get_rule(tracks);
tracks
.into_iter()
.map(|native_track| {
let native_track = swift::RemoteVideoTrack(*native_track);
let id =
CFString::wrap_under_get_rule(LKRemoteVideoTrackGetSid(native_track))
.to_string();
Arc::new(RemoteVideoTrack::new(
native_track,
id,
participant_id.into(),
))
})
.collect()
}
}
}
pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
unsafe {
let tracks = LKRoomAudioTracksForRemoteParticipant(
self.native_room,
CFString::new(participant_id).as_concrete_TypeRef(),
);
if tracks.is_null() {
Vec::new()
} else {
let tracks = CFArray::wrap_under_get_rule(tracks);
tracks
.into_iter()
.map(|native_track| {
let native_track = swift::RemoteAudioTrack(*native_track);
let id =
CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
.to_string();
Arc::new(RemoteAudioTrack::new(
native_track,
id,
participant_id.into(),
))
})
.collect()
}
}
}
pub fn remote_audio_track_publications(
&self,
participant_id: &str,
) -> Vec<Arc<RemoteTrackPublication>> {
unsafe {
let tracks = LKRoomAudioTrackPublicationsForRemoteParticipant(
self.native_room,
CFString::new(participant_id).as_concrete_TypeRef(),
);
if tracks.is_null() {
Vec::new()
} else {
let tracks = CFArray::wrap_under_get_rule(tracks);
tracks
.into_iter()
.map(|native_track_publication| {
let native_track_publication =
swift::RemoteTrackPublication(*native_track_publication);
Arc::new(RemoteTrackPublication::new(native_track_publication))
})
.collect()
}
}
}
pub fn updates(&self) -> mpsc::UnboundedReceiver<RoomUpdate> {
let (tx, rx) = mpsc::unbounded();
self.update_subscribers.lock().push(tx);
rx
}
fn did_subscribe_to_remote_audio_track(
&self,
track: RemoteAudioTrack,
publication: RemoteTrackPublication,
) {
let track = Arc::new(track);
let publication = Arc::new(publication);
self.update_subscribers.lock().retain(|tx| {
tx.unbounded_send(RoomUpdate::SubscribedToRemoteAudioTrack(
track.clone(),
publication.clone(),
))
.is_ok()
});
}
fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
self.update_subscribers.lock().retain(|tx| {
tx.unbounded_send(RoomUpdate::UnsubscribedFromRemoteAudioTrack {
publisher_id: publisher_id.clone(),
track_id: track_id.clone(),
})
.is_ok()
});
}
fn mute_changed_from_remote_audio_track(&self, track_id: String, muted: bool) {
self.update_subscribers.lock().retain(|tx| {
tx.unbounded_send(RoomUpdate::RemoteAudioTrackMuteChanged {
track_id: track_id.clone(),
muted,
})
.is_ok()
});
}
fn active_speakers_changed(&self, speakers: Vec<String>) {
self.update_subscribers.lock().retain(move |tx| {
tx.unbounded_send(RoomUpdate::ActiveSpeakersChanged {
speakers: speakers.clone(),
})
.is_ok()
});
}
fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
let track = Arc::new(track);
self.update_subscribers.lock().retain(|tx| {
tx.unbounded_send(RoomUpdate::SubscribedToRemoteVideoTrack(track.clone()))
.is_ok()
});
}
fn did_unsubscribe_from_remote_video_track(&self, publisher_id: String, track_id: String) {
self.update_subscribers.lock().retain(|tx| {
tx.unbounded_send(RoomUpdate::UnsubscribedFromRemoteVideoTrack {
publisher_id: publisher_id.clone(),
track_id: track_id.clone(),
})
.is_ok()
});
}
fn build_done_callback() -> (
extern "C" fn(*mut c_void, CFStringRef),
*mut c_void,
oneshot::Receiver<Result<()>>,
) {
let (tx, rx) = oneshot::channel();
extern "C" fn done_callback(tx: *mut c_void, error: CFStringRef) {
let tx = unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<()>>) };
if error.is_null() {
let _ = tx.send(Ok(()));
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
let _ = tx.send(Err(anyhow!(error)));
}
}
(
done_callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
rx,
)
}
pub fn set_display_sources(&self, _: Vec<MacOSDisplay>) {
unreachable!("This is a test-only function")
}
}
impl Drop for Room {
fn drop(&mut self) {
unsafe {
LKRoomDisconnect(self.native_room);
CFRelease(self.native_room.0);
}
}
}
struct RoomDelegate {
native_delegate: swift::RoomDelegate,
weak_room: *mut c_void,
}
impl RoomDelegate {
fn new(weak_room: Weak<Room>) -> Self {
let weak_room = weak_room.into_raw() as *mut c_void;
let native_delegate = unsafe {
LKRoomDelegateCreate(
weak_room,
Self::on_did_disconnect,
Self::on_did_subscribe_to_remote_audio_track,
Self::on_did_unsubscribe_from_remote_audio_track,
Self::on_mute_change_from_remote_audio_track,
Self::on_active_speakers_changed,
Self::on_did_subscribe_to_remote_video_track,
Self::on_did_unsubscribe_from_remote_video_track,
Self::on_did_publish_or_unpublish_local_audio_track,
Self::on_did_publish_or_unpublish_local_video_track,
)
};
Self {
native_delegate,
weak_room,
}
}
extern "C" fn on_did_disconnect(room: *mut c_void) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
room.did_disconnect();
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_subscribe_to_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
track: swift::RemoteAudioTrack,
publication: swift::RemoteTrackPublication,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
let track = RemoteAudioTrack::new(track, track_id, publisher_id);
let publication = RemoteTrackPublication::new(publication);
if let Some(room) = room.upgrade() {
room.did_subscribe_to_remote_audio_track(track, publication);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_unsubscribe_from_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_mute_change_from_remote_audio_track(
room: *mut c_void,
track_id: CFStringRef,
muted: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.mute_changed_from_remote_audio_track(track_id, muted);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_active_speakers_changed(room: *mut c_void, participants: CFArrayRef) {
if participants.is_null() {
return;
}
let room = unsafe { Weak::from_raw(room as *mut Room) };
let speakers = unsafe {
CFArray::wrap_under_get_rule(participants)
.into_iter()
.map(
|speaker: core_foundation::base::ItemRef<'_, *const c_void>| {
CFString::wrap_under_get_rule(*speaker as CFStringRef).to_string()
},
)
.collect()
};
if let Some(room) = room.upgrade() {
room.active_speakers_changed(speakers);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_subscribe_to_remote_video_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
track: swift::RemoteVideoTrack,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
let track = RemoteVideoTrack::new(track, track_id, publisher_id);
if let Some(room) = room.upgrade() {
room.did_subscribe_to_remote_video_track(track);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_unsubscribe_from_remote_video_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.did_unsubscribe_from_remote_video_track(publisher_id, track_id);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_publish_or_unpublish_local_audio_track(
room: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
let publication = LocalTrackPublication::new(publication);
let update = if is_published {
RoomUpdate::LocalAudioTrackPublished { publication }
} else {
RoomUpdate::LocalAudioTrackUnpublished { publication }
};
room.update_subscribers
.lock()
.retain(|tx| tx.unbounded_send(update.clone()).is_ok());
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_publish_or_unpublish_local_video_track(
room: *mut c_void,
publication: swift::LocalTrackPublication,
is_published: bool,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
if let Some(room) = room.upgrade() {
let publication = LocalTrackPublication::new(publication);
let update = if is_published {
RoomUpdate::LocalVideoTrackPublished { publication }
} else {
RoomUpdate::LocalVideoTrackUnpublished { publication }
};
room.update_subscribers
.lock()
.retain(|tx| tx.unbounded_send(update.clone()).is_ok());
}
let _ = Weak::into_raw(room);
}
}
impl Drop for RoomDelegate {
fn drop(&mut self) {
unsafe {
CFRelease(self.native_delegate.0);
let _ = Weak::from_raw(self.weak_room as *mut Room);
}
}
}
pub struct LocalAudioTrack(swift::LocalAudioTrack);
impl LocalAudioTrack {
pub fn create() -> Self {
Self(unsafe { LKLocalAudioTrackCreateTrack() })
}
}
impl Drop for LocalAudioTrack {
fn drop(&mut self) {
unsafe { CFRelease(self.0 .0) }
}
}
pub struct LocalVideoTrack(swift::LocalVideoTrack);
impl LocalVideoTrack {
pub fn screen_share_for_display(display: &MacOSDisplay) -> Self {
Self(unsafe { LKCreateScreenShareTrackForDisplay(display.0) })
}
}
impl Drop for LocalVideoTrack {
fn drop(&mut self) {
unsafe { CFRelease(self.0 .0) }
}
}
pub struct LocalTrackPublication(swift::LocalTrackPublication);
impl LocalTrackPublication {
pub fn new(native_track_publication: swift::LocalTrackPublication) -> Self {
unsafe {
CFRetain(native_track_publication.0);
}
Self(native_track_publication)
}
pub fn sid(&self) -> String {
unsafe { CFString::wrap_under_get_rule(LKLocalTrackPublicationGetSid(self.0)).to_string() }
}
pub fn set_mute(&self, muted: bool) -> impl Future<Output = Result<()>> {
let (tx, rx) = futures::channel::oneshot::channel();
extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
if error.is_null() {
tx.send(Ok(())).ok();
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
tx.send(Err(anyhow!(error))).ok();
}
}
unsafe {
LKLocalTrackPublicationSetMute(
self.0,
muted,
complete_callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
)
}
async move { rx.await.unwrap() }
}
pub fn is_muted(&self) -> bool {
unsafe { LKLocalTrackPublicationIsMuted(self.0) }
}
}
impl Clone for LocalTrackPublication {
fn clone(&self) -> Self {
unsafe {
CFRetain(self.0 .0);
}
Self(self.0)
}
}
impl Drop for LocalTrackPublication {
fn drop(&mut self) {
unsafe { CFRelease(self.0 .0) }
}
}
pub struct RemoteTrackPublication(swift::RemoteTrackPublication);
impl RemoteTrackPublication {
pub fn new(native_track_publication: swift::RemoteTrackPublication) -> Self {
unsafe {
CFRetain(native_track_publication.0);
}
Self(native_track_publication)
}
pub fn sid(&self) -> String {
unsafe { CFString::wrap_under_get_rule(LKRemoteTrackPublicationGetSid(self.0)).to_string() }
}
pub fn is_muted(&self) -> bool {
unsafe { LKRemoteTrackPublicationIsMuted(self.0) }
}
pub fn set_enabled(&self, enabled: bool) -> impl Future<Output = Result<()>> {
let (tx, rx) = futures::channel::oneshot::channel();
extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
if error.is_null() {
tx.send(Ok(())).ok();
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
tx.send(Err(anyhow!(error))).ok();
}
}
unsafe {
LKRemoteTrackPublicationSetEnabled(
self.0,
enabled,
complete_callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
)
}
async move { rx.await.unwrap() }
}
}
impl Drop for RemoteTrackPublication {
fn drop(&mut self) {
unsafe { CFRelease(self.0 .0) }
}
}
#[derive(Debug)]
pub struct RemoteAudioTrack {
native_track: swift::RemoteAudioTrack,
sid: Sid,
publisher_id: String,
}
impl RemoteAudioTrack {
fn new(native_track: swift::RemoteAudioTrack, sid: Sid, publisher_id: String) -> Self {
unsafe {
CFRetain(native_track.0);
}
Self {
native_track,
sid,
publisher_id,
}
}
pub fn sid(&self) -> &str {
&self.sid
}
pub fn publisher_id(&self) -> &str {
&self.publisher_id
}
pub fn start(&self) {
unsafe { LKRemoteAudioTrackStart(self.native_track) }
}
pub fn stop(&self) {
unsafe { LKRemoteAudioTrackStop(self.native_track) }
}
}
impl Drop for RemoteAudioTrack {
fn drop(&mut self) {
// todo: uncomment this `CFRelease`, unless we find that it was causing
// the crash in the `livekit.multicast` thread.
//
// unsafe { CFRelease(self.native_track.0) }
let _ = self.native_track;
}
}
#[derive(Debug)]
pub struct RemoteVideoTrack {
native_track: swift::RemoteVideoTrack,
sid: Sid,
publisher_id: String,
}
impl RemoteVideoTrack {
fn new(native_track: swift::RemoteVideoTrack, sid: Sid, publisher_id: String) -> Self {
unsafe {
CFRetain(native_track.0);
}
Self {
native_track,
sid,
publisher_id,
}
}
pub fn sid(&self) -> &str {
&self.sid
}
pub fn publisher_id(&self) -> &str {
&self.publisher_id
}
pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
extern "C" fn on_frame(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool {
unsafe {
let tx = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
let buffer = CVImageBuffer::wrap_under_get_rule(frame);
let result = tx.try_broadcast(Frame(buffer));
let _ = Box::into_raw(tx);
match result {
Ok(_) => true,
Err(async_broadcast::TrySendError::Closed(_))
| Err(async_broadcast::TrySendError::Inactive(_)) => {
log::warn!("no active receiver for frame");
false
}
Err(async_broadcast::TrySendError::Full(_)) => {
log::warn!("skipping frame as receiver is not keeping up");
true
}
}
}
}
extern "C" fn on_drop(callback_data: *mut c_void) {
unsafe {
let _ = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
}
}
let (tx, rx) = async_broadcast::broadcast(64);
unsafe {
let renderer = LKVideoRendererCreate(
Box::into_raw(Box::new(tx)) as *mut c_void,
on_frame,
on_drop,
);
LKVideoTrackAddRenderer(self.native_track, renderer);
rx
}
}
}
impl Drop for RemoteVideoTrack {
fn drop(&mut self) {
unsafe { CFRelease(self.native_track.0) }
}
}
pub struct MacOSDisplay(swift::MacOSDisplay);
impl MacOSDisplay {
fn new(ptr: swift::MacOSDisplay) -> Self {
unsafe {
CFRetain(ptr.0);
}
Self(ptr)
}
}
impl Drop for MacOSDisplay {
fn drop(&mut self) {
unsafe { CFRelease(self.0 .0) }
}
}
#[derive(Clone)]
pub struct Frame(CVImageBuffer);
impl Frame {
pub fn width(&self) -> usize {
self.0.width()
}
pub fn height(&self) -> usize {
self.0.height()
}
pub fn image(&self) -> CVImageBuffer {
self.0.clone()
}
}

@@ -0,0 +1,61 @@
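// New view that renders the most recent frame of a remote video track and emits Close when the
// underlying frame stream ends.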
use crate::track::RemoteVideoTrack;
use anyhow::Result;
use futures::StreamExt as _;
use gpui::{
Empty, EventEmitter, IntoElement, Render, ScreenCaptureFrame, Task, View, ViewContext,
VisualContext as _,
};
pub struct RemoteVideoTrackView {
track: RemoteVideoTrack,
frame: Option<ScreenCaptureFrame>,
_maintain_frame: Task<Result<()>>,
}
#[derive(Debug)]
pub enum RemoteVideoTrackViewEvent {
Close,
}
impl RemoteVideoTrackView {
pub fn new(track: RemoteVideoTrack, cx: &mut ViewContext<Self>) -> Self {
cx.focus_handle();
let frames = super::play_remote_video_track(&track);
Self {
track,
frame: None,
_maintain_frame: cx.spawn(|this, mut cx| async move {
futures::pin_mut!(frames);
while let Some(frame) = frames.next().await {
this.update(&mut cx, |this, cx| {
this.frame = Some(frame);
cx.notify();
})?;
}
this.update(&mut cx, |_, cx| cx.emit(RemoteVideoTrackViewEvent::Close))?;
Ok(())
}),
}
}
pub fn clone(&self, cx: &mut ViewContext<Self>) -> View<Self> {
cx.new_view(|cx| Self::new(self.track.clone(), cx))
}
}
impl EventEmitter<RemoteVideoTrackViewEvent> for RemoteVideoTrackView {}
impl Render for RemoteVideoTrackView {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
#[cfg(target_os = "macos")]
if let Some(frame) = &self.frame {
use gpui::Styled as _;
return gpui::surface(frame.0.clone())
.size_full()
.into_any_element();
}
Empty.into_any_element()
}
}

File diff suppressed because it is too large

@@ -0,0 +1,111 @@
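// Test-only participant types; publishing and track lookups are routed through the fake test
// server owned by the room.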
use super::*;
#[derive(Clone, Debug)]
pub enum Participant {
Local(LocalParticipant),
Remote(RemoteParticipant),
}
#[derive(Clone, Debug)]
pub struct LocalParticipant {
#[cfg(not(target_os = "windows"))]
pub(super) identity: ParticipantIdentity,
pub(super) room: Room,
}
#[derive(Clone, Debug)]
pub struct RemoteParticipant {
#[cfg(not(target_os = "windows"))]
pub(super) identity: ParticipantIdentity,
pub(super) room: WeakRoom,
}
#[cfg(not(target_os = "windows"))]
impl Participant {
pub fn identity(&self) -> ParticipantIdentity {
match self {
Participant::Local(participant) => participant.identity.clone(),
Participant::Remote(participant) => participant.identity.clone(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl LocalParticipant {
pub async fn unpublish_track(&self, track: &TrackSid) -> Result<()> {
self.room
.test_server()
.unpublish_track(self.room.token(), track)
.await
}
pub async fn publish_track(
&self,
track: LocalTrack,
_options: TrackPublishOptions,
) -> Result<LocalTrackPublication> {
let this = self.clone();
let track = track.clone();
let server = this.room.test_server();
let sid = match track {
LocalTrack::Video(track) => {
server.publish_video_track(this.room.token(), track).await?
}
LocalTrack::Audio(track) => {
server
.publish_audio_track(this.room.token(), &track)
.await?
}
};
Ok(LocalTrackPublication {
room: self.room.downgrade(),
sid,
})
}
}
#[cfg(not(target_os = "windows"))]
impl RemoteParticipant {
pub fn track_publications(&self) -> HashMap<TrackSid, RemoteTrackPublication> {
if let Some(room) = self.room.upgrade() {
let server = room.test_server();
let audio = server
.audio_tracks(room.token())
.unwrap()
.into_iter()
.filter(|track| track.publisher_id() == self.identity)
.map(|track| {
(
track.sid(),
RemoteTrackPublication {
sid: track.sid(),
room: self.room.clone(),
track: RemoteTrack::Audio(track),
},
)
});
let video = server
.video_tracks(room.token())
.unwrap()
.into_iter()
.filter(|track| track.publisher_id() == self.identity)
.map(|track| {
(
track.sid(),
RemoteTrackPublication {
sid: track.sid(),
room: self.room.clone(),
track: RemoteTrack::Video(track),
},
)
});
audio.chain(video).collect()
} else {
HashMap::default()
}
}
pub fn identity(&self) -> ParticipantIdentity {
self.identity.clone()
}
}

@@ -0,0 +1,116 @@
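// Test-only track publication types; mute and enabled state live on the fake test server and the
// room's paused_audio_tracks set.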
use super::*;
#[derive(Clone, Debug)]
pub enum TrackPublication {
Local(LocalTrackPublication),
Remote(RemoteTrackPublication),
}
#[derive(Clone, Debug)]
pub struct LocalTrackPublication {
#[cfg(not(target_os = "windows"))]
pub(crate) sid: TrackSid,
pub(crate) room: WeakRoom,
}
#[derive(Clone, Debug)]
pub struct RemoteTrackPublication {
#[cfg(not(target_os = "windows"))]
pub(crate) sid: TrackSid,
pub(crate) room: WeakRoom,
pub(crate) track: RemoteTrack,
}
#[cfg(not(target_os = "windows"))]
impl TrackPublication {
pub fn sid(&self) -> TrackSid {
match self {
TrackPublication::Local(track) => track.sid(),
TrackPublication::Remote(track) => track.sid(),
}
}
pub fn is_muted(&self) -> bool {
match self {
TrackPublication::Local(track) => track.is_muted(),
TrackPublication::Remote(track) => track.is_muted(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl LocalTrackPublication {
pub fn sid(&self) -> TrackSid {
self.sid.clone()
}
pub fn mute(&self) {
self.set_mute(true)
}
pub fn unmute(&self) {
self.set_mute(false)
}
fn set_mute(&self, mute: bool) {
if let Some(room) = self.room.upgrade() {
room.test_server()
.set_track_muted(&room.token(), &self.sid, mute)
.ok();
}
}
pub fn is_muted(&self) -> bool {
if let Some(room) = self.room.upgrade() {
room.test_server()
.is_track_muted(&room.token(), &self.sid)
.unwrap_or(false)
} else {
false
}
}
}
#[cfg(not(target_os = "windows"))]
impl RemoteTrackPublication {
pub fn sid(&self) -> TrackSid {
self.sid.clone()
}
pub fn track(&self) -> Option<RemoteTrack> {
Some(self.track.clone())
}
pub fn kind(&self) -> TrackKind {
self.track.kind()
}
pub fn is_muted(&self) -> bool {
if let Some(room) = self.room.upgrade() {
room.test_server()
.is_track_muted(&room.token(), &self.sid)
.unwrap_or(false)
} else {
false
}
}
pub fn is_enabled(&self) -> bool {
if let Some(room) = self.room.upgrade() {
!room.0.lock().paused_audio_tracks.contains(&self.sid)
} else {
false
}
}
pub fn set_enabled(&self, enabled: bool) {
if let Some(room) = self.room.upgrade() {
let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks;
if enabled {
paused_audio_tracks.remove(&self.sid);
} else {
paused_audio_tracks.insert(self.sid.clone());
}
}
}
}

@@ -0,0 +1,201 @@
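// Test-only local and remote track types mirroring the subset of the livekit track API that Zed uses.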
use super::*;
#[cfg(not(windows))]
use webrtc::{audio_source::RtcAudioSource, video_source::RtcVideoSource};
#[cfg(not(windows))]
pub use livekit::track::{TrackKind, TrackSource};
#[derive(Clone, Debug)]
pub enum LocalTrack {
Audio(LocalAudioTrack),
Video(LocalVideoTrack),
}
#[derive(Clone, Debug)]
pub enum RemoteTrack {
Audio(RemoteAudioTrack),
Video(RemoteVideoTrack),
}
#[derive(Clone, Debug)]
pub struct LocalVideoTrack {}
#[derive(Clone, Debug)]
pub struct LocalAudioTrack {}
#[derive(Clone, Debug)]
pub struct RemoteVideoTrack {
#[cfg(not(target_os = "windows"))]
pub(super) server_track: Arc<TestServerVideoTrack>,
pub(super) _room: WeakRoom,
}
#[derive(Clone, Debug)]
pub struct RemoteAudioTrack {
#[cfg(not(target_os = "windows"))]
pub(super) server_track: Arc<TestServerAudioTrack>,
pub(super) room: WeakRoom,
}
pub enum RtcTrack {
Audio(RtcAudioTrack),
Video(RtcVideoTrack),
}
pub struct RtcAudioTrack {
#[cfg(not(target_os = "windows"))]
pub(super) server_track: Arc<TestServerAudioTrack>,
pub(super) room: WeakRoom,
}
pub struct RtcVideoTrack {
#[cfg(not(target_os = "windows"))]
pub(super) _server_track: Arc<TestServerVideoTrack>,
}
#[cfg(not(target_os = "windows"))]
impl RemoteTrack {
pub fn sid(&self) -> TrackSid {
match self {
RemoteTrack::Audio(track) => track.sid(),
RemoteTrack::Video(track) => track.sid(),
}
}
pub fn kind(&self) -> TrackKind {
match self {
RemoteTrack::Audio(_) => TrackKind::Audio,
RemoteTrack::Video(_) => TrackKind::Video,
}
}
pub fn publisher_id(&self) -> ParticipantIdentity {
match self {
RemoteTrack::Audio(track) => track.publisher_id(),
RemoteTrack::Video(track) => track.publisher_id(),
}
}
pub fn rtc_track(&self) -> RtcTrack {
match self {
RemoteTrack::Audio(track) => RtcTrack::Audio(track.rtc_track()),
RemoteTrack::Video(track) => RtcTrack::Video(track.rtc_track()),
}
}
}
#[cfg(not(windows))]
impl LocalVideoTrack {
pub fn create_video_track(_name: &str, _source: RtcVideoSource) -> Self {
Self {}
}
}
#[cfg(not(windows))]
impl LocalAudioTrack {
pub fn create_audio_track(_name: &str, _source: RtcAudioSource) -> Self {
Self {}
}
}
#[cfg(not(target_os = "windows"))]
impl RemoteAudioTrack {
pub fn sid(&self) -> TrackSid {
self.server_track.sid.clone()
}
pub fn publisher_id(&self) -> ParticipantIdentity {
self.server_track.publisher_id.clone()
}
pub fn start(&self) {
if let Some(room) = self.room.upgrade() {
room.0
.lock()
.paused_audio_tracks
.remove(&self.server_track.sid);
}
}
pub fn stop(&self) {
if let Some(room) = self.room.upgrade() {
room.0
.lock()
.paused_audio_tracks
.insert(self.server_track.sid.clone());
}
}
pub fn rtc_track(&self) -> RtcAudioTrack {
RtcAudioTrack {
server_track: self.server_track.clone(),
room: self.room.clone(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl RemoteVideoTrack {
pub fn sid(&self) -> TrackSid {
self.server_track.sid.clone()
}
pub fn publisher_id(&self) -> ParticipantIdentity {
self.server_track.publisher_id.clone()
}
pub fn rtc_track(&self) -> RtcVideoTrack {
RtcVideoTrack {
_server_track: self.server_track.clone(),
}
}
}
#[cfg(not(target_os = "windows"))]
impl RtcTrack {
pub fn enabled(&self) -> bool {
match self {
RtcTrack::Audio(track) => track.enabled(),
RtcTrack::Video(track) => track.enabled(),
}
}
pub fn set_enabled(&self, enabled: bool) {
match self {
RtcTrack::Audio(track) => track.set_enabled(enabled),
RtcTrack::Video(_) => {}
}
}
}
#[cfg(not(target_os = "windows"))]
impl RtcAudioTrack {
pub fn set_enabled(&self, enabled: bool) {
if let Some(room) = self.room.upgrade() {
let paused_audio_tracks = &mut room.0.lock().paused_audio_tracks;
if enabled {
paused_audio_tracks.remove(&self.server_track.sid);
} else {
paused_audio_tracks.insert(self.server_track.sid.clone());
}
}
}
pub fn enabled(&self) -> bool {
if let Some(room) = self.room.upgrade() {
!room
.0
.lock()
.paused_audio_tracks
.contains(&self.server_track.sid)
} else {
false
}
}
}
impl RtcVideoTrack {
pub fn enabled(&self) -> bool {
true
}
}

@@ -0,0 +1,136 @@
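// Test-only stand-ins for livekit::webrtc streams and sources: the streams never yield frames and
// the sources discard captured frames.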
use super::track::{RtcAudioTrack, RtcVideoTrack};
use futures::Stream;
use livekit::webrtc as real;
use std::{
pin::Pin,
task::{Context, Poll},
};
pub mod video_stream {
use super::*;
pub mod native {
use super::*;
use real::video_frame::BoxVideoFrame;
pub struct NativeVideoStream {
pub track: RtcVideoTrack,
}
impl NativeVideoStream {
pub fn new(track: RtcVideoTrack) -> Self {
Self { track }
}
}
impl Stream for NativeVideoStream {
type Item = BoxVideoFrame;
fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
Poll::Pending
}
}
}
}
pub mod audio_stream {
use super::*;
pub mod native {
use super::*;
use real::audio_frame::AudioFrame;
pub struct NativeAudioStream {
pub track: RtcAudioTrack,
}
impl NativeAudioStream {
pub fn new(track: RtcAudioTrack, _sample_rate: i32, _num_channels: i32) -> Self {
Self { track }
}
}
impl Stream for NativeAudioStream {
type Item = AudioFrame<'static>;
fn poll_next(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
Poll::Pending
}
}
}
}
pub mod audio_source {
use super::*;
pub use real::audio_source::AudioSourceOptions;
pub mod native {
use std::sync::Arc;
use super::*;
use real::{audio_frame::AudioFrame, RtcError};
#[derive(Clone)]
pub struct NativeAudioSource {
pub options: Arc<AudioSourceOptions>,
pub sample_rate: u32,
pub num_channels: u32,
}
impl NativeAudioSource {
pub fn new(
options: AudioSourceOptions,
sample_rate: u32,
num_channels: u32,
_queue_size_ms: u32,
) -> Self {
Self {
options: Arc::new(options),
sample_rate,
num_channels,
}
}
pub async fn capture_frame(&self, _frame: &AudioFrame<'_>) -> Result<(), RtcError> {
Ok(())
}
}
}
pub enum RtcAudioSource {
Native(native::NativeAudioSource),
}
}
pub use livekit::webrtc::audio_frame;
pub use livekit::webrtc::video_frame;
pub mod video_source {
use super::*;
pub use real::video_source::VideoResolution;
pub struct RTCVideoSource;
pub mod native {
use super::*;
use real::video_frame::{VideoBuffer, VideoFrame};
#[derive(Clone)]
pub struct NativeVideoSource {
pub resolution: VideoResolution,
}
impl NativeVideoSource {
pub fn new(resolution: super::VideoResolution) -> Self {
Self { resolution }
}
pub fn capture_frame<T: AsRef<dyn VideoBuffer>>(&self, _frame: &VideoFrame<T>) {}
}
}
pub enum RtcVideoSource {
Native(native::NativeVideoSource),
}
}