Add component NotificationFrame & CaptureAudio parts for testing (#36081)
Adds the NotificationFrame component. It implements a subset of MessageNotification as a Component, and MessageNotification is refactored to use NotificationFrame. Having a notification UI Component is nice, as it allows us to easily build new types of notifications. The new NotificationFrame component is used for CaptureAudioNotification. Also adds a CaptureAudio action in the dev namespace (not meant for end-users); it records 10 seconds of audio and saves it to a wav file. Release Notes: - N/A --------- Co-authored-by: Mikayla <mikayla@zed.dev>
This commit is contained in:
parent
a3dcc76687
commit
4f0b00b0d9
13 changed files with 448 additions and 127 deletions
|
@ -39,6 +39,8 @@ tokio-tungstenite.workspace = true
|
|||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
rodio = { workspace = true, features = ["wav_output"] }
|
||||
|
||||
[target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
|
||||
libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
|
||||
livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [
|
||||
|
|
|
@ -1,7 +1,13 @@
|
|||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
|
||||
mod remote_video_track_view;
|
||||
use cpal::traits::HostTrait as _;
|
||||
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
|
||||
use rodio::DeviceTrait as _;
|
||||
|
||||
mod record;
|
||||
pub use record::CaptureInput;
|
||||
|
||||
#[cfg(not(any(
|
||||
test,
|
||||
|
@ -18,6 +24,8 @@ mod livekit_client;
|
|||
)))]
|
||||
pub use livekit_client::*;
|
||||
|
||||
// If you need proper LSP in livekit_client you've got to comment out
|
||||
// the mocks and test
|
||||
#[cfg(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
|
@ -168,3 +176,59 @@ pub enum RoomEvent {
|
|||
Reconnecting,
|
||||
Reconnected,
|
||||
}
|
||||
|
||||
/// Returns the system's default audio device together with its default
/// stream configuration.
///
/// `input == true` selects the default capture (microphone) device;
/// `input == false` selects the default playback device.
pub(crate) fn default_device(
    input: bool,
) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> {
    let host = cpal::default_host();
    if input {
        let device = host
            .default_input_device()
            .context("no audio input device available")?;
        let config = device
            .default_input_config()
            .context("failed to get default input config")?;
        Ok((device, config))
    } else {
        let device = host
            .default_output_device()
            .context("no audio output device available")?;
        let config = device
            .default_output_config()
            .context("failed to get default output config")?;
        Ok((device, config))
    }
}
|
||||
|
||||
pub(crate) fn get_sample_data(
|
||||
sample_format: cpal::SampleFormat,
|
||||
data: &cpal::Data,
|
||||
) -> anyhow::Result<Vec<i16>> {
|
||||
match sample_format {
|
||||
cpal::SampleFormat::I8 => Ok(convert_sample_data::<i8, i16>(data)),
|
||||
cpal::SampleFormat::I16 => Ok(data.as_slice::<i16>().unwrap().to_vec()),
|
||||
cpal::SampleFormat::I24 => Ok(convert_sample_data::<cpal::I24, i16>(data)),
|
||||
cpal::SampleFormat::I32 => Ok(convert_sample_data::<i32, i16>(data)),
|
||||
cpal::SampleFormat::I64 => Ok(convert_sample_data::<i64, i16>(data)),
|
||||
cpal::SampleFormat::U8 => Ok(convert_sample_data::<u8, i16>(data)),
|
||||
cpal::SampleFormat::U16 => Ok(convert_sample_data::<u16, i16>(data)),
|
||||
cpal::SampleFormat::U32 => Ok(convert_sample_data::<u32, i16>(data)),
|
||||
cpal::SampleFormat::U64 => Ok(convert_sample_data::<u64, i16>(data)),
|
||||
cpal::SampleFormat::F32 => Ok(convert_sample_data::<f32, i16>(data)),
|
||||
cpal::SampleFormat::F64 => Ok(convert_sample_data::<f64, i16>(data)),
|
||||
_ => anyhow::bail!("Unsupported sample format"),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn convert_sample_data<
|
||||
TSource: cpal::SizedSample,
|
||||
TDest: cpal::SizedSample + cpal::FromSample<TSource>,
|
||||
>(
|
||||
data: &cpal::Data,
|
||||
) -> Vec<TDest> {
|
||||
data.as_slice::<TSource>()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|e| e.to_sample::<TDest>())
|
||||
.collect()
|
||||
}
|
||||
|
|
|
@ -8,6 +8,8 @@ use gpui_tokio::Tokio;
|
|||
use playback::capture_local_video_track;
|
||||
|
||||
mod playback;
|
||||
#[cfg(feature = "record-microphone")]
|
||||
mod record;
|
||||
|
||||
use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication};
|
||||
pub use playback::AudioStream;
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use anyhow::{Context as _, Result};
|
||||
|
||||
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
|
||||
use cpal::{Data, FromSample, I24, SampleFormat, SizedSample};
|
||||
use cpal::traits::{DeviceTrait, StreamTrait as _};
|
||||
use futures::channel::mpsc::UnboundedSender;
|
||||
use futures::{Stream, StreamExt as _};
|
||||
use gpui::{
|
||||
|
@ -166,7 +165,7 @@ impl AudioStack {
|
|||
) -> Result<()> {
|
||||
loop {
|
||||
let mut device_change_listener = DeviceChangeListener::new(false)?;
|
||||
let (output_device, output_config) = default_device(false)?;
|
||||
let (output_device, output_config) = crate::default_device(false)?;
|
||||
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
|
||||
let mixer = mixer.clone();
|
||||
let apm = apm.clone();
|
||||
|
@ -238,7 +237,7 @@ impl AudioStack {
|
|||
) -> Result<()> {
|
||||
loop {
|
||||
let mut device_change_listener = DeviceChangeListener::new(true)?;
|
||||
let (device, config) = default_device(true)?;
|
||||
let (device, config) = crate::default_device(true)?;
|
||||
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
|
||||
let apm = apm.clone();
|
||||
let frame_tx = frame_tx.clone();
|
||||
|
@ -262,7 +261,7 @@ impl AudioStack {
|
|||
config.sample_format(),
|
||||
move |data, _: &_| {
|
||||
let data =
|
||||
Self::get_sample_data(config.sample_format(), data).log_err();
|
||||
crate::get_sample_data(config.sample_format(), data).log_err();
|
||||
let Some(data) = data else {
|
||||
return;
|
||||
};
|
||||
|
@ -320,33 +319,6 @@ impl AudioStack {
|
|||
drop(end_on_drop_tx)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_sample_data(sample_format: SampleFormat, data: &Data) -> Result<Vec<i16>> {
|
||||
match sample_format {
|
||||
SampleFormat::I8 => Ok(Self::convert_sample_data::<i8, i16>(data)),
|
||||
SampleFormat::I16 => Ok(data.as_slice::<i16>().unwrap().to_vec()),
|
||||
SampleFormat::I24 => Ok(Self::convert_sample_data::<I24, i16>(data)),
|
||||
SampleFormat::I32 => Ok(Self::convert_sample_data::<i32, i16>(data)),
|
||||
SampleFormat::I64 => Ok(Self::convert_sample_data::<i64, i16>(data)),
|
||||
SampleFormat::U8 => Ok(Self::convert_sample_data::<u8, i16>(data)),
|
||||
SampleFormat::U16 => Ok(Self::convert_sample_data::<u16, i16>(data)),
|
||||
SampleFormat::U32 => Ok(Self::convert_sample_data::<u32, i16>(data)),
|
||||
SampleFormat::U64 => Ok(Self::convert_sample_data::<u64, i16>(data)),
|
||||
SampleFormat::F32 => Ok(Self::convert_sample_data::<f32, i16>(data)),
|
||||
SampleFormat::F64 => Ok(Self::convert_sample_data::<f64, i16>(data)),
|
||||
_ => anyhow::bail!("Unsupported sample format"),
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_sample_data<TSource: SizedSample, TDest: SizedSample + FromSample<TSource>>(
|
||||
data: &Data,
|
||||
) -> Vec<TDest> {
|
||||
data.as_slice::<TSource>()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|e| e.to_sample::<TDest>())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
use super::LocalVideoTrack;
|
||||
|
@ -393,27 +365,6 @@ pub(crate) async fn capture_local_video_track(
|
|||
))
|
||||
}
|
||||
|
||||
fn default_device(input: bool) -> Result<(cpal::Device, cpal::SupportedStreamConfig)> {
|
||||
let device;
|
||||
let config;
|
||||
if input {
|
||||
device = cpal::default_host()
|
||||
.default_input_device()
|
||||
.context("no audio input device available")?;
|
||||
config = device
|
||||
.default_input_config()
|
||||
.context("failed to get default input config")?;
|
||||
} else {
|
||||
device = cpal::default_host()
|
||||
.default_output_device()
|
||||
.context("no audio output device available")?;
|
||||
config = device
|
||||
.default_output_config()
|
||||
.context("failed to get default output config")?;
|
||||
}
|
||||
Ok((device, config))
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AudioMixerSource {
|
||||
ssrc: i32,
|
||||
|
|
91
crates/livekit_client/src/record.rs
Normal file
91
crates/livekit_client/src/record.rs
Normal file
|
@ -0,0 +1,91 @@
|
|||
use std::{
|
||||
env,
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, Mutex},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use cpal::traits::{DeviceTrait, StreamTrait};
|
||||
use rodio::{buffer::SamplesBuffer, conversions::SampleTypeConverter};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct CaptureInput {
|
||||
pub name: String,
|
||||
config: cpal::SupportedStreamConfig,
|
||||
samples: Arc<Mutex<Vec<i16>>>,
|
||||
_stream: cpal::Stream,
|
||||
}
|
||||
|
||||
impl CaptureInput {
|
||||
pub fn start() -> anyhow::Result<Self> {
|
||||
let (device, config) = crate::default_device(true)?;
|
||||
let name = device.name().unwrap_or("<unknown>".to_string());
|
||||
log::info!("Using microphone: {}", name);
|
||||
|
||||
let samples = Arc::new(Mutex::new(Vec::new()));
|
||||
let stream = start_capture(device, config.clone(), samples.clone())?;
|
||||
|
||||
Ok(Self {
|
||||
name,
|
||||
_stream: stream,
|
||||
config,
|
||||
samples,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn finish(self) -> Result<PathBuf> {
|
||||
let name = self.name;
|
||||
let mut path = env::current_dir().context("Could not get current dir")?;
|
||||
path.push(&format!("test_recording_{name}.wav"));
|
||||
log::info!("Test recording written to: {}", path.display());
|
||||
write_out(self.samples, self.config, &path)?;
|
||||
Ok(path)
|
||||
}
|
||||
}
|
||||
|
||||
fn start_capture(
|
||||
device: cpal::Device,
|
||||
config: cpal::SupportedStreamConfig,
|
||||
samples: Arc<Mutex<Vec<i16>>>,
|
||||
) -> Result<cpal::Stream> {
|
||||
let stream = device
|
||||
.build_input_stream_raw(
|
||||
&config.config(),
|
||||
config.sample_format(),
|
||||
move |data, _: &_| {
|
||||
let data = crate::get_sample_data(config.sample_format(), data).log_err();
|
||||
let Some(data) = data else {
|
||||
return;
|
||||
};
|
||||
samples
|
||||
.try_lock()
|
||||
.expect("Only locked after stream ends")
|
||||
.extend_from_slice(&data);
|
||||
},
|
||||
|err| log::error!("error capturing audio track: {:?}", err),
|
||||
Some(Duration::from_millis(100)),
|
||||
)
|
||||
.context("failed to build input stream")?;
|
||||
|
||||
stream.play()?;
|
||||
Ok(stream)
|
||||
}
|
||||
|
||||
fn write_out(
|
||||
samples: Arc<Mutex<Vec<i16>>>,
|
||||
config: cpal::SupportedStreamConfig,
|
||||
path: &Path,
|
||||
) -> Result<()> {
|
||||
let samples = std::mem::take(
|
||||
&mut *samples
|
||||
.try_lock()
|
||||
.expect("Stream has ended, callback cant hold the lock"),
|
||||
);
|
||||
let samples: Vec<f32> = SampleTypeConverter::<_, f32>::new(samples.into_iter()).collect();
|
||||
let mut samples = SamplesBuffer::new(config.channels(), config.sample_rate().0, samples);
|
||||
match rodio::output_to_wav(&mut samples, path) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(anyhow::anyhow!("Failed to write wav file: {}", e)),
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue