[FEATURE ⭐] How to get the audio buffer?
Is your feature request related to a problem? Please describe. 📝 I use Rust with Tauri to implement area capture, but the recorded video has no audio. How can I get the audio buffer?
Describe the solution you'd like 🔑 I use Rust with Tauri to implement area capture on Windows 11, but the recorded video has no audio, so I need a way to get the audio buffer and encode it as well.
System capture: https://github.com/user-attachments/assets/cfe3bfae-94c6-474a-98b3-f317e9a31bf5
Window capture: https://github.com/user-attachments/assets/b3df53ca-1a5d-473a-bb5c-e757d390f8a3
Additional context ➕ My capture code (Rust, windows-capture + Tauri):
//////////////////////////////////////////
use std::{
    io::{self, Write},
    time::Instant,
};
use windows_capture::{
    capture::GraphicsCaptureApiHandler,
    encoder::{AudioSettingsBuilder, ContainerSettingsBuilder, VideoEncoder, VideoSettingsBuilder},
    frame::Frame,
    graphics_capture_api::InternalCaptureControl,
    monitor::Monitor,
    settings::{ColorFormat, CursorCaptureSettings, DrawBorderSettings, Settings},
};
/// Capture region and output path passed in from the Tauri command.
struct Flagset {
    x: u32,
    y: u32,
    width: u32,
    height: u32,
    path: String,
}
struct Capture {
    encoder: Option<VideoEncoder>,
    start: Instant,
    flag: Flagset,
}
use lazy_static::lazy_static;
use std::sync::{Arc, Mutex};

lazy_static! {
    // Shared flag the frontend sets (via the "capture" event) to stop recording.
    static ref CAPTURE_STATUS: Arc<Mutex<String>> = Arc::new(Mutex::new(String::new()));
}
impl GraphicsCaptureApiHandler for Capture {
    type Flags = Flagset;
    type Error = Box<dyn std::error::Error + Send + Sync>;

    fn new(flagset: Self::Flags) -> Result<Self, Self::Error> {
        let encoder = VideoEncoder::new(
            VideoSettingsBuilder::new(flagset.width, flagset.height),
            // Audio is disabled here, so the output file gets no audio track.
            AudioSettingsBuilder::default().disabled(true),
            ContainerSettingsBuilder::default(),
            flagset.path.clone(),
        )?;
        Ok(Self {
            encoder: Some(encoder),
            start: Instant::now(),
            flag: flagset,
        })
    }
    fn on_frame_arrived(
        &mut self,
        frame: &mut Frame,
        capture_control: InternalCaptureControl,
    ) -> Result<(), Self::Error> {
        print!(
            "\rRecording for: {} seconds",
            self.start.elapsed().as_secs()
        );
        io::stdout().flush()?;

        let time = frame.timespan().Duration;
        // Crop the frame to the requested region.
        let mut framecrop = frame
            .buffer_crop(
                self.flag.x,
                self.flag.y,
                self.flag.x + self.flag.width,
                self.flag.y + self.flag.height,
            )
            .unwrap();
        let w = framecrop.width() as usize;
        let h = framecrop.height() as usize;
        let nopad = framecrop.as_raw_nopadding_buffer().unwrap();
        let b = convert_rgba_to_bgra_and_flip(&nopad, w, h);
        self.encoder.as_mut().unwrap().send_frame_buffer(&b, time)?; // video only
        // self.encoder.as_mut().unwrap().send_frame(frame)?; // uncropped video
        // self.encoder.as_mut().unwrap().send_audio_buffer(&b, time)?;
        // self.encoder.as_mut().unwrap().send_frame_with_audio(frame, audio_buffer)
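        // The two commented-out calls above are the heart of this issue:
        // there is no captured audio buffer to pass in. Per the maintainer's
        // reply below, the API cannot capture one directly at the moment.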
        let mut status = CAPTURE_STATUS.lock().unwrap();
        // The event payload arrives as JSON, so "STOP" comes through quoted.
        if *status == "\"STOP\"" {
            *status = String::new();
            self.encoder.take().unwrap().finish()?;
            capture_control.stop();
        }
        Ok(())
    }
    fn on_closed(&mut self) -> Result<(), Self::Error> {
        println!("Capture Session Closed");
        Ok(())
    }
}
// Convert an RGBA buffer to BGRA and flip it vertically.
fn convert_rgba_to_bgra_and_flip(rgba_data: &[u8], width: usize, height: usize) -> Vec<u8> {
    let mut bgra_data = vec![0; rgba_data.len()];
    for y in 0..height {
        for x in 0..width {
            let rgba_index = (y * width + x) * 4;
            let bgra_index = ((height - y - 1) * width + x) * 4;
            // Swap the R and B channels (RGBA -> BGRA).
            bgra_data[bgra_index] = rgba_data[rgba_index + 2]; // B
            bgra_data[bgra_index + 1] = rgba_data[rgba_index + 1]; // G
            bgra_data[bgra_index + 2] = rgba_data[rgba_index]; // R
            bgra_data[bgra_index + 3] = rgba_data[rgba_index + 3]; // A
        }
    }
    bgra_data
}
use tauri::{AppHandle, Listener}; // Tauri v2: `listen` is provided by the `Listener` trait

#[tauri::command]
fn start_capture(
    app: AppHandle,
    x: u32,
    y: u32,
    width: u32,
    height: u32,
    monitorname: String,
    path: String,
) {
    tauri::async_runtime::spawn(async move {
        let flagset = Flagset { x, y, width, height, path };

        // Pick the monitor whose device name matches, falling back to the primary one.
        let monitors = Monitor::enumerate().unwrap();
        let mut monitor: Monitor = Monitor::primary().unwrap();
        for m in monitors {
            if m.device_name().unwrap() == monitorname {
                monitor = m;
            }
        }

        let settings = Settings::new(
            monitor,
            CursorCaptureSettings::Default,
            DrawBorderSettings::WithoutBorder,
            ColorFormat::Rgba8,
            flagset,
        );
        Capture::start_free_threaded(settings).unwrap();

        // The frontend emits a "capture" event with payload "STOP" to end recording.
        app.listen("capture", |event| {
            let mut status = CAPTURE_STATUS.lock().unwrap();
            *status = event.payload().to_string();
        });
        // Capture::start(settings).expect("Screen Capture Failed");
    });
}
/////////////////////////////////////////
Hey, currently it's not possible to capture the audio buffer directly.
@angelbests Hi, it is possible to capture desktop audio in Rust using cpal; here's my example: https://github.com/mycrl/mirror/blob/main/capture/src/audio.rs. List all the output devices and just bind the input stream.
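In outline, that approach looks like this. A minimal sketch, assuming cpal 0.15 on Windows, where WASAPI lets you build an input stream on an output device to capture what it is playing (loopback), and assuming the device's default sample format is f32; it illustrates the suggestion above rather than reproducing the linked example:

```rust
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let host = cpal::default_host();

    // List all the output devices, as suggested above.
    for device in host.output_devices()? {
        println!("output device: {}", device.name()?);
    }

    // On WASAPI, building an *input* stream on an *output* device opens it
    // in loopback mode, i.e. it captures whatever that device is playing.
    let device = host
        .default_output_device()
        .ok_or("no default output device")?;
    let config = device.default_output_config()?;

    let stream = device.build_input_stream(
        &config.into(),
        |data: &[f32], _: &cpal::InputCallbackInfo| {
            // `data` is the audio buffer: interleaved f32 PCM samples.
            // Forward it to an encoder or a channel from here.
            println!("got {} samples", data.len());
        },
        |err| eprintln!("stream error: {err}"),
        None, // no build timeout
    )?;
    stream.play()?;

    // Capture for five seconds, then drop the stream.
    std::thread::sleep(std::time::Duration::from_secs(5));
    Ok(())
}
```

The sample rate and channel count come from `config`; you will need both when encoding the buffers.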
> Hey, currently it's not possible to capture the audio buffer directly.

How do I capture audio? Will it be supported in the future?
> @angelbests Hi, it is possible to capture desktop audio in Rust using cpal; here's my example: https://github.com/mycrl/mirror/blob/main/capture/src/audio.rs. List all the output devices and just bind the input stream.
Thank you very much, but I don't understand that code very well. Is there a simpler example?
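At least the handoff between the audio callback and the recording loop can be shown without any audio API. A plain mpsc channel moves buffers from the callback thread to the thread that owns the encoder; the two loops below are hypothetical stand-ins for the cpal data callback and `on_frame_arrived`, and whether windows-capture can then mux those buffers is exactly the open question in this issue:

```rust
use std::sync::mpsc::channel;
use std::thread;
use std::time::Duration;

fn main() {
    let (tx, rx) = channel::<Vec<f32>>();

    // Stand-in for the cpal data callback: push each audio buffer as it arrives.
    thread::spawn(move || {
        for _ in 0..50 {
            let buffer = vec![0.0_f32; 960]; // e.g. 10 ms of 48 kHz stereo
            tx.send(buffer).ok();
            thread::sleep(Duration::from_millis(10));
        }
    });

    // Stand-in for `on_frame_arrived`: drain pending audio on every video frame.
    for _frame in 0..30 {
        while let Ok(chunk) = rx.try_recv() {
            println!("audio chunk: {} samples", chunk.len());
        }
        thread::sleep(Duration::from_millis(16)); // ~60 fps pacing
    }
}
```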