Require user interaction to start audio

Skyler Lehmkuhl 2025-01-25 18:48:34 -05:00
parent cdd1ff2cbf
commit e12c2e8877
7 changed files with 63 additions and 7 deletions
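Browsers generally refuse to start audio output until the page has received a user gesture, so the cpal stream created in start() can come up suspended under autoplay policies. The stream is therefore no longer played inside start(); a new resume() method on AudioOutput (exposed to JS as CoreInterface::resume_audio) starts it the first time the user clicks or presses a key. A minimal sketch of the intended wiring on the JS side, assuming an already-constructed and initialized CoreInterface instance named coreInterface (module import and setup omitted):

    let audioStarted = false;

    const startAudioOnGesture = () => {
        if (!audioStarted) {
            try {
                // resume_audio() throws if the underlying stream cannot be resumed
                coreInterface.resume_audio();
                audioStarted = true;
            } catch (err) {
                console.error("Audio resume failed:", err);
            }
        }
        // One-shot: the listeners are removed after the first gesture
        document.removeEventListener("click", startAudioOnGesture);
        document.removeEventListener("keydown", startAudioOnGesture);
    };

    document.addEventListener("click", startAudioOnGesture);
    document.addEventListener("keydown", startAudioOnGesture);

On the Rust side, resume() only calls stream.play() while the internal AudioState is Suspended, so further gestures after the stream is running are no-ops.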

View File

@@ -3,6 +3,11 @@ use cpal::{Sample};
use std::sync::{Arc, Mutex};
use crate::{TrackManager, Timestamp, Duration, SampleCount, AudioOutput, PlaybackState};
#[derive(PartialEq)]
enum AudioState {
    Suspended,
    Running,
}
// #[cfg(feature = "wasm")]
// use wasm_bindgen::prelude::*;
@@ -13,6 +18,7 @@ pub struct CpalAudioOutput {
    track_manager: Option<Arc<Mutex<TrackManager>>>,
    _stream: Option<cpal::Stream>,
    playback_state: PlaybackState,
    audio_state: AudioState,
    timestamp: Arc<Mutex<Timestamp>>,
    chunk_size: usize,
    sample_rate: u32,
@@ -26,6 +32,7 @@ impl CpalAudioOutput {
            track_manager: None,
            _stream: None,
            playback_state: PlaybackState::Stopped,
            audio_state: AudioState::Suspended,
            timestamp: Arc::new(Mutex::new(Timestamp::from_seconds(0.0))),
            chunk_size: 0,
            sample_rate: 44100, // Default sample rate, updated later
@@ -110,11 +117,6 @@ impl AudioOutput for CpalAudioOutput {
            .ok_or_else(|| "No output device available")?;
        let supported_config = device.default_output_config()?;
        self._stream = Some(self.build_stream::<f32>(&device, supported_config)?);
        if let Some(stream) = self._stream.as_ref() {
            stream.play().unwrap();
        } else {
            eprintln!("Stream is not initialized!");
        }
        Ok(())
    }
@@ -126,6 +128,17 @@
    fn stop(&mut self) {
        self.playback_state = PlaybackState::Stopped;
    }

    fn resume(&mut self) -> Result<(), anyhow::Error> {
        if self.audio_state == AudioState::Suspended {
            if let Some(stream) = &self._stream {
                stream.play()?;
                self.audio_state = AudioState::Running;
                log::info!("Audio resumed");
            }
        }
        Ok(())
    }

    fn register_track_manager(&mut self, track_manager: Arc<Mutex<TrackManager>>) {
        self.track_manager = Some(track_manager);

View File

@@ -95,6 +95,7 @@ pub trait AudioOutput {
    fn start(&mut self) -> Result<(), Box<dyn std::error::Error>>;
    fn play(&mut self, start_timestamp: Timestamp);
    fn stop(&mut self);
    fn resume(&mut self) -> Result<(), anyhow::Error>;
    fn register_track_manager(&mut self, track_manager: Arc<Mutex<TrackManager>>);
    fn get_timestamp(&mut self) -> Timestamp;
    fn set_chunk_size(&mut self, chunk_size: usize);
@@ -218,6 +219,11 @@ impl CoreInterface {
        let mut track_manager = self.track_manager.lock().unwrap();
        track_manager.stop();
    }
    pub fn resume_audio(&mut self) -> Result<(), JsValue> {
        // Call this on user gestures if audio gets suspended
        self.cpal_audio_output.resume()
            .map_err(|e| JsValue::from_str(&format!("Failed to resume audio: {}", e)))
    }
    pub fn add_sine_track(&mut self, frequency: f32) -> Result<(), String> {
        if frequency.is_nan() || frequency.is_infinite() || frequency <= 0.0 {
            return Err(format!("Invalid frequency: {}", frequency));

View File

@@ -8419,5 +8419,26 @@ async function testAudio() {
    coreInterface.init()
    coreInterface.play(0.0)
    console.log(coreInterface)

    let audioStarted = false;
    const startCoreInterfaceAudio = () => {
        if (!audioStarted) {
            try {
                coreInterface.resume_audio();
                audioStarted = true;
                console.log("Started CoreInterface Audio!")
            } catch (err) {
                console.error("Audio resume failed:", err);
            }
        }

        // Remove the event listeners to prevent them from firing again
        document.removeEventListener("click", startCoreInterfaceAudio);
        document.removeEventListener("keydown", startCoreInterfaceAudio);
    };

    // Add event listeners for mouse click and key press
    document.addEventListener("click", startCoreInterfaceAudio);
    document.addEventListener("keydown", startCoreInterfaceAudio);
}
testAudio()

View File

@@ -7,6 +7,7 @@ export class CoreInterface {
  init(): void;
  play(timestamp: number): void;
  stop(): void;
  resume_audio(): void;
  add_sine_track(frequency: number): void;
  get_timestamp(): number;
  get_tracks(): JsTrack[];
@@ -30,6 +31,7 @@ export interface InitOutput {
  readonly coreinterface_init: (a: number) => void;
  readonly coreinterface_play: (a: number, b: number) => void;
  readonly coreinterface_stop: (a: number) => void;
  readonly coreinterface_resume_audio: (a: number, b: number) => void;
  readonly coreinterface_add_sine_track: (a: number, b: number, c: number) => void;
  readonly coreinterface_get_timestamp: (a: number) => number;
  readonly coreinterface_get_tracks: (a: number, b: number) => void;

View File

@@ -288,6 +288,19 @@ export class CoreInterface {
    stop() {
        wasm.coreinterface_stop(this.__wbg_ptr);
    }
    resume_audio() {
        try {
            const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
            wasm.coreinterface_resume_audio(retptr, this.__wbg_ptr);
            var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
            var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
            if (r1) {
                throw takeObject(r0);
            }
        } finally {
            wasm.__wbindgen_add_to_stack_pointer(16);
        }
    }
    /**
     * @param {number} frequency
     */
@@ -549,8 +562,8 @@ function __wbg_get_imports() {
        const ret = false;
        return ret;
    };
    imports.wbg.__wbindgen_closure_wrapper99 = function(arg0, arg1, arg2) {
        const ret = makeMutClosure(arg0, arg1, 40, __wbg_adapter_18);
    imports.wbg.__wbindgen_closure_wrapper87 = function(arg0, arg1, arg2) {
        const ret = makeMutClosure(arg0, arg1, 31, __wbg_adapter_18);
        return addHeapObject(ret);
    };
imports.wbg.__wbindgen_debug_string = function(arg0, arg1) {

Binary file not shown.

View File

@@ -9,6 +9,7 @@ export const coreinterface_new: () => number;
export const coreinterface_init: (a: number) => void;
export const coreinterface_play: (a: number, b: number) => void;
export const coreinterface_stop: (a: number) => void;
export const coreinterface_resume_audio: (a: number, b: number) => void;
export const coreinterface_add_sine_track: (a: number, b: number, c: number) => void;
export const coreinterface_get_timestamp: (a: number) => number;
export const coreinterface_get_tracks: (a: number, b: number) => void;