use daw_backend::{AudioEvent, AudioSystem, EngineController, EventEmitter, WaveformPeak};
use std::sync::{Arc, Mutex};
use tauri::Emitter;

#[derive(serde::Serialize)]
pub struct AudioFileMetadata {
    pub pool_index: usize,
    pub duration: f64,
    pub sample_rate: u32,
    pub channels: u32,
    pub waveform: Vec<WaveformPeak>,
}

#[derive(serde::Serialize)]
pub struct MidiNote {
    pub note: u8,        // MIDI note number (0-127)
    pub start_time: f64, // Start time in seconds
    pub duration: f64,   // Note duration in seconds
    pub velocity: u8,    // Note velocity (0-127)
}

#[derive(serde::Serialize)]
pub struct MidiFileMetadata {
    pub duration: f64,
    pub notes: Vec<MidiNote>,
}

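/// Audio engine state shared across all commands via
/// `tauri::State<'_, Arc<Mutex<AudioState>>>`. The `next_track_id` and
/// `next_pool_index` counters are incremented here and returned to the
/// frontend so tracks and pooled audio files can be referenced by index.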
pub struct AudioState {
    controller: Option<EngineController>,
    sample_rate: u32,
    channels: u32,
    next_track_id: u32,
    next_pool_index: usize,
}

impl Default for AudioState {
    fn default() -> Self {
        Self {
            controller: None,
            sample_rate: 0,
            channels: 0,
            next_track_id: 0,
            next_pool_index: 0,
        }
    }
}

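// A minimal wiring sketch (assumed; the actual builder setup lives outside this
// file): the state is registered once as Tauri-managed state and each command
// below is added to the invoke handler.
//
//     tauri::Builder::default()
//         .manage(Arc::new(Mutex::new(AudioState::default())))
//         .invoke_handler(tauri::generate_handler![audio_init, audio_play /* , ... */])
//
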
/// Implementation of EventEmitter that uses Tauri's event system
struct TauriEventEmitter {
    app_handle: tauri::AppHandle,
}

impl EventEmitter for TauriEventEmitter {
    fn emit(&self, event: AudioEvent) {
        // Serialize the event to the format expected by the frontend
        let serialized_event = match event {
            AudioEvent::PlaybackPosition(time) => {
                SerializedAudioEvent::PlaybackPosition { time }
            }
            AudioEvent::RecordingStarted(track_id, clip_id) => {
                SerializedAudioEvent::RecordingStarted { track_id, clip_id }
            }
            AudioEvent::RecordingProgress(clip_id, duration) => {
                SerializedAudioEvent::RecordingProgress { clip_id, duration }
            }
            AudioEvent::RecordingStopped(clip_id, pool_index, waveform) => {
                SerializedAudioEvent::RecordingStopped { clip_id, pool_index, waveform }
            }
            AudioEvent::RecordingError(message) => {
                SerializedAudioEvent::RecordingError { message }
            }
            AudioEvent::NoteOn(note, velocity) => {
                SerializedAudioEvent::NoteOn { note, velocity }
            }
            AudioEvent::NoteOff(note) => {
                SerializedAudioEvent::NoteOff { note }
            }
            _ => return, // Ignore other event types for now
        };

        // Emit the event via Tauri
        if let Err(e) = self.app_handle.emit("audio-event", serialized_event) {
            eprintln!("Failed to emit audio event: {}", e);
        }
    }
}

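// The frontend is expected to subscribe to this channel with Tauri's event API
// (e.g. `listen("audio-event", ...)` from `@tauri-apps/api/event`). Because
// `SerializedAudioEvent` is internally tagged, each payload carries a `type`
// discriminator, e.g. `{"type":"PlaybackPosition","time":1.5}`.

/// Initializes the audio engine and returns a short human-readable summary of
/// the output configuration. If the engine is already running (e.g. after a
/// frontend hot-reload), the DAW state is reset instead of re-initializing.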
#[tauri::command]
pub async fn audio_init(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    app_handle: tauri::AppHandle,
) -> Result<String, String> {
    let mut audio_state = state.lock().unwrap();

    // Check if already initialized - if so, reset DAW state (for hot-reload)
    if let Some(controller) = &mut audio_state.controller {
        controller.reset();
        audio_state.next_track_id = 0;
        audio_state.next_pool_index = 0;
        return Ok(format!(
            "Audio already initialized (DAW state reset): {} Hz, {} ch",
            audio_state.sample_rate, audio_state.channels
        ));
    }

    // Create TauriEventEmitter
    let emitter = Arc::new(TauriEventEmitter { app_handle });

    // AudioSystem handles all cpal initialization internally
    let system = AudioSystem::new(Some(emitter))?;

    let info = format!(
        "Audio initialized: {} Hz, {} ch",
        system.sample_rate, system.channels
    );

    // Leak the stream to keep it alive for the lifetime of the app.
    // This is intentional - we want the audio stream to run until the app closes.
    Box::leak(Box::new(system.stream));

    audio_state.controller = Some(system.controller);
    audio_state.sample_rate = system.sample_rate;
    audio_state.channels = system.channels;
    audio_state.next_track_id = 0;
    audio_state.next_pool_index = 0;

    Ok(info)
}

#[tauri::command]
pub async fn audio_play(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.play();
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_stop(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.stop();
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_test_beep(state: tauri::State<'_, Arc<Mutex<AudioState>>>) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        // Create MIDI track
        controller.create_midi_track("Test".to_string());

        // Note: Track ID will be 0 (first track created)
        // Create MIDI clip and add notes for a C major chord arpeggio
        controller.create_midi_clip(0, 0.0, 2.0);
        controller.add_midi_note(0, 0, 0.0, 60, 100, 0.5); // C
        controller.add_midi_note(0, 0, 0.5, 64, 100, 0.5); // E
        controller.add_midi_note(0, 0, 1.0, 67, 100, 0.5); // G

        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_seek(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    seconds: f64,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.seek(seconds);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

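/// Sets a single per-track parameter. Accepted `parameter` values are
/// "volume", "mute", "solo", "pan", and "gain_db"; for "mute" and "solo" the
/// `value` is treated as a boolean (anything above 0.5 enables it).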
#[tauri::command]
pub async fn audio_set_track_parameter(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    parameter: String,
    value: f32,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        match parameter.as_str() {
            "volume" => controller.set_track_volume(track_id, value),
            "mute" => controller.set_track_mute(track_id, value > 0.5),
            "solo" => controller.set_track_solo(track_id, value > 0.5),
            "pan" => {
                // Pan is applied through the effects system
                controller.add_pan_effect(track_id, value);
            }
            "gain_db" => {
                controller.add_gain_effect(track_id, value);
            }
            _ => return Err(format!("Unknown parameter: {}", parameter)),
        }
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_get_available_instruments() -> Result<Vec<String>, String> {
    // Return list of available instruments
    // For now, only SimpleSynth is available
    Ok(vec!["SimpleSynth".to_string()])
}

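/// Creates a new track and returns its id. `track_type` must be "audio" or
/// "midi"; MIDI tracks accept an optional `instrument` name, which currently
/// defaults to (and must be) "SimpleSynth".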
#[tauri::command]
pub async fn audio_create_track(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    name: String,
    track_type: String,
    instrument: Option<String>,
) -> Result<u32, String> {
    let mut audio_state = state.lock().unwrap();

    // Get track ID and increment counter before borrowing controller
    let track_id = audio_state.next_track_id;
    audio_state.next_track_id += 1;

    if let Some(controller) = &mut audio_state.controller {
        match track_type.as_str() {
            "audio" => controller.create_audio_track(name),
            "midi" => {
                // Validate instrument for MIDI tracks
                let inst = instrument.unwrap_or_else(|| "SimpleSynth".to_string());
                if inst != "SimpleSynth" {
                    return Err(format!("Unknown instrument: {}", inst));
                }
                controller.create_midi_track(name)
            }
            _ => return Err(format!("Unknown track type: {}", track_type)),
        }
        Ok(track_id)
    } else {
        Err("Audio not initialized".to_string())
    }
}

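/// Decodes an audio file from disk, registers its samples with the engine's
/// audio pool, and returns metadata for the UI: the `pool_index` to pass to
/// `audio_add_clip`, basic format information, and an adaptive waveform
/// overview (~300 peaks per second, clamped to 1000-20000 peaks).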
#[tauri::command]
pub async fn audio_load_file(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    path: String,
) -> Result<AudioFileMetadata, String> {
    // Load the audio file from disk
    let audio_file = daw_backend::io::AudioFile::load(&path)?;

    // Calculate duration
    let duration = audio_file.duration();

    // Generate adaptive waveform peaks based on duration
    // Aim for ~300 peaks per second, with min 1000 and max 20000
    let target_peaks = ((duration * 300.0) as usize).clamp(1000, 20000);
    let waveform = audio_file.generate_waveform_overview(target_peaks);
    let sample_rate = audio_file.sample_rate;
    let channels = audio_file.channels;

    // Get a lock on the audio state and send the loaded data to the audio thread
    let mut audio_state = state.lock().unwrap();

    // Get pool index and increment counter before borrowing controller
    let pool_index = audio_state.next_pool_index;
    audio_state.next_pool_index += 1;

    if let Some(controller) = &mut audio_state.controller {
        controller.add_audio_file(
            path,
            audio_file.data,
            audio_file.channels,
            audio_file.sample_rate,
        );

        Ok(AudioFileMetadata {
            pool_index,
            duration,
            sample_rate,
            channels,
            waveform,
        })
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_add_clip(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    pool_index: usize,
    start_time: f64,
    duration: f64,
    offset: f64,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.add_audio_clip(track_id, pool_index, start_time, duration, offset);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_move_clip(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    clip_id: u32,
    new_start_time: f64,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.move_clip(track_id, clip_id, new_start_time);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_start_recording(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    start_time: f64,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.start_recording(track_id, start_time);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_stop_recording(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.stop_recording();
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_pause_recording(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.pause_recording();
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_resume_recording(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.resume_recording();
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_create_midi_clip(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    start_time: f64,
    duration: f64,
) -> Result<u32, String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.create_midi_clip(track_id, start_time, duration);
        // Return a clip ID (for now, just use 0 as clips are managed internally)
        Ok(0)
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_add_midi_note(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    clip_id: u32,
    time_offset: f64,
    note: u8,
    velocity: u8,
    duration: f64,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.add_midi_note(track_id, clip_id, time_offset, note, velocity, duration);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

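/// Sends a live note-on event to the given track's instrument (e.g. from an
/// on-screen keyboard in the frontend); pair with `audio_send_midi_note_off`
/// to release the note.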
#[tauri::command]
pub async fn audio_send_midi_note_on(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    note: u8,
    velocity: u8,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        // Forward the live note event to the selected track's instrument
        controller.send_midi_note_on(track_id, note, velocity);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

#[tauri::command]
pub async fn audio_send_midi_note_off(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    note: u8,
) -> Result<(), String> {
    let mut audio_state = state.lock().unwrap();
    if let Some(controller) = &mut audio_state.controller {
        controller.send_midi_note_off(track_id, note);
        Ok(())
    } else {
        Err("Audio not initialized".to_string())
    }
}

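/// Loads a standard MIDI file, schedules it as a clip on the given track at
/// `start_time`, and returns the clip duration plus the reconstructed note
/// list (matched note-on / note-off pairs) so the frontend can render it.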
#[tauri::command]
pub async fn audio_load_midi_file(
    state: tauri::State<'_, Arc<Mutex<AudioState>>>,
    track_id: u32,
    path: String,
    start_time: f64,
) -> Result<MidiFileMetadata, String> {
    let mut audio_state = state.lock().unwrap();

    // Extract sample_rate before the mutable borrow
    let sample_rate = audio_state.sample_rate;

    if let Some(controller) = &mut audio_state.controller {
        // Load and parse the MIDI file
        let mut clip = daw_backend::load_midi_file(&path, 0, sample_rate)?;

        // Set the start time
        clip.start_time = start_time;
        let duration = clip.duration;

        // Extract note data from MIDI events
        let mut notes = Vec::new();
        let mut active_notes: std::collections::HashMap<u8, (f64, u8)> =
            std::collections::HashMap::new();

        for event in &clip.events {
            let time_seconds = event.timestamp as f64 / sample_rate as f64;

            if event.is_note_on() {
                // Store note on event (time and velocity)
                active_notes.insert(event.data1, (time_seconds, event.data2));
            } else if event.is_note_off() {
                // Find matching note on and create a MidiNote
                if let Some((start, velocity)) = active_notes.remove(&event.data1) {
                    notes.push(MidiNote {
                        note: event.data1,
                        start_time: start,
                        duration: time_seconds - start,
                        velocity,
                    });
                }
            }
        }

        // Add the loaded MIDI clip to the track
        controller.add_loaded_midi_clip(track_id, clip);

        Ok(MidiFileMetadata { duration, notes })
    } else {
        Err("Audio not initialized".to_string())
    }
}

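/// Wire format for events pushed to the frontend on the "audio-event" channel.
/// `#[serde(tag = "type")]` serializes each variant as an object with a
/// `type` discriminator alongside its fields.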
#[derive(serde::Serialize, Clone)]
#[serde(tag = "type")]
pub enum SerializedAudioEvent {
    PlaybackPosition { time: f64 },
    RecordingStarted { track_id: u32, clip_id: u32 },
    RecordingProgress { clip_id: u32, duration: f64 },
    RecordingStopped { clip_id: u32, pool_index: usize, waveform: Vec<WaveformPeak> },
    RecordingError { message: String },
    NoteOn { note: u8, velocity: u8 },
    NoteOff { note: u8 },
}

// audio_get_events command removed - events are now pushed via Tauri event system
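
// A minimal sanity check of the wire format described above. This is a sketch
// that assumes `serde_json` is available as a dev-dependency; it only exercises
// the serde attributes on `SerializedAudioEvent`, not the audio engine itself.
#[cfg(test)]
mod tests {
    use super::SerializedAudioEvent;

    #[test]
    fn playback_position_serializes_with_type_tag() {
        // Internally tagged enums flatten the variant name into a "type" field
        let event = SerializedAudioEvent::PlaybackPosition { time: 1.5 };
        let json = serde_json::to_string(&event).unwrap();
        assert_eq!(json, r#"{"type":"PlaybackPosition","time":1.5}"#);
    }

    #[test]
    fn note_on_serializes_with_type_tag() {
        // Field names stay snake_case because no rename_all attribute is set
        let event = SerializedAudioEvent::NoteOn { note: 60, velocity: 100 };
        let json = serde_json::to_string(&event).unwrap();
        assert_eq!(json, r#"{"type":"NoteOn","note":60,"velocity":100}"#);
    }
}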