add recording and reset function

Skyler Lehmkuhl 2025-10-22 20:06:02 -04:00
parent 9699e1e1ea
commit 48ec738027
13 changed files with 1468 additions and 8 deletions

daw-backend/Cargo.lock generated
View File

@@ -234,6 +234,7 @@ dependencies = [
"cpal",
"midly",
"rtrb",
"serde",
"symphonia",
]
@@ -638,6 +639,16 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "serde"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
]
[[package]]
name = "serde_core"
version = "1.0.228"

View File

@@ -8,6 +8,7 @@ cpal = "0.15"
symphonia = { version = "0.5", features = ["all"] }
rtrb = "0.3"
midly = "0.5"
serde = { version = "1.0", features = ["derive"] }
[dev-dependencies]

View File

@@ -0,0 +1,279 @@
/// Automation system for parameter modulation over time
use serde::{Deserialize, Serialize};
/// Unique identifier for automation lanes
pub type AutomationLaneId = u32;
/// Unique identifier for parameters that can be automated
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ParameterId {
/// Track volume
TrackVolume,
/// Track pan
TrackPan,
/// Effect parameter (effect_index, param_id)
EffectParameter(usize, u32),
/// Metatrack time stretch
TimeStretch,
/// Metatrack offset
TimeOffset,
}
/// Type of interpolation curve between automation points
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum CurveType {
/// Linear interpolation (straight line)
Linear,
/// Exponential curve (smooth acceleration)
Exponential,
/// S-curve (ease in/out)
SCurve,
/// Step (no interpolation, jump to next value)
Step,
}
/// A single automation point
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct AutomationPoint {
/// Time in seconds
pub time: f64,
/// Parameter value (normalized 0.0 to 1.0, or actual value depending on parameter)
pub value: f32,
/// Curve type to next point
pub curve: CurveType,
}
impl AutomationPoint {
/// Create a new automation point
pub fn new(time: f64, value: f32, curve: CurveType) -> Self {
Self { time, value, curve }
}
}
/// An automation lane for a specific parameter
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AutomationLane {
/// Unique identifier for this lane
pub id: AutomationLaneId,
/// Which parameter this lane controls
pub parameter_id: ParameterId,
/// Sorted list of automation points
points: Vec<AutomationPoint>,
/// Whether this lane is enabled
pub enabled: bool,
}
impl AutomationLane {
/// Create a new automation lane
pub fn new(id: AutomationLaneId, parameter_id: ParameterId) -> Self {
Self {
id,
parameter_id,
points: Vec::new(),
enabled: true,
}
}
/// Add an automation point, maintaining sorted order
pub fn add_point(&mut self, point: AutomationPoint) {
// Find insertion position to maintain sorted order
let pos = self.points.binary_search_by(|p| {
p.time.partial_cmp(&point.time).unwrap_or(std::cmp::Ordering::Equal)
});
match pos {
Ok(idx) => {
// Replace existing point at same time
self.points[idx] = point;
}
Err(idx) => {
// Insert at correct position
self.points.insert(idx, point);
}
}
}
/// Remove point at specific time
pub fn remove_point_at_time(&mut self, time: f64, tolerance: f64) -> bool {
if let Some(idx) = self.points.iter().position(|p| (p.time - time).abs() < tolerance) {
self.points.remove(idx);
true
} else {
false
}
}
/// Remove all points
pub fn clear(&mut self) {
self.points.clear();
}
/// Get all points
pub fn points(&self) -> &[AutomationPoint] {
&self.points
}
/// Get value at a specific time with interpolation
pub fn evaluate(&self, time: f64) -> Option<f32> {
if !self.enabled || self.points.is_empty() {
return None;
}
// Before first point
if time <= self.points[0].time {
return Some(self.points[0].value);
}
// After last point
if time >= self.points[self.points.len() - 1].time {
return Some(self.points[self.points.len() - 1].value);
}
// Find surrounding points
for i in 0..self.points.len() - 1 {
let p1 = &self.points[i];
let p2 = &self.points[i + 1];
if time >= p1.time && time <= p2.time {
return Some(interpolate(p1, p2, time));
}
}
None
}
/// Get number of points
pub fn point_count(&self) -> usize {
self.points.len()
}
}
/// Interpolate between two automation points based on curve type
fn interpolate(p1: &AutomationPoint, p2: &AutomationPoint, time: f64) -> f32 {
// Calculate normalized position between points (0.0 to 1.0)
let t = if p2.time == p1.time {
0.0
} else {
((time - p1.time) / (p2.time - p1.time)) as f32
};
// Apply curve
let curved_t = match p1.curve {
CurveType::Linear => t,
CurveType::Exponential => {
// Quadratic ease-in (y = t^2), used as an approximation of an exponential curve
t * t
}
CurveType::SCurve => {
// Smooth S-curve using smoothstep
smoothstep(t)
}
CurveType::Step => {
// Step: hold value until next point
return p1.value;
}
};
// Linear interpolation with curved t
p1.value + (p2.value - p1.value) * curved_t
}
/// Smoothstep function for S-curve interpolation
/// Returns a smooth curve from 0 to 1
#[inline]
fn smoothstep(t: f32) -> f32 {
// Clamp to [0, 1]
let t = t.clamp(0.0, 1.0);
// 3t^2 - 2t^3
t * t * (3.0 - 2.0 * t)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add_points_sorted() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(2.0, 0.5, CurveType::Linear));
lane.add_point(AutomationPoint::new(1.0, 0.3, CurveType::Linear));
lane.add_point(AutomationPoint::new(3.0, 0.8, CurveType::Linear));
assert_eq!(lane.points().len(), 3);
assert_eq!(lane.points()[0].time, 1.0);
assert_eq!(lane.points()[1].time, 2.0);
assert_eq!(lane.points()[2].time, 3.0);
}
#[test]
fn test_replace_point_at_same_time() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(1.0, 0.3, CurveType::Linear));
lane.add_point(AutomationPoint::new(1.0, 0.5, CurveType::Linear));
assert_eq!(lane.points().len(), 1);
assert_eq!(lane.points()[0].value, 0.5);
}
#[test]
fn test_linear_interpolation() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(0.0, 0.0, CurveType::Linear));
lane.add_point(AutomationPoint::new(1.0, 1.0, CurveType::Linear));
assert_eq!(lane.evaluate(0.0), Some(0.0));
assert_eq!(lane.evaluate(0.5), Some(0.5));
assert_eq!(lane.evaluate(1.0), Some(1.0));
}
#[test]
fn test_step_interpolation() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(0.0, 0.5, CurveType::Step));
lane.add_point(AutomationPoint::new(1.0, 1.0, CurveType::Step));
assert_eq!(lane.evaluate(0.0), Some(0.5));
assert_eq!(lane.evaluate(0.5), Some(0.5));
assert_eq!(lane.evaluate(0.99), Some(0.5));
assert_eq!(lane.evaluate(1.0), Some(1.0));
}
#[test]
fn test_evaluate_outside_range() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(1.0, 0.5, CurveType::Linear));
lane.add_point(AutomationPoint::new(2.0, 1.0, CurveType::Linear));
// Before first point
assert_eq!(lane.evaluate(0.0), Some(0.5));
// After last point
assert_eq!(lane.evaluate(3.0), Some(1.0));
}
#[test]
fn test_disabled_lane() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(0.0, 0.5, CurveType::Linear));
lane.enabled = false;
assert_eq!(lane.evaluate(0.0), None);
}
#[test]
fn test_remove_point() {
let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
lane.add_point(AutomationPoint::new(1.0, 0.5, CurveType::Linear));
lane.add_point(AutomationPoint::new(2.0, 0.8, CurveType::Linear));
assert!(lane.remove_point_at_time(1.0, 0.001));
assert_eq!(lane.points().len(), 1);
assert_eq!(lane.points()[0].time, 2.0);
}
}
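A minimal usage sketch of the lane API above (not part of this commit), assuming the daw_backend re-exports added to lib.rs in this change: build a short volume ramp and sample it at a few times.

use daw_backend::{AutomationLane, AutomationPoint, CurveType, ParameterId};

fn main() {
    let mut lane = AutomationLane::new(0, ParameterId::TrackVolume);
    // Fade in with an S-curve over two seconds, hold at 0.8, then reach full volume at 4 s.
    lane.add_point(AutomationPoint::new(0.0, 0.0, CurveType::SCurve));
    lane.add_point(AutomationPoint::new(2.0, 0.8, CurveType::Step));
    lane.add_point(AutomationPoint::new(4.0, 1.0, CurveType::Linear));

    for t in [0.0, 1.0, 2.0, 3.0, 4.0, 5.0] {
        // evaluate() returns None only when the lane is disabled or has no points.
        println!("volume at {t:.1}s = {:?}", lane.evaluate(t));
    }
}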

View File

@@ -3,6 +3,7 @@ use crate::audio::clip::ClipId;
use crate::audio::midi::{MidiClip, MidiClipId, MidiEvent};
use crate::audio::pool::AudioPool;
use crate::audio::project::Project;
use crate::audio::recording::RecordingState;
use crate::audio::track::{Track, TrackId};
use crate::command::{AudioEvent, Command};
use crate::effects::{Effect, GainEffect, PanEffect, SimpleEQ};
@@ -35,6 +36,11 @@ pub struct Engine {
// ID counters
next_midi_clip_id: MidiClipId,
next_clip_id: ClipId,
// Recording state
recording_state: Option<RecordingState>,
input_rx: Option<rtrb::Consumer<f32>>,
}
impl Engine {
@@ -65,9 +71,17 @@ impl Engine {
event_interval_frames,
mix_buffer: Vec::new(),
next_midi_clip_id: 0,
next_clip_id: 0,
recording_state: None,
input_rx: None,
}
}
/// Set the input ringbuffer consumer for recording
pub fn set_input_rx(&mut self, input_rx: rtrb::Consumer<f32>) {
self.input_rx = Some(input_rx);
}
/// Add an audio track to the engine
pub fn add_track(&mut self, track: Track) -> TrackId {
// For backwards compatibility, we'll extract the track data and add it to the project
@@ -190,6 +204,49 @@ impl Engine {
// Not playing, output silence
output.fill(0.0);
}
// Process recording if active (independent of playback state)
if let Some(recording) = &mut self.recording_state {
if let Some(input_rx) = &mut self.input_rx {
// Pull samples from input ringbuffer
let mut samples = Vec::new();
while let Ok(sample) = input_rx.pop() {
samples.push(sample);
}
// Add samples to recording
if !samples.is_empty() {
match recording.add_samples(&samples) {
Ok(flushed) => {
if flushed {
// A flush occurred, update clip duration and send progress event
let duration = recording.duration();
let clip_id = recording.clip_id;
let track_id = recording.track_id;
// Update clip duration in project
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.duration = duration;
}
}
// Send progress event
let _ = self.event_tx.push(AudioEvent::RecordingProgress(clip_id, duration));
}
}
Err(e) => {
// Recording error occurred
let _ = self.event_tx.push(AudioEvent::RecordingError(
format!("Recording write error: {}", e)
));
// Stop recording on error
self.recording_state = None;
}
}
}
}
}
}
/// Handle a command from the UI thread
@@ -362,6 +419,42 @@ impl Engine {
metatrack.pitch_shift = semitones;
}
}
Command::CreateAudioTrack(name) => {
let track_id = self.project.add_audio_track(name.clone(), None);
// Notify UI about the new audio track
let _ = self.event_tx.push(AudioEvent::TrackCreated(track_id, false, name));
}
Command::AddAudioFile(path, data, channels, sample_rate) => {
// Create AudioFile and add to pool
let audio_file = crate::audio::pool::AudioFile::new(
std::path::PathBuf::from(path.clone()),
data,
channels,
sample_rate,
);
let pool_index = self.audio_pool.add_file(audio_file);
// Notify UI about the new audio file
let _ = self.event_tx.push(AudioEvent::AudioFileAdded(pool_index, path));
}
Command::AddAudioClip(track_id, pool_index, start_time, duration, offset) => {
// Create a new clip with unique ID
let clip_id = self.next_clip_id;
self.next_clip_id += 1;
let clip = crate::audio::clip::Clip::new(
clip_id,
pool_index,
start_time,
duration,
offset,
);
// Add clip to track
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
track.clips.push(clip);
// Notify UI about the new clip
let _ = self.event_tx.push(AudioEvent::ClipAdded(track_id, clip_id));
}
}
Command::CreateMidiTrack(name) => {
let track_id = self.project.add_midi_track(name.clone(), None);
// Notify UI about the new MIDI track
@@ -402,6 +495,301 @@ impl Engine {
let stats = self.buffer_pool.stats();
let _ = self.event_tx.push(AudioEvent::BufferPoolStats(stats));
}
Command::CreateAutomationLane(track_id, parameter_id) => {
// Create a new automation lane on the specified track
let lane_id = match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
Some(track.add_automation_lane(parameter_id))
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
Some(track.add_automation_lane(parameter_id))
}
Some(crate::audio::track::TrackNode::Group(group)) => {
Some(group.add_automation_lane(parameter_id))
}
None => None,
};
if let Some(lane_id) = lane_id {
let _ = self.event_tx.push(AudioEvent::AutomationLaneCreated(
track_id,
lane_id,
parameter_id,
));
}
}
Command::AddAutomationPoint(track_id, lane_id, time, value, curve) => {
// Add an automation point to the specified lane
let point = crate::audio::AutomationPoint::new(time, value, curve);
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.add_point(point);
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.add_point(point);
}
}
Some(crate::audio::track::TrackNode::Group(group)) => {
if let Some(lane) = group.get_automation_lane_mut(lane_id) {
lane.add_point(point);
}
}
None => {}
}
}
Command::RemoveAutomationPoint(track_id, lane_id, time, tolerance) => {
// Remove automation point at specified time
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.remove_point_at_time(time, tolerance);
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.remove_point_at_time(time, tolerance);
}
}
Some(crate::audio::track::TrackNode::Group(group)) => {
if let Some(lane) = group.get_automation_lane_mut(lane_id) {
lane.remove_point_at_time(time, tolerance);
}
}
None => {}
}
}
Command::ClearAutomationLane(track_id, lane_id) => {
// Clear all points from the lane
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.clear();
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.clear();
}
}
Some(crate::audio::track::TrackNode::Group(group)) => {
if let Some(lane) = group.get_automation_lane_mut(lane_id) {
lane.clear();
}
}
None => {}
}
}
Command::RemoveAutomationLane(track_id, lane_id) => {
// Remove the automation lane entirely
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
track.remove_automation_lane(lane_id);
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
track.remove_automation_lane(lane_id);
}
Some(crate::audio::track::TrackNode::Group(group)) => {
group.remove_automation_lane(lane_id);
}
None => {}
}
}
Command::SetAutomationLaneEnabled(track_id, lane_id, enabled) => {
// Enable/disable the automation lane
match self.project.get_track_mut(track_id) {
Some(crate::audio::track::TrackNode::Audio(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.enabled = enabled;
}
}
Some(crate::audio::track::TrackNode::Midi(track)) => {
if let Some(lane) = track.get_automation_lane_mut(lane_id) {
lane.enabled = enabled;
}
}
Some(crate::audio::track::TrackNode::Group(group)) => {
if let Some(lane) = group.get_automation_lane_mut(lane_id) {
lane.enabled = enabled;
}
}
None => {}
}
}
Command::StartRecording(track_id, start_time) => {
// Start recording on the specified track
self.handle_start_recording(track_id, start_time);
}
Command::StopRecording => {
// Stop the current recording
self.handle_stop_recording();
}
Command::PauseRecording => {
// Pause the current recording
if let Some(recording) = &mut self.recording_state {
recording.pause();
}
}
Command::ResumeRecording => {
// Resume the current recording
if let Some(recording) = &mut self.recording_state {
recording.resume();
}
}
Command::Reset => {
// Reset the entire project to initial state
// Stop playback
self.playing = false;
self.playhead = 0;
self.playhead_atomic.store(0, Ordering::Relaxed);
// Stop any active recording
self.recording_state = None;
// Clear all project data
self.project = Project::new();
// Clear audio pool
self.audio_pool = AudioPool::new();
// Reset buffer pool (recreate with same settings)
let buffer_size = 512 * self.channels as usize;
self.buffer_pool = BufferPool::new(8, buffer_size);
// Reset ID counters
self.next_midi_clip_id = 0;
self.next_clip_id = 0;
// Clear mix buffer
self.mix_buffer.clear();
// Notify UI that reset is complete
let _ = self.event_tx.push(AudioEvent::ProjectReset);
}
}
}
/// Handle starting a recording
fn handle_start_recording(&mut self, track_id: TrackId, start_time: f64) {
use crate::io::WavWriter;
use std::env;
// Check if track exists and is an audio track
if let Some(crate::audio::track::TrackNode::Audio(_)) = self.project.get_track_mut(track_id) {
// Generate a unique temp file path
let temp_dir = env::temp_dir();
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs();
let temp_file_path = temp_dir.join(format!("daw_recording_{}.wav", timestamp));
// Create WAV writer
match WavWriter::create(&temp_file_path, self.sample_rate, self.channels) {
Ok(writer) => {
// Create intermediate clip
let clip_id = self.next_clip_id;
self.next_clip_id += 1;
let clip = crate::audio::clip::Clip::new(
clip_id,
0, // Temporary pool index, will be updated on finalization
start_time,
0.0, // Duration starts at 0, will be updated during recording
0.0,
);
// Add clip to track
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
track.clips.push(clip);
}
// Create recording state
let flush_interval_seconds = 5.0; // Flush every 5 seconds
let recording_state = RecordingState::new(
track_id,
clip_id,
temp_file_path,
writer,
self.sample_rate,
self.channels,
start_time,
flush_interval_seconds,
);
self.recording_state = Some(recording_state);
// Notify UI that recording has started
let _ = self.event_tx.push(AudioEvent::RecordingStarted(track_id, clip_id));
}
Err(e) => {
// Send error event to UI
let _ = self.event_tx.push(AudioEvent::RecordingError(
format!("Failed to create temp file: {}", e)
));
}
}
} else {
// Send error event if track not found or not an audio track
let _ = self.event_tx.push(AudioEvent::RecordingError(
format!("Track {} not found or is not an audio track", track_id)
));
}
}
/// Handle stopping a recording
fn handle_stop_recording(&mut self) {
if let Some(recording) = self.recording_state.take() {
let clip_id = recording.clip_id;
let track_id = recording.track_id;
// Finalize the recording and get temp file path
match recording.finalize() {
Ok(temp_file_path) => {
// Load the recorded audio file
match crate::io::AudioFile::load(&temp_file_path) {
Ok(audio_file) => {
// Add to pool
let pool_file = crate::audio::pool::AudioFile::new(
temp_file_path.clone(),
audio_file.data,
audio_file.channels,
audio_file.sample_rate,
);
let pool_index = self.audio_pool.add_file(pool_file);
// Update the clip to reference the pool
if let Some(crate::audio::track::TrackNode::Audio(track)) = self.project.get_track_mut(track_id) {
if let Some(clip) = track.clips.iter_mut().find(|c| c.id == clip_id) {
clip.audio_pool_index = pool_index;
// Duration should already be set during recording progress updates
}
}
// Delete temp file
let _ = std::fs::remove_file(&temp_file_path);
// Notify UI that recording has stopped
let _ = self.event_tx.push(AudioEvent::RecordingStopped(clip_id, pool_index));
}
Err(e) => {
// Send error event
let _ = self.event_tx.push(AudioEvent::RecordingError(
format!("Failed to load recorded audio: {}", e)
));
}
}
}
Err(e) => {
// Send error event
let _ = self.event_tx.push(AudioEvent::RecordingError(
format!("Failed to finalize recording: {}", e)
));
}
}
}
}
@@ -429,6 +817,13 @@ pub struct EngineController {
channels: u32,
}
// Safety: EngineController is safe to Send across threads because:
// - rtrb::Producer<Command> is Send by design (lock-free queue for cross-thread communication)
// - Arc<AtomicU64> is Send + Sync (atomic types are inherently thread-safe)
// - u32 primitives are Send + Sync (Copy types)
// EngineController is only accessed through Mutex in application state, ensuring no concurrent mutable access.
unsafe impl Send for EngineController {}
impl EngineController {
/// Start or resume playback
pub fn play(&mut self) {
@@ -535,6 +930,21 @@ impl EngineController {
let _ = self.command_tx.push(Command::SetPitchShift(track_id, semitones));
}
/// Create a new audio track
pub fn create_audio_track(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateAudioTrack(name));
}
/// Add an audio file to the pool (must be called from non-audio thread with pre-loaded data)
pub fn add_audio_file(&mut self, path: String, data: Vec<f32>, channels: u32, sample_rate: u32) {
let _ = self.command_tx.push(Command::AddAudioFile(path, data, channels, sample_rate));
}
/// Add a clip to an audio track
pub fn add_audio_clip(&mut self, track_id: TrackId, pool_index: usize, start_time: f64, duration: f64, offset: f64) {
let _ = self.command_tx.push(Command::AddAudioClip(track_id, pool_index, start_time, duration, offset));
}
/// Create a new MIDI track
pub fn create_midi_track(&mut self, name: String) {
let _ = self.command_tx.push(Command::CreateMidiTrack(name));
@@ -560,4 +970,92 @@ impl EngineController {
pub fn request_buffer_pool_stats(&mut self) {
let _ = self.command_tx.push(Command::RequestBufferPoolStats);
}
/// Create a new automation lane on a track
/// Returns an event AutomationLaneCreated with the lane ID
pub fn create_automation_lane(&mut self, track_id: TrackId, parameter_id: crate::audio::ParameterId) {
let _ = self.command_tx.push(Command::CreateAutomationLane(track_id, parameter_id));
}
/// Add an automation point to a lane
pub fn add_automation_point(
&mut self,
track_id: TrackId,
lane_id: crate::audio::AutomationLaneId,
time: f64,
value: f32,
curve: crate::audio::CurveType,
) {
let _ = self.command_tx.push(Command::AddAutomationPoint(
track_id, lane_id, time, value, curve,
));
}
/// Remove an automation point at a specific time
pub fn remove_automation_point(
&mut self,
track_id: TrackId,
lane_id: crate::audio::AutomationLaneId,
time: f64,
tolerance: f64,
) {
let _ = self.command_tx.push(Command::RemoveAutomationPoint(
track_id, lane_id, time, tolerance,
));
}
/// Clear all automation points from a lane
pub fn clear_automation_lane(
&mut self,
track_id: TrackId,
lane_id: crate::audio::AutomationLaneId,
) {
let _ = self.command_tx.push(Command::ClearAutomationLane(track_id, lane_id));
}
/// Remove an automation lane entirely
pub fn remove_automation_lane(
&mut self,
track_id: TrackId,
lane_id: crate::audio::AutomationLaneId,
) {
let _ = self.command_tx.push(Command::RemoveAutomationLane(track_id, lane_id));
}
/// Enable or disable an automation lane
pub fn set_automation_lane_enabled(
&mut self,
track_id: TrackId,
lane_id: crate::audio::AutomationLaneId,
enabled: bool,
) {
let _ = self.command_tx.push(Command::SetAutomationLaneEnabled(
track_id, lane_id, enabled,
));
}
/// Start recording on a track
pub fn start_recording(&mut self, track_id: TrackId, start_time: f64) {
let _ = self.command_tx.push(Command::StartRecording(track_id, start_time));
}
/// Stop the current recording
pub fn stop_recording(&mut self) {
let _ = self.command_tx.push(Command::StopRecording);
}
/// Pause the current recording
pub fn pause_recording(&mut self) {
let _ = self.command_tx.push(Command::PauseRecording);
}
/// Resume the current recording
pub fn resume_recording(&mut self) {
let _ = self.command_tx.push(Command::ResumeRecording);
}
/// Reset the entire project (clear all tracks, audio pool, and state)
pub fn reset(&mut self) {
let _ = self.command_tx.push(Command::Reset);
}
}
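The engine only consumes recording input that arrives on the ring buffer passed to set_input_rx(); this commit does not wire up a capture stream itself. The sketch below is a hedged guess (the helper name wire_input, buffer size, and drop-on-full policy are assumptions) at how a caller might feed microphone samples in with cpal and rtrb, called before the engine is moved onto the output-stream thread.

use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};

fn wire_input(engine: &mut daw_backend::Engine) -> Result<cpal::Stream, String> {
    let host = cpal::default_host();
    let device = host.default_input_device().ok_or("no input device")?;
    let supported = device.default_input_config().map_err(|e| e.to_string())?;
    let config: cpal::StreamConfig = supported.into();

    // Roughly one second of interleaved stereo audio at 48 kHz, sized so the
    // capture callback rarely finds the buffer full.
    let (mut producer, consumer) = rtrb::RingBuffer::<f32>::new(48_000 * 2);
    engine.set_input_rx(consumer);

    let stream = device
        .build_input_stream(
            &config,
            move |data: &[f32], _: &cpal::InputCallbackInfo| {
                for &sample in data {
                    // Drop samples if the engine has not drained the buffer yet,
                    // rather than blocking inside the audio callback.
                    let _ = producer.push(sample);
                }
            },
            |err| eprintln!("input stream error: {}", err),
            None,
        )
        .map_err(|e| e.to_string())?;
    stream.play().map_err(|e| e.to_string())?;
    Ok(stream)
}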

View File

@@ -1,15 +1,19 @@
pub mod automation;
pub mod buffer_pool;
pub mod clip;
pub mod engine;
pub mod midi;
pub mod pool;
pub mod project;
pub mod recording;
pub mod track;
pub use automation::{AutomationLane, AutomationLaneId, AutomationPoint, CurveType, ParameterId};
pub use buffer_pool::BufferPool;
pub use clip::{Clip, ClipId};
pub use engine::{Engine, EngineController};
pub use midi::{MidiClip, MidiClipId, MidiEvent};
pub use pool::{AudioFile as PoolAudioFile, AudioPool};
pub use project::Project;
pub use recording::RecordingState;
pub use track::{AudioTrack, Metatrack, MidiTrack, RenderContext, Track, TrackId, TrackNode};

View File

@@ -0,0 +1,124 @@
/// Audio recording system for capturing microphone input
use crate::audio::{ClipId, TrackId};
use crate::io::WavWriter;
use std::path::PathBuf;
/// State of an active recording session
pub struct RecordingState {
/// Track being recorded to
pub track_id: TrackId,
/// Clip ID for the intermediate clip
pub clip_id: ClipId,
/// Path to temporary WAV file
pub temp_file_path: PathBuf,
/// WAV file writer
pub writer: WavWriter,
/// Sample rate of recording
pub sample_rate: u32,
/// Number of channels
pub channels: u32,
/// Timeline start position in seconds
pub start_time: f64,
/// Total frames written to disk
pub frames_written: usize,
/// Accumulation buffer for next flush
pub buffer: Vec<f32>,
/// Number of frames to accumulate before flushing
pub flush_interval_frames: usize,
/// Whether recording is currently paused
pub paused: bool,
}
impl RecordingState {
/// Create a new recording state
pub fn new(
track_id: TrackId,
clip_id: ClipId,
temp_file_path: PathBuf,
writer: WavWriter,
sample_rate: u32,
channels: u32,
start_time: f64,
flush_interval_seconds: f64,
) -> Self {
let flush_interval_frames = (sample_rate as f64 * flush_interval_seconds) as usize;
Self {
track_id,
clip_id,
temp_file_path,
writer,
sample_rate,
channels,
start_time,
frames_written: 0,
buffer: Vec::new(),
flush_interval_frames,
paused: false,
}
}
/// Add samples to the accumulation buffer
/// Returns true if a flush occurred
pub fn add_samples(&mut self, samples: &[f32]) -> Result<bool, std::io::Error> {
if self.paused {
return Ok(false);
}
self.buffer.extend_from_slice(samples);
// Check if we should flush
let frames_in_buffer = self.buffer.len() / self.channels as usize;
if frames_in_buffer >= self.flush_interval_frames {
self.flush()?;
return Ok(true);
}
Ok(false)
}
/// Flush accumulated samples to disk
pub fn flush(&mut self) -> Result<(), std::io::Error> {
if self.buffer.is_empty() {
return Ok(());
}
// Write to WAV file
self.writer.write_samples(&self.buffer)?;
// Update frames written
let frames_flushed = self.buffer.len() / self.channels as usize;
self.frames_written += frames_flushed;
// Clear buffer
self.buffer.clear();
Ok(())
}
/// Get current recording duration in seconds
pub fn duration(&self) -> f64 {
self.frames_written as f64 / self.sample_rate as f64
}
/// Finalize the recording and return the temp file path
pub fn finalize(mut self) -> Result<PathBuf, std::io::Error> {
// Flush any remaining samples
self.flush()?;
// Finalize the WAV file
self.writer.finalize()?;
Ok(self.temp_file_path)
}
/// Pause recording
pub fn pause(&mut self) {
self.paused = true;
}
/// Resume recording
pub fn resume(&mut self) {
self.paused = false;
}
}
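A small standalone sketch of the flush cycle above (not part of this commit): samples accumulate in memory and reach disk once flush_interval_seconds' worth of frames are buffered. The temp path, block size, and one-second flush interval are illustrative.

use daw_backend::{RecordingState, WavWriter};

fn main() -> std::io::Result<()> {
    let path = std::env::temp_dir().join("recording_demo.wav");
    let writer = WavWriter::create(&path, 48_000, 2)?;
    // track_id 0, clip_id 0, timeline start 0.0 s, flush every 1.0 s of audio.
    let mut rec = RecordingState::new(0, 0, path.clone(), writer, 48_000, 2, 0.0, 1.0);

    // Feed ten 0.25 s blocks of stereo silence; a flush fires every fourth block.
    let block = vec![0.0f32; 48_000 / 4 * 2];
    for _ in 0..10 {
        let flushed = rec.add_samples(&block)?;
        if flushed {
            println!("flushed, duration so far: {:.2}s", rec.duration());
        }
    }
    // finalize() writes any remaining samples and patches the WAV header.
    let final_path = rec.finalize()?;
    println!("recording written to {}", final_path.display());
    Ok(())
}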

View File

@@ -1,7 +1,9 @@
use super::automation::{AutomationLane, AutomationLaneId, ParameterId};
use super::clip::Clip;
use super::midi::MidiClip;
use super::pool::AudioPool;
use crate::effects::{Effect, SimpleSynth};
use std::collections::HashMap;
/// Track ID type
pub type TrackId = u32;
@@ -141,6 +143,9 @@ pub struct Metatrack {
pub pitch_shift: f32,
/// Time offset in seconds (shift content forward/backward in time)
pub offset: f64,
/// Automation lanes for this metatrack
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId,
}
impl Metatrack {
@@ -157,9 +162,71 @@ impl Metatrack {
time_stretch: 1.0,
pitch_shift: 0.0,
offset: 0.0,
automation_lanes: HashMap::new(),
next_automation_id: 0,
}
}
/// Add an automation lane to this metatrack
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;
self.next_automation_id += 1;
let lane = AutomationLane::new(lane_id, parameter_id);
self.automation_lanes.insert(lane_id, lane);
lane_id
}
/// Get an automation lane by ID
pub fn get_automation_lane(&self, lane_id: AutomationLaneId) -> Option<&AutomationLane> {
self.automation_lanes.get(&lane_id)
}
/// Get a mutable automation lane by ID
pub fn get_automation_lane_mut(&mut self, lane_id: AutomationLaneId) -> Option<&mut AutomationLane> {
self.automation_lanes.get_mut(&lane_id)
}
/// Remove an automation lane
pub fn remove_automation_lane(&mut self, lane_id: AutomationLaneId) -> bool {
self.automation_lanes.remove(&lane_id).is_some()
}
/// Evaluate automation at a specific time and return effective parameters
pub fn evaluate_automation_at_time(&self, time: f64) -> (f32, f32, f64) {
let mut volume = self.volume;
let mut time_stretch = self.time_stretch;
let mut offset = self.offset;
// Check for automation
for lane in self.automation_lanes.values() {
if !lane.enabled {
continue;
}
match lane.parameter_id {
ParameterId::TrackVolume => {
if let Some(automated_value) = lane.evaluate(time) {
volume = automated_value;
}
}
ParameterId::TimeStretch => {
if let Some(automated_value) = lane.evaluate(time) {
time_stretch = automated_value;
}
}
ParameterId::TimeOffset => {
if let Some(automated_value) = lane.evaluate(time) {
offset = automated_value as f64;
}
}
_ => {}
}
}
(volume, time_stretch, offset)
}
/// Add a child track to this group
pub fn add_child(&mut self, track_id: TrackId) {
if !self.children.contains(&track_id) {
@@ -239,6 +306,9 @@ pub struct MidiTrack {
pub volume: f32,
pub muted: bool,
pub solo: bool,
/// Automation lanes for this track
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId,
}
impl MidiTrack {
@@ -253,9 +323,36 @@ impl MidiTrack {
volume: 1.0,
muted: false,
solo: false,
automation_lanes: HashMap::new(),
next_automation_id: 0,
}
}
/// Add an automation lane to this track
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;
self.next_automation_id += 1;
let lane = AutomationLane::new(lane_id, parameter_id);
self.automation_lanes.insert(lane_id, lane);
lane_id
}
/// Get an automation lane by ID
pub fn get_automation_lane(&self, lane_id: AutomationLaneId) -> Option<&AutomationLane> {
self.automation_lanes.get(&lane_id)
}
/// Get a mutable automation lane by ID
pub fn get_automation_lane_mut(&mut self, lane_id: AutomationLaneId) -> Option<&mut AutomationLane> {
self.automation_lanes.get_mut(&lane_id)
}
/// Remove an automation lane
pub fn remove_automation_lane(&mut self, lane_id: AutomationLaneId) -> bool {
self.automation_lanes.remove(&lane_id).is_some()
}
/// Add an effect to the track's effect chain
pub fn add_effect(&mut self, effect: Box<dyn Effect>) {
self.effects.push(effect);
@@ -324,11 +421,37 @@ impl MidiTrack {
effect.process(output, channels as usize, sample_rate);
}
// Evaluate and apply automation
let effective_volume = self.evaluate_automation_at_time(playhead_seconds);
// Apply track volume
for sample in output.iter_mut() {
*sample *= effective_volume;
}
}
/// Evaluate automation at a specific time and return the effective volume
fn evaluate_automation_at_time(&self, time: f64) -> f32 {
let mut volume = self.volume;
// Check for volume automation
for lane in self.automation_lanes.values() {
if !lane.enabled {
continue;
}
match lane.parameter_id {
ParameterId::TrackVolume => {
if let Some(automated_value) = lane.evaluate(time) {
volume = automated_value;
}
}
_ => {}
}
}
volume
}
}
/// Audio track with clips and effect chain
@@ -340,6 +463,9 @@ pub struct AudioTrack {
pub volume: f32,
pub muted: bool,
pub solo: bool,
/// Automation lanes for this track
pub automation_lanes: HashMap<AutomationLaneId, AutomationLane>,
next_automation_id: AutomationLaneId,
}
impl AudioTrack {
@@ -353,9 +479,36 @@ impl AudioTrack {
volume: 1.0,
muted: false,
solo: false,
automation_lanes: HashMap::new(),
next_automation_id: 0,
}
}
/// Add an automation lane to this track
pub fn add_automation_lane(&mut self, parameter_id: ParameterId) -> AutomationLaneId {
let lane_id = self.next_automation_id;
self.next_automation_id += 1;
let lane = AutomationLane::new(lane_id, parameter_id);
self.automation_lanes.insert(lane_id, lane);
lane_id
}
/// Get an automation lane by ID
pub fn get_automation_lane(&self, lane_id: AutomationLaneId) -> Option<&AutomationLane> {
self.automation_lanes.get(&lane_id)
}
/// Get a mutable automation lane by ID
pub fn get_automation_lane_mut(&mut self, lane_id: AutomationLaneId) -> Option<&mut AutomationLane> {
self.automation_lanes.get_mut(&lane_id)
}
/// Remove an automation lane
pub fn remove_automation_lane(&mut self, lane_id: AutomationLaneId) -> bool {
self.automation_lanes.remove(&lane_id).is_some()
}
/// Add an effect to the track's effect chain
pub fn add_effect(&mut self, effect: Box<dyn Effect>) {
self.effects.push(effect);
@@ -435,14 +588,40 @@ impl AudioTrack {
effect.process(output, channels as usize, sample_rate);
}
// Evaluate and apply automation
let effective_volume = self.evaluate_automation_at_time(playhead_seconds);
// Apply track volume
for sample in output.iter_mut() {
*sample *= effective_volume;
}
rendered
}
/// Evaluate automation at a specific time and return the effective volume
fn evaluate_automation_at_time(&self, time: f64) -> f32 {
let mut volume = self.volume;
// Check for volume automation
for lane in self.automation_lanes.values() {
if !lane.enabled {
continue;
}
match lane.parameter_id {
ParameterId::TrackVolume => {
if let Some(automated_value) = lane.evaluate(time) {
volume = automated_value;
}
}
_ => {}
}
}
volume
}
/// Render a single clip into the output buffer
fn render_clip(
&self,

View File

@@ -1,4 +1,7 @@
use crate::audio::{
AutomationLaneId, ClipId, CurveType, MidiClip, MidiClipId, ParameterId,
TrackId,
};
use crate::audio::buffer_pool::BufferPoolStats;
/// Commands sent from UI/control thread to audio thread
@@ -54,6 +57,15 @@ pub enum Command {
/// Set metatrack pitch shift in semitones (track_id, semitones) - for future use
SetPitchShift(TrackId, f32),
// Audio track commands
/// Create a new audio track with a name
CreateAudioTrack(String),
/// Add an audio file to the pool (path, data, channels, sample_rate)
/// Returns the pool index via an AudioEvent
AddAudioFile(String, Vec<f32>, u32, u32),
/// Add a clip to an audio track (track_id, pool_index, start_time, duration, offset)
AddAudioClip(TrackId, usize, f64, f64, f64),
// MIDI commands
/// Create a new MIDI track with a name
CreateMidiTrack(String),
@@ -67,6 +79,34 @@ pub enum Command {
// Diagnostics commands
/// Request buffer pool statistics
RequestBufferPoolStats,
// Automation commands
/// Create a new automation lane on a track (track_id, parameter_id)
CreateAutomationLane(TrackId, ParameterId),
/// Add an automation point to a lane (track_id, lane_id, time, value, curve)
AddAutomationPoint(TrackId, AutomationLaneId, f64, f32, CurveType),
/// Remove an automation point at a specific time (track_id, lane_id, time, tolerance)
RemoveAutomationPoint(TrackId, AutomationLaneId, f64, f64),
/// Clear all automation points from a lane (track_id, lane_id)
ClearAutomationLane(TrackId, AutomationLaneId),
/// Remove an automation lane (track_id, lane_id)
RemoveAutomationLane(TrackId, AutomationLaneId),
/// Enable/disable an automation lane (track_id, lane_id, enabled)
SetAutomationLaneEnabled(TrackId, AutomationLaneId, bool),
// Recording commands
/// Start recording on a track (track_id, start_time)
StartRecording(TrackId, f64),
/// Stop the current recording
StopRecording,
/// Pause the current recording
PauseRecording,
/// Resume the current recording
ResumeRecording,
// Project commands
/// Reset the entire project (remove all tracks, clear audio pool, reset state)
Reset,
}
/// Events sent from audio thread back to UI/control thread
@@ -80,6 +120,22 @@ pub enum AudioEvent {
BufferUnderrun,
/// A new track was created (track_id, is_metatrack, name)
TrackCreated(TrackId, bool, String),
/// An audio file was added to the pool (pool_index, path)
AudioFileAdded(usize, String),
/// A clip was added to a track (track_id, clip_id)
ClipAdded(TrackId, ClipId),
/// Buffer pool statistics response
BufferPoolStats(BufferPoolStats),
/// Automation lane created (track_id, lane_id, parameter_id)
AutomationLaneCreated(TrackId, AutomationLaneId, ParameterId),
/// Recording started (track_id, clip_id)
RecordingStarted(TrackId, ClipId),
/// Recording progress update (clip_id, current_duration)
RecordingProgress(ClipId, f64),
/// Recording stopped (clip_id, pool_index)
RecordingStopped(ClipId, usize),
/// Recording error (error_message)
RecordingError(String),
/// Project has been reset
ProjectReset,
}
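A hedged sketch of the recording command/event round trip defined above, assuming the caller holds an EngineController plus the matching rtrb consumer of AudioEvent from engine setup; the helper name record_for and the 100 ms polling interval are illustrative, not part of this commit.

use daw_backend::{AudioEvent, EngineController};

fn record_for(
    controller: &mut EngineController,
    events: &mut rtrb::Consumer<AudioEvent>,
    track_id: u32,
    seconds: f64,
) {
    controller.start_recording(track_id, 0.0);
    let started = std::time::Instant::now();
    while started.elapsed().as_secs_f64() < seconds {
        // Drain whatever the audio thread has pushed since the last poll.
        while let Ok(event) = events.pop() {
            match event {
                AudioEvent::RecordingStarted(t, clip) => println!("recording clip {} on track {}", clip, t),
                AudioEvent::RecordingProgress(clip, dur) => println!("clip {}: {:.2}s captured", clip, dur),
                AudioEvent::RecordingError(msg) => eprintln!("recording failed: {}", msg),
                _ => {}
            }
        }
        std::thread::sleep(std::time::Duration::from_millis(100));
    }
    // The engine finalizes the temp WAV and emits RecordingStopped with the pool index.
    controller.stop_recording();
}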

View File

@@ -7,6 +7,12 @@ use symphonia::core::io::MediaSourceStream;
use symphonia::core::meta::MetadataOptions;
use symphonia::core::probe::Hint;
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct WaveformPeak {
pub min: f32,
pub max: f32,
}
pub struct AudioFile {
pub data: Vec<f32>,
pub channels: u32,
@@ -121,4 +127,48 @@ impl AudioFile {
frames,
})
}
/// Calculate the duration of the audio file in seconds
pub fn duration(&self) -> f64 {
self.frames as f64 / self.sample_rate as f64
}
/// Generate a waveform overview with the specified number of peaks
/// This creates a downsampled representation suitable for timeline visualization
pub fn generate_waveform_overview(&self, target_peaks: usize) -> Vec<WaveformPeak> {
if self.frames == 0 || target_peaks == 0 {
return Vec::new();
}
let total_frames = self.frames as usize;
let frames_per_peak = (total_frames / target_peaks).max(1);
let actual_peaks = (total_frames + frames_per_peak - 1) / frames_per_peak;
let mut peaks = Vec::with_capacity(actual_peaks);
for peak_idx in 0..actual_peaks {
let start_frame = peak_idx * frames_per_peak;
let end_frame = ((peak_idx + 1) * frames_per_peak).min(total_frames);
let mut min = 0.0f32;
let mut max = 0.0f32;
// Scan all samples in this window
for frame_idx in start_frame..end_frame {
// For multi-channel audio, combine all channels
for ch in 0..self.channels as usize {
let sample_idx = frame_idx * self.channels as usize + ch;
if sample_idx < self.data.len() {
let sample = self.data[sample_idx];
min = min.min(sample);
max = max.max(sample);
}
}
}
peaks.push(WaveformPeak { min, max });
}
peaks
}
}
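A brief sketch of how a UI might use the new duration() and generate_waveform_overview() helpers, assuming AudioFile::load() takes a path and returns an error that implements Display, as it is used in engine.rs; the file path "loop.wav" and the 800 px width are placeholders.

use std::path::Path;
use daw_backend::AudioFile;

fn main() {
    let file = match AudioFile::load(Path::new("loop.wav")) {
        Ok(f) => f,
        Err(e) => {
            eprintln!("failed to load: {}", e);
            return;
        }
    };
    println!("{:.2}s of audio", file.duration());

    // One peak per horizontal pixel of an 800 px wide clip view.
    let peaks = file.generate_waveform_overview(800);
    for (x, peak) in peaks.iter().enumerate().take(5) {
        // Each WaveformPeak holds the min/max sample over its window, combined across channels.
        println!("px {}: min {:.3}, max {:.3}", x, peak.min, peak.max);
    }
}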

View File

@@ -1,5 +1,7 @@
pub mod audio_file;
pub mod midi_file;
pub mod wav_writer;
pub use audio_file::{AudioFile, WaveformPeak};
pub use midi_file::load_midi_file;
pub use wav_writer::WavWriter;

View File

@@ -0,0 +1,113 @@
/// Incremental WAV file writer for streaming audio to disk
use std::fs::File;
use std::io::{self, Seek, SeekFrom, Write};
use std::path::Path;
/// WAV file writer that supports incremental writing
pub struct WavWriter {
file: File,
sample_rate: u32,
channels: u32,
frames_written: usize,
}
impl WavWriter {
/// Create a new WAV file and write initial header
/// The header is written with placeholder sizes that will be updated on finalization
pub fn create(path: impl AsRef<Path>, sample_rate: u32, channels: u32) -> io::Result<Self> {
let mut file = File::create(path)?;
// Write initial WAV header with placeholder sizes
write_wav_header(&mut file, sample_rate, channels, 0)?;
Ok(Self {
file,
sample_rate,
channels,
frames_written: 0,
})
}
/// Append audio samples to the file
/// Expects interleaved f32 samples in range [-1.0, 1.0]
pub fn write_samples(&mut self, samples: &[f32]) -> io::Result<()> {
// Convert f32 samples to 16-bit PCM
let pcm_data: Vec<u8> = samples
.iter()
.flat_map(|&sample| {
let clamped = sample.clamp(-1.0, 1.0);
let pcm_value = (clamped * 32767.0) as i16;
pcm_value.to_le_bytes()
})
.collect();
self.file.write_all(&pcm_data)?;
self.frames_written += samples.len() / self.channels as usize;
Ok(())
}
/// Get the current number of frames written
pub fn frames_written(&self) -> usize {
self.frames_written
}
/// Get the current duration in seconds
pub fn duration(&self) -> f64 {
self.frames_written as f64 / self.sample_rate as f64
}
/// Finalize the WAV file by updating the header with correct sizes
pub fn finalize(mut self) -> io::Result<()> {
// Flush any remaining data
self.file.flush()?;
// Calculate total data size
let data_size = self.frames_written * self.channels as usize * 2; // 2 bytes per sample (16-bit)
let riff_chunk_size = 36 + data_size; // RIFF chunk size = 36 + data bytes (total file size minus the 8-byte RIFF header)
// Seek to RIFF chunk size (offset 4)
self.file.seek(SeekFrom::Start(4))?;
self.file.write_all(&(riff_chunk_size as u32).to_le_bytes())?;
// Seek to data chunk size (offset 40)
self.file.seek(SeekFrom::Start(40))?;
self.file.write_all(&(data_size as u32).to_le_bytes())?;
self.file.flush()?;
Ok(())
}
}
/// Write WAV header with specified parameters
fn write_wav_header(file: &mut File, sample_rate: u32, channels: u32, frames: usize) -> io::Result<()> {
let bytes_per_sample = 2u16; // 16-bit PCM
let data_size = (frames * channels as usize * bytes_per_sample as usize) as u32;
let riff_chunk_size = 36 + data_size; // total file size minus the 8-byte RIFF header
// RIFF header
file.write_all(b"RIFF")?;
file.write_all(&riff_chunk_size.to_le_bytes())?;
file.write_all(b"WAVE")?;
// fmt chunk
file.write_all(b"fmt ")?;
file.write_all(&16u32.to_le_bytes())?; // fmt chunk size
file.write_all(&1u16.to_le_bytes())?; // PCM format
file.write_all(&(channels as u16).to_le_bytes())?;
file.write_all(&sample_rate.to_le_bytes())?;
let byte_rate = sample_rate * channels * bytes_per_sample as u32;
file.write_all(&byte_rate.to_le_bytes())?;
let block_align = channels as u16 * bytes_per_sample;
file.write_all(&block_align.to_le_bytes())?;
file.write_all(&(bytes_per_sample * 8).to_le_bytes())?; // bits per sample
// data chunk header
file.write_all(b"data")?;
file.write_all(&data_size.to_le_bytes())?;
Ok(())
}
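A standalone sketch exercising WavWriter (not part of this commit): write one second of a 440 Hz sine, then finalize so the RIFF and data chunk sizes at offsets 4 and 40 are patched to the real length. The output filename is illustrative.

use daw_backend::WavWriter;

fn main() -> std::io::Result<()> {
    let sample_rate = 44_100u32;
    let mut writer = WavWriter::create("sine.wav", sample_rate, 1)?;

    // One second of a 440 Hz sine at half amplitude, mono.
    let samples: Vec<f32> = (0..sample_rate)
        .map(|n| (2.0 * std::f32::consts::PI * 440.0 * n as f32 / sample_rate as f32).sin() * 0.5)
        .collect();
    writer.write_samples(&samples)?;

    assert_eq!(writer.frames_written(), sample_rate as usize);
    println!("wrote {:.2}s", writer.duration());
    // finalize() rewrites the placeholder header sizes with the real ones.
    writer.finalize()
}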

View File

@@ -12,9 +12,71 @@ pub mod io;
// Re-export commonly used types
pub use audio::{
AudioPool, AudioTrack, AutomationLane, AutomationLaneId, AutomationPoint, BufferPool, Clip, ClipId, CurveType, Engine, EngineController,
Metatrack, MidiClip, MidiClipId, MidiEvent, MidiTrack, ParameterId, PoolAudioFile, Project, RecordingState, RenderContext, Track, TrackId,
TrackNode,
};
pub use command::{AudioEvent, Command};
pub use effects::{Effect, GainEffect, PanEffect, SimpleEQ, SimpleSynth};
pub use io::{load_midi_file, AudioFile, WaveformPeak, WavWriter};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
/// Simple audio system that handles cpal initialization internally
pub struct AudioSystem {
pub controller: EngineController,
pub stream: cpal::Stream,
pub event_rx: rtrb::Consumer<AudioEvent>,
pub sample_rate: u32,
pub channels: u32,
}
impl AudioSystem {
/// Initialize the audio system with default device
pub fn new() -> Result<Self, String> {
let host = cpal::default_host();
let device = host
.default_output_device()
.ok_or("No output device available")?;
let default_config = device.default_output_config().map_err(|e| e.to_string())?;
let sample_rate = default_config.sample_rate().0;
let channels = default_config.channels() as u32;
// Create queues
let (command_tx, command_rx) = rtrb::RingBuffer::new(256);
let (event_tx, event_rx) = rtrb::RingBuffer::new(256);
// Create engine
let mut engine = Engine::new(sample_rate, channels, command_rx, event_tx);
let controller = engine.get_controller(command_tx);
// Build stream
let config: cpal::StreamConfig = default_config.clone().into();
let mut buffer = vec![0.0f32; 16384];
let stream = device
.build_output_stream(
&config,
move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
let buf = &mut buffer[..data.len()];
buf.fill(0.0);
engine.process(buf);
data.copy_from_slice(buf);
},
|err| eprintln!("Stream error: {}", err),
None,
)
.map_err(|e| e.to_string())?;
stream.play().map_err(|e| e.to_string())?;
Ok(Self {
controller,
stream,
event_rx,
sample_rate,
channels,
})
}
}
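A minimal sketch of the new AudioSystem wrapper: one call sets up the default output device, the engine, and the command/event ring buffers. The track name, loop count, and sleep interval are illustrative, and the returned cpal stream stays alive only as long as the AudioSystem value does.

use daw_backend::{AudioEvent, AudioSystem};

fn main() -> Result<(), String> {
    let mut system = AudioSystem::new()?;
    println!("audio running at {} Hz, {} channels", system.sample_rate, system.channels);

    system.controller.create_audio_track("Drums".to_string());
    system.controller.play();

    // Poll the engine's events for about a second; a real application would do
    // this on a UI tick instead of a fixed loop.
    for _ in 0..50 {
        while let Ok(event) = system.event_rx.pop() {
            if let AudioEvent::TrackCreated(id, _, name) = event {
                println!("created track {} ('{}')", id, name);
            }
        }
        std::thread::sleep(std::time::Duration::from_millis(20));
    }
    Ok(())
}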

View File

@@ -1,5 +1,5 @@
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
use daw_backend::{load_midi_file, AudioEvent, AudioFile, Clip, CurveType, Engine, ParameterId, PoolAudioFile, Track};
use std::env;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
@@ -224,6 +224,56 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::AutomationLaneCreated(track_id, lane_id, parameter_id) => {
print!("\r\x1b[K");
println!("Automation lane {} created on track {} for parameter {:?}",
lane_id, track_id, parameter_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::AudioFileAdded(pool_index, path) => {
print!("\r\x1b[K");
println!("Audio file added to pool at index {}: '{}'", pool_index, path);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::ClipAdded(track_id, clip_id) => {
print!("\r\x1b[K");
println!("Clip {} added to track {}", clip_id, track_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingStarted(track_id, clip_id) => {
print!("\r\x1b[K");
println!("Recording started on track {} (clip {})", track_id, clip_id);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingProgress(clip_id, duration) => {
print!("\r\x1b[K");
print!("Recording clip {}: {:.2}s", clip_id, duration);
io::stdout().flush().ok();
}
AudioEvent::RecordingStopped(clip_id, pool_index) => {
print!("\r\x1b[K");
println!("Recording stopped (clip {}, pool index {})", clip_id, pool_index);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::RecordingError(error) => {
print!("\r\x1b[K");
println!("Recording error: {}", error);
print!("> ");
io::stdout().flush().ok();
}
AudioEvent::ProjectReset => {
print!("\r\x1b[K");
println!("Project reset - all tracks and audio cleared");
// Clear the local track list
track_ids_clone.lock().unwrap().clear();
print!("> ");
io::stdout().flush().ok();
}
}
}
}
@@ -633,6 +683,35 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}
} else if input == "stats" || input == "buffers" {
controller.request_buffer_pool_stats();
} else if input.starts_with("autovolume ") {
// Parse: autovolume <track_id> <time> <value>
let parts: Vec<&str> = input.split_whitespace().collect();
if parts.len() == 4 {
if let (Ok(track_id), Ok(time), Ok(value)) =
(parts[1].parse::<u32>(), parts[2].parse::<f64>(), parts[3].parse::<f32>()) {
let ids = track_ids.lock().unwrap();
if ids.contains(&track_id) {
drop(ids);
// Create automation lane (if not exists, will be reused)
controller.create_automation_lane(track_id, ParameterId::TrackVolume);
// Add automation point (note: lane_id=0 is assumed, real app would track this)
controller.add_automation_point(track_id, 0, time, value, CurveType::Linear);
println!("Added volume automation point on track {} at {:.2}s: {:.2}", track_id, time, value);
} else {
println!("Invalid track ID. Available tracks: {:?}", *ids);
}
} else {
println!("Invalid format. Usage: autovolume <track_id> <time> <value>");
}
} else {
println!("Usage: autovolume <track_id> <time> <value>");
println!(" Example: autovolume 0 2.0 0.5 (set volume to 0.5 at 2 seconds)");
}
} else if input == "reset" {
controller.reset();
// Clear local clip info tracking
clip_info.clear();
println!("Resetting project...");
} else if input == "help" || input == "h" {
print_help();
} else {
@@ -688,6 +767,8 @@ fn print_help() {
println!(" (e.g. 'loadmidi 0 song.mid 0.0')");
println!("\nDiagnostics:");
println!(" stats, buffers - Show buffer pool statistics");
println!("\nProject Commands:");
println!(" reset - Clear all tracks and audio (reset to empty project)");
println!("\nOther:"); println!("\nOther:");
println!(" h, help - Show this help"); println!(" h, help - Show this help");
println!(" q, quit - Quit"); println!(" q, quit - Quit");