background color

This commit is contained in:
Skyler Lehmkuhl 2026-03-01 06:37:10 -05:00
parent f8df4d1232
commit 520776c6e5
7 changed files with 416 additions and 71 deletions

View File

@ -5,6 +5,7 @@
use crate::action::Action;
use crate::document::Document;
use crate::shape::ShapeColor;
/// Individual property change for a document
#[derive(Clone, Debug)]
@ -13,18 +14,14 @@ pub enum DocumentPropertyChange {
Height(f64),
Duration(f64),
Framerate(f64),
BackgroundColor(ShapeColor),
}
impl DocumentPropertyChange {
/// Extract the f64 value from any variant
///
/// NOTE(review): this match only covers Width/Height/Duration/Framerate.
/// It does not handle a BackgroundColor variant (which carries a ShapeColor,
/// not an f64), so it is only valid while every variant wraps an f64 —
/// confirm against the current enum definition.
fn value(&self) -> f64 {
match self {
DocumentPropertyChange::Width(v) => *v,
DocumentPropertyChange::Height(v) => *v,
DocumentPropertyChange::Duration(v) => *v,
DocumentPropertyChange::Framerate(v) => *v,
}
}
/// Stored old value for undo (either f64 or color)
///
/// Captured on the first `execute` so `rollback` can restore the
/// document property to its previous state.
#[derive(Clone, Debug)]
enum OldValue {
// Previous numeric value (width, height, duration, or framerate).
F64(f64),
// Previous background color of the document.
Color(ShapeColor),
}
/// Action that sets a property on the document
@ -32,7 +29,7 @@ pub struct SetDocumentPropertiesAction {
/// The new property value
property: DocumentPropertyChange,
/// The old value for undo
old_value: Option<f64>,
old_value: Option<OldValue>,
}
impl SetDocumentPropertiesAction {
@ -68,41 +65,53 @@ impl SetDocumentPropertiesAction {
}
}
/// Read the document field corresponding to `self.property`.
///
/// Returns the current value so it can be stored for undo before the
/// new value is applied.
///
/// NOTE(review): only the four f64-backed variants are matched here;
/// a BackgroundColor variant (ShapeColor) cannot be represented by the
/// f64 return type — confirm this function is not reachable for it.
fn get_current_value(&self, document: &Document) -> f64 {
match &self.property {
DocumentPropertyChange::Width(_) => document.width,
DocumentPropertyChange::Height(_) => document.height,
DocumentPropertyChange::Duration(_) => document.duration,
DocumentPropertyChange::Framerate(_) => document.framerate,
}
}
fn apply_value(&self, document: &mut Document, value: f64) {
match &self.property {
DocumentPropertyChange::Width(_) => document.width = value,
DocumentPropertyChange::Height(_) => document.height = value,
DocumentPropertyChange::Duration(_) => document.duration = value,
DocumentPropertyChange::Framerate(_) => document.framerate = value,
/// Build an action that changes the document's background color.
///
/// `old_value` starts as `None`; the previous color is captured lazily
/// on the first `execute` so `rollback` can restore it.
pub fn set_background_color(color: ShapeColor) -> Self {
    Self {
        old_value: None,
        property: DocumentPropertyChange::BackgroundColor(color),
    }
}
}
impl Action for SetDocumentPropertiesAction {
fn execute(&mut self, document: &mut Document) -> Result<(), String> {
// Store old value if not already stored
if self.old_value.is_none() {
self.old_value = Some(self.get_current_value(document));
self.old_value = Some(match &self.property {
DocumentPropertyChange::Width(_) => OldValue::F64(document.width),
DocumentPropertyChange::Height(_) => OldValue::F64(document.height),
DocumentPropertyChange::Duration(_) => OldValue::F64(document.duration),
DocumentPropertyChange::Framerate(_) => OldValue::F64(document.framerate),
DocumentPropertyChange::BackgroundColor(_) => OldValue::Color(document.background_color),
});
}
// Apply new value
let new_value = self.property.value();
self.apply_value(document, new_value);
match &self.property {
DocumentPropertyChange::Width(v) => document.width = *v,
DocumentPropertyChange::Height(v) => document.height = *v,
DocumentPropertyChange::Duration(v) => document.duration = *v,
DocumentPropertyChange::Framerate(v) => document.framerate = *v,
DocumentPropertyChange::BackgroundColor(c) => document.background_color = *c,
}
Ok(())
}
fn rollback(&mut self, document: &mut Document) -> Result<(), String> {
if let Some(old_value) = self.old_value {
self.apply_value(document, old_value);
match &self.old_value {
Some(OldValue::F64(v)) => {
let v = *v;
match &self.property {
DocumentPropertyChange::Width(_) => document.width = v,
DocumentPropertyChange::Height(_) => document.height = v,
DocumentPropertyChange::Duration(_) => document.duration = v,
DocumentPropertyChange::Framerate(_) => document.framerate = v,
DocumentPropertyChange::BackgroundColor(_) => {}
}
}
Some(OldValue::Color(c)) => {
document.background_color = *c;
}
None => {}
}
Ok(())
}
@ -113,6 +122,7 @@ impl Action for SetDocumentPropertiesAction {
DocumentPropertyChange::Height(_) => "canvas height",
DocumentPropertyChange::Duration(_) => "duration",
DocumentPropertyChange::Framerate(_) => "framerate",
DocumentPropertyChange::BackgroundColor(_) => "background color",
};
format!("Set {}", property_name)
}

View File

@ -747,6 +747,7 @@ pub fn render_frame_to_rgba_hdr(
base_transform,
image_cache,
video_manager,
None, // No webcam during export
);
// Buffer specs for layer rendering
@ -1132,6 +1133,7 @@ pub fn render_frame_to_gpu_rgba(
base_transform,
image_cache,
video_manager,
None, // No webcam during export
);
// Buffer specs for layer rendering

View File

@ -760,6 +760,14 @@ struct EditorApp {
audio_channels: u32,
// Video decoding and management
video_manager: std::sync::Arc<std::sync::Mutex<lightningbeam_core::video::VideoManager>>, // Shared video manager
// Webcam capture state
webcam: Option<lightningbeam_core::webcam::WebcamCapture>,
/// Latest polled webcam frame (updated each frame for preview)
webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>,
/// Pending webcam recording command (set by timeline, processed in update)
webcam_record_command: Option<panes::WebcamRecordCommand>,
/// Layer being recorded to via webcam
webcam_recording_layer_id: Option<Uuid>,
// Track ID mapping (Document layer UUIDs <-> daw-backend TrackIds)
layer_to_track_map: HashMap<Uuid, daw_backend::TrackId>,
track_to_layer_map: HashMap<daw_backend::TrackId, Uuid>,
@ -1013,6 +1021,10 @@ impl EditorApp {
video_manager: std::sync::Arc::new(std::sync::Mutex::new(
lightningbeam_core::video::VideoManager::new()
)),
webcam: None,
webcam_frame: None,
webcam_record_command: None,
webcam_recording_layer_id: None,
layer_to_track_map: HashMap::new(),
track_to_layer_map: HashMap::new(),
clip_to_metatrack_map: HashMap::new(),
@ -1341,7 +1353,8 @@ impl EditorApp {
document.root.add_child(AnyLayer::Vector(layer))
}
1 => {
// Video editing focus -> VideoLayer
// Video editing focus -> VideoLayer + black background
document.background_color = lightningbeam_core::shape::ShapeColor::rgb(0, 0, 0);
let layer = VideoLayer::new("Video 1");
document.root.add_child(AnyLayer::Video(layer))
}
@ -3893,6 +3906,44 @@ impl eframe::App for EditorApp {
self.handle_audio_extraction_result(result);
}
// Webcam management: open/close based on camera_enabled layers, poll frames
{
let any_camera_enabled = self.action_executor.document().root.children.iter().any(|layer| {
if let lightningbeam_core::layer::AnyLayer::Video(v) = layer {
v.camera_enabled
} else {
false
}
});
if any_camera_enabled && self.webcam.is_none() {
// Try to open the default camera
if let Some(device) = lightningbeam_core::webcam::default_camera() {
match lightningbeam_core::webcam::WebcamCapture::open(&device) {
Ok(cam) => {
eprintln!("[WEBCAM] Opened camera: {}", device.name);
self.webcam = Some(cam);
}
Err(e) => {
eprintln!("[WEBCAM] Failed to open camera: {}", e);
}
}
}
} else if !any_camera_enabled && self.webcam.is_some() {
eprintln!("[WEBCAM] Closing camera (no layers with camera enabled)");
self.webcam = None;
self.webcam_frame = None;
}
// Poll latest frame from webcam
if let Some(webcam) = &mut self.webcam {
if let Some(frame) = webcam.poll_frame() {
self.webcam_frame = Some(frame.clone());
ctx.request_repaint(); // Keep repainting while camera is active
}
}
}
// Check for native menu events (macOS)
if let Some(menu_system) = &self.menu_system {
if let Some(action) = menu_system.check_events() {
@ -4861,6 +4912,8 @@ impl eframe::App for EditorApp {
.map(|g| g.thumbnail_cache())
.unwrap_or(&empty_thumbnail_cache),
effect_thumbnails_to_invalidate: &mut self.effect_thumbnails_to_invalidate,
webcam_frame: self.webcam_frame.clone(),
webcam_record_command: &mut self.webcam_record_command,
target_format: self.target_format,
pending_menu_actions: &mut pending_menu_actions,
clipboard_manager: &mut self.clipboard_manager,
@ -4960,6 +5013,157 @@ impl eframe::App for EditorApp {
self.handle_menu_action(action);
}
// Process webcam recording commands from timeline
if let Some(cmd) = self.webcam_record_command.take() {
match cmd {
panes::WebcamRecordCommand::Start { layer_id } => {
// Ensure webcam is open
if self.webcam.is_none() {
if let Some(device) = lightningbeam_core::webcam::default_camera() {
match lightningbeam_core::webcam::WebcamCapture::open(&device) {
Ok(cam) => {
eprintln!("[WEBCAM] Opened camera for recording: {}", device.name);
self.webcam = Some(cam);
}
Err(e) => {
eprintln!("[WEBCAM] Failed to open camera for recording: {}", e);
}
}
}
}
if let Some(webcam) = &mut self.webcam {
// Generate output path in project directory or temp
let recording_dir = if let Some(ref file_path) = self.current_file_path {
file_path.parent().unwrap_or(std::path::Path::new(".")).to_path_buf()
} else {
std::env::temp_dir()
};
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
let codec = lightningbeam_core::webcam::RecordingCodec::H264; // TODO: read from preferences
let ext = match codec {
lightningbeam_core::webcam::RecordingCodec::H264 => "mp4",
lightningbeam_core::webcam::RecordingCodec::Lossless => "mkv",
};
let recording_path = recording_dir.join(format!("webcam_recording_{}.{}", timestamp, ext));
match webcam.start_recording(recording_path, codec) {
Ok(()) => {
self.webcam_recording_layer_id = Some(layer_id);
eprintln!("[WEBCAM] Recording started");
}
Err(e) => {
eprintln!("[WEBCAM] Failed to start recording: {}", e);
}
}
}
}
panes::WebcamRecordCommand::Stop => {
if let Some(webcam) = &mut self.webcam {
match webcam.stop_recording() {
Ok(result) => {
let file_path_str = result.file_path.to_string_lossy().to_string();
eprintln!("[WEBCAM] Recording saved to: {}", file_path_str);
// Create VideoClip + ClipInstance from recorded file
if let Some(layer_id) = self.webcam_recording_layer_id.take() {
match lightningbeam_core::video::probe_video(&file_path_str) {
Ok(info) => {
use lightningbeam_core::clip::{VideoClip, ClipInstance};
let clip = VideoClip {
id: Uuid::new_v4(),
name: result.file_path.file_name()
.and_then(|n| n.to_str())
.unwrap_or("Webcam Recording")
.to_string(),
file_path: file_path_str.clone(),
width: info.width as f64,
height: info.height as f64,
duration: info.duration,
frame_rate: info.fps,
linked_audio_clip_id: None,
folder_id: None,
};
let clip_id = clip.id;
let duration = clip.duration;
self.action_executor.document_mut().video_clips.insert(clip_id, clip);
let mut clip_instance = ClipInstance::new(clip_id)
.with_timeline_start(self.recording_start_time)
.with_timeline_duration(duration);
// Scale to fit document and center (like drag-dropped videos)
{
let doc = self.action_executor.document();
let video_width = info.width as f64;
let video_height = info.height as f64;
let scale_x = doc.width / video_width;
let scale_y = doc.height / video_height;
let uniform_scale = scale_x.min(scale_y);
clip_instance.transform.scale_x = uniform_scale;
clip_instance.transform.scale_y = uniform_scale;
let scaled_w = video_width * uniform_scale;
let scaled_h = video_height * uniform_scale;
clip_instance.transform.x = (doc.width - scaled_w) / 2.0;
clip_instance.transform.y = (doc.height - scaled_h) / 2.0;
}
if let Some(layer) = self.action_executor.document_mut().get_layer_mut(&layer_id) {
if let lightningbeam_core::layer::AnyLayer::Video(video_layer) = layer {
video_layer.clip_instances.push(clip_instance);
}
}
// Load into video manager for playback
// Use the video's native dimensions so decoded frames
// match the VideoClip width/height the renderer uses
// for the display rect.
{
let mut vm = self.video_manager.lock().unwrap();
if let Err(e) = vm.load_video(clip_id, file_path_str, info.width, info.height) {
eprintln!("[WEBCAM] Failed to load recorded video: {}", e);
}
}
// Generate thumbnails in background
let vm_clone = Arc::clone(&self.video_manager);
std::thread::spawn(move || {
// Build keyframe index first
{
let vm = vm_clone.lock().unwrap();
if let Err(e) = vm.build_keyframe_index(&clip_id) {
eprintln!("[WEBCAM] Failed to build keyframe index: {e}");
}
}
// Generate thumbnails
{
let mut vm = vm_clone.lock().unwrap();
if let Err(e) = vm.generate_thumbnails(&clip_id, duration) {
eprintln!("[WEBCAM] Failed to generate thumbnails: {e}");
}
}
});
eprintln!("[WEBCAM] Created video clip: {:.1}s @ {:.1}fps", duration, info.fps);
}
Err(e) => {
eprintln!("[WEBCAM] Failed to probe recorded video: {}", e);
}
}
}
}
Err(e) => {
eprintln!("[WEBCAM] Failed to stop recording: {}", e);
self.webcam_recording_layer_id = None;
}
}
}
self.is_recording = false;
self.recording_layer_id = None;
}
}
}
// Process editing context navigation (enter/exit movie clips)
if let Some((clip_id, instance_id, parent_layer_id)) = pending_enter_clip {
let entry = EditingContextEntry {
@ -5231,6 +5435,10 @@ struct RenderContext<'a> {
effect_thumbnail_cache: &'a HashMap<Uuid, Vec<u8>>,
/// Effect IDs whose thumbnails should be invalidated
effect_thumbnails_to_invalidate: &'a mut Vec<Uuid>,
/// Latest webcam capture frame (None if no camera active)
webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>,
/// Pending webcam recording command
webcam_record_command: &'a mut Option<panes::WebcamRecordCommand>,
/// Surface texture format for GPU rendering (Rgba8Unorm or Bgra8Unorm depending on platform)
target_format: wgpu::TextureFormat,
/// Menu actions queued by panes (e.g. context menus), processed after rendering
@ -5739,6 +5947,8 @@ fn render_pane(
effect_thumbnail_requests: ctx.effect_thumbnail_requests,
effect_thumbnail_cache: ctx.effect_thumbnail_cache,
effect_thumbnails_to_invalidate: ctx.effect_thumbnails_to_invalidate,
webcam_frame: ctx.webcam_frame.clone(),
webcam_record_command: ctx.webcam_record_command,
target_format: ctx.target_format,
pending_menu_actions: ctx.pending_menu_actions,
clipboard_manager: ctx.clipboard_manager,
@ -5827,6 +6037,8 @@ fn render_pane(
effect_thumbnail_requests: ctx.effect_thumbnail_requests,
effect_thumbnail_cache: ctx.effect_thumbnail_cache,
effect_thumbnails_to_invalidate: ctx.effect_thumbnails_to_invalidate,
webcam_frame: ctx.webcam_frame.clone(),
webcam_record_command: ctx.webcam_record_command,
target_format: ctx.target_format,
pending_menu_actions: ctx.pending_menu_actions,
clipboard_manager: ctx.clipboard_manager,

View File

@ -481,6 +481,19 @@ impl InfopanelPane {
}
});
// Background color
ui.horizontal(|ui| {
ui.label("Background:");
let bg = document.background_color;
let mut color = [bg.r, bg.g, bg.b];
if ui.color_edit_button_srgb(&mut color).changed() {
let action = SetDocumentPropertiesAction::set_background_color(
ShapeColor::rgb(color[0], color[1], color[2]),
);
shared.pending_actions.push(Box::new(action));
}
});
// Layer count (read-only)
ui.horizontal(|ui| {
ui.label("Layers:");

View File

@ -55,6 +55,15 @@ pub struct DraggingAsset {
pub linked_audio_clip_id: Option<Uuid>,
}
/// Command for webcam recording (issued by timeline, processed by main)
///
/// The timeline pane queues one of these into `SharedPaneState`; main.rs
/// drains and executes it after rendering, since only main owns the
/// webcam capture handle.
#[derive(Debug)]
pub enum WebcamRecordCommand {
/// Start recording on the given video layer
Start { layer_id: uuid::Uuid },
/// Stop current webcam recording
Stop,
}
pub mod toolbar;
pub mod stage;
pub mod timeline;
@ -221,6 +230,10 @@ pub struct SharedPaneState<'a> {
pub effect_thumbnail_cache: &'a std::collections::HashMap<Uuid, Vec<u8>>,
/// Effect IDs whose thumbnails should be invalidated (e.g., after shader edit)
pub effect_thumbnails_to_invalidate: &'a mut Vec<Uuid>,
/// Latest webcam capture frame (None if no camera is active)
pub webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>,
/// Pending webcam recording commands (processed by main.rs after render)
pub webcam_record_command: &'a mut Option<WebcamRecordCommand>,
/// Surface texture format for GPU rendering (Rgba8Unorm or Bgra8Unorm depending on platform)
pub target_format: wgpu::TextureFormat,
/// Menu actions queued by panes (e.g. context menu items), processed by main after rendering

View File

@ -388,6 +388,8 @@ struct VelloRenderContext {
region_selection: Option<lightningbeam_core::selection::RegionSelection>,
/// Mouse position in document-local (clip-local) world coordinates, for hover hit testing
mouse_world_pos: Option<vello::kurbo::Point>,
/// Latest webcam frame for live preview (if any camera is active)
webcam_frame: Option<lightningbeam_core::webcam::CaptureFrame>,
}
/// Callback for Vello rendering within egui
@ -475,6 +477,7 @@ impl egui_wgpu::CallbackTrait for VelloCallback {
camera_transform,
&mut image_cache,
&shared.video_manager,
self.ctx.webcam_frame.as_ref(),
);
drop(image_cache);
@ -6961,6 +6964,7 @@ impl PaneRenderer for StagePane {
editing_parent_layer_id: shared.editing_parent_layer_id,
region_selection: shared.region_selection.clone(),
mouse_world_pos,
webcam_frame: shared.webcam_frame.clone(),
}};
let cb = egui_wgpu::Callback::new_paint_callback(

View File

@ -137,6 +137,13 @@ enum TimeDisplayFormat {
Measures,
}
/// Type of recording in progress (for stop logic dispatch)
///
/// Determined from the recording layer's kind when stopping: audio layers
/// map to Audio/Midi, video layers with camera enabled map to Webcam.
enum RecordingType {
// Plain audio-input recording on an audio layer.
Audio,
// MIDI recording on an audio layer of MIDI type.
Midi,
// Camera capture on a video layer with camera enabled.
Webcam,
}
pub struct TimelinePane {
/// Horizontal zoom level (pixels per second)
pixels_per_second: f32,
@ -260,7 +267,7 @@ impl TimelinePane {
}
/// Toggle recording on/off
/// In Auto mode, records to the active audio layer
/// In Auto mode, records to the active layer (audio or video with camera)
fn toggle_recording(&mut self, shared: &mut SharedPaneState) {
if *shared.is_recording {
// Stop recording
@ -271,7 +278,7 @@ impl TimelinePane {
}
}
/// Start recording on the active audio layer
/// Start recording on the active layer (audio or video with camera)
fn start_recording(&mut self, shared: &mut SharedPaneState) {
use lightningbeam_core::clip::{AudioClip, ClipInstance};
@ -280,6 +287,44 @@ impl TimelinePane {
return;
};
// Check if this is a video layer with camera enabled
let is_video_camera = {
let document = shared.action_executor.document();
let context_layers = document.context_layers(shared.editing_clip_id.as_ref());
context_layers.iter().copied()
.find(|l| l.id() == active_layer_id)
.map(|layer| {
if let AnyLayer::Video(v) = layer {
v.camera_enabled
} else {
false
}
})
.unwrap_or(false)
};
if is_video_camera {
// Issue webcam recording start command (processed by main.rs)
*shared.webcam_record_command = Some(super::WebcamRecordCommand::Start {
layer_id: active_layer_id,
});
*shared.is_recording = true;
*shared.recording_start_time = *shared.playback_time;
*shared.recording_layer_id = Some(active_layer_id);
// Auto-start playback for recording
if !*shared.is_playing {
if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
controller.play();
*shared.is_playing = true;
println!("▶ Auto-started playback for webcam recording");
}
}
println!("📹 Started webcam recording on layer {}", active_layer_id);
return;
}
// Get layer type (copy it so we can drop the document borrow before mutating)
let layer_type = {
let document = shared.action_executor.document();
@ -362,27 +407,42 @@ impl TimelinePane {
/// Stop the current recording
fn stop_recording(&mut self, shared: &mut SharedPaneState) {
// Determine if this is MIDI or audio recording by checking the layer type
let is_midi_recording = if let Some(layer_id) = *shared.recording_layer_id {
// Determine recording type by checking the layer
let recording_type = if let Some(layer_id) = *shared.recording_layer_id {
let context_layers = shared.action_executor.document().context_layers(shared.editing_clip_id.as_ref());
context_layers.iter().copied()
.find(|l| l.id() == layer_id)
.map(|layer| {
if let lightningbeam_core::layer::AnyLayer::Audio(audio_layer) = layer {
matches!(audio_layer.audio_layer_type, lightningbeam_core::layer::AudioLayerType::Midi)
match layer {
lightningbeam_core::layer::AnyLayer::Audio(audio_layer) => {
if matches!(audio_layer.audio_layer_type, lightningbeam_core::layer::AudioLayerType::Midi) {
RecordingType::Midi
} else {
false
RecordingType::Audio
}
}
lightningbeam_core::layer::AnyLayer::Video(v) if v.camera_enabled => {
RecordingType::Webcam
}
_ => RecordingType::Audio,
}
})
.unwrap_or(false)
.unwrap_or(RecordingType::Audio)
} else {
false
RecordingType::Audio
};
match recording_type {
RecordingType::Webcam => {
// Issue webcam stop command (processed by main.rs)
*shared.webcam_record_command = Some(super::WebcamRecordCommand::Stop);
println!("📹 Stopped webcam recording");
}
_ => {
if let Some(controller_arc) = shared.audio_controller {
let mut controller = controller_arc.lock().unwrap();
if is_midi_recording {
if matches!(recording_type, RecordingType::Midi) {
controller.stop_midi_recording();
println!("🎹 Stopped MIDI recording");
} else {
@ -390,6 +450,8 @@ impl TimelinePane {
println!("🎤 Stopped audio recording");
}
}
}
}
// Note: Don't clear recording_layer_id here!
// The RecordingStopped/MidiRecordingStopped event handler in main.rs
@ -957,9 +1019,28 @@ impl TimelinePane {
let is_soloed = layer.soloed();
let is_locked = layer.locked();
// Mute button
// TODO: Replace with SVG icon (volume-up-fill.svg / volume-mute.svg)
let mute_response = ui.scope_builder(egui::UiBuilder::new().max_rect(mute_button_rect), |ui| {
// Mute button — or camera toggle for video layers
let is_video_layer = matches!(layer, lightningbeam_core::layer::AnyLayer::Video(_));
let camera_enabled = if let lightningbeam_core::layer::AnyLayer::Video(v) = layer {
v.camera_enabled
} else {
false
};
let first_btn_response = ui.scope_builder(egui::UiBuilder::new().max_rect(mute_button_rect), |ui| {
if is_video_layer {
// Camera toggle for video layers
let cam_text = if camera_enabled { "📹" } else { "📷" };
let button = egui::Button::new(cam_text)
.fill(if camera_enabled {
egui::Color32::from_rgba_unmultiplied(100, 200, 100, 100)
} else {
egui::Color32::from_gray(40)
})
.stroke(egui::Stroke::NONE);
ui.add(button)
} else {
// Mute button for non-video layers
let mute_text = if is_muted { "🔇" } else { "🔊" };
let button = egui::Button::new(mute_text)
.fill(if is_muted {
@ -969,10 +1050,19 @@ impl TimelinePane {
})
.stroke(egui::Stroke::NONE);
ui.add(button)
}
}).inner;
if mute_response.clicked() {
if first_btn_response.clicked() {
self.layer_control_clicked = true;
if is_video_layer {
pending_actions.push(Box::new(
lightningbeam_core::actions::SetLayerPropertiesAction::new(
layer_id,
lightningbeam_core::actions::LayerProperty::CameraEnabled(!camera_enabled),
)
));
} else {
pending_actions.push(Box::new(
lightningbeam_core::actions::SetLayerPropertiesAction::new(
layer_id,
@ -980,6 +1070,7 @@ impl TimelinePane {
)
));
}
}
// Solo button
// TODO: Replace with SVG headphones icon