import audio and draw it on timeline

Skyler Lehmkuhl 2024-12-06 15:28:31 -05:00
parent b589885ed7
commit 5ed9c06156
5 changed files with 192 additions and 8 deletions

src/Tone.js Normal file
File diff suppressed because one or more lines are too long

@@ -7,6 +7,7 @@
<script src="coloris.js"></script>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Tauri App</title>
<script src="Tone.js"></script>
<script type="module" src="/simplify.js"></script>
<script type="module" src="/canvas2svg.js"></script>
<script src="/ffmpeg-mp4.js"></script>


@@ -3,7 +3,7 @@ import * as fitCurve from '/fit-curve.js';
import { Bezier } from "/bezier.js";
import { Quadtree } from './quadtree.js';
import { createNewFileDialog, showNewFileDialog, closeDialog } from './newfile.js';
import { titleCase, getMousePositionFraction, getKeyframesSurrounding, invertPixels, lerpColor, lerp, camelToWords, generateWaveform } from './utils.js';
const { writeTextFile, readTextFile, writeFile, readFile } = window.__TAURI__.fs;
const {
open: openFileDialog,
@@ -343,6 +343,44 @@ let actions = {
}
}
},
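// addAudio follows the app's action pattern: create() records the action and pushes it on the undo stack, execute() applies it, rollback() reverses it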
addAudio: {
create: (audiosrc, object) => {
redoStack.length = 0
let action = {
audiosrc: audiosrc,
uuid: uuidv4(),
frameNum: object.currentFrameNum,
object: object.idx
}
undoStack.push({name: 'addAudio', action: action})
actions.addAudio.execute(action)
updateMenu()
},
execute: async (action) => {
const player = new Tone.Player().toDestination();
await player.load(action.audiosrc)
// player.autostart = true;
let newAudioLayer = new AudioLayer()
let object = pointerList[action.object]
const img = new Image();
img.className = "audioWaveform"
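// updateLayers() appends this <img> to the audio layer's track, which is what draws the clip on the timeline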
let soundObj = {
player: player,
start: action.frameNum,
img: img
}
pointerList[action.uuid] = soundObj
newAudioLayer.sounds[action.uuid] = soundObj
object.audioLayers.push(newAudioLayer)
// TODO: compute image height better
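// 50 = waveform image height in px, 25 = px per timeline frame (matches the 25px frame grid in the layer-track CSS)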
generateWaveform(img, player.buffer, 50, 25, fileFps)
updateLayers()
},
rollback: (action) => {
// execute() always pushes a fresh AudioLayer holding only this sound,
// so undoing means dropping that layer and its pointerList entry
let object = pointerList[action.object]
object.audioLayers.pop()
delete pointerList[action.uuid]
updateLayers()
}
},
duplicateObject: {
create: (object) => {
redoStack.length = 0
@ -1044,6 +1082,30 @@ class Layer {
}
}
class AudioLayer {
constructor(uuid) {
this.sounds = {}
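// sounds maps uuid -> { player, start (frame number), img }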
if (!uuid) {
this.idx = uuidv4()
} else {
this.idx = uuid
}
}
copy() {
let newAudioLayer = new AudioLayer()
// this.sounds is a plain object, so iterate its values
for (let sound of Object.values(this.sounds)) {
let newPlayer = new Tone.Player(sound.player.buffer).toDestination()
let idx = uuidv4()
let soundObj = {
player: newPlayer,
start: sound.start,
// clone the waveform <img> so each copy owns its own DOM node
img: sound.img.cloneNode()
}
pointerList[idx] = soundObj
newAudioLayer.sounds[idx] = soundObj
}
return newAudioLayer
}
}
class BaseShape {
constructor(startx, starty) {
this.startx = startx
@@ -1454,6 +1516,7 @@ class GraphicsObject {
this.currentFrameNum = 0;
this.currentLayer = 0;
this.layers = [new Layer(uuid+"-L1")]
this.audioLayers = []
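// audio layers sit alongside the drawing layers but hold Tone.Player-backed sounds instead of frames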
// this.children = []
this.shapes = []
@@ -1787,6 +1850,9 @@ class GraphicsObject {
for (let layer of this.layers) {
newGO.layers.push(layer.copy())
}
for (let audioLayer of this.audioLayers) {
newGO.audioLayers.push(audioLayer.copy())
}
return newGO;
}
@@ -2303,6 +2369,7 @@ function stage() {
const imageTypes = ['image/png', 'image/gif', 'image/avif', 'image/jpeg',
'image/webp', //'image/svg+xml' // Disabling SVG until we can export them nicely
];
const audioTypes = ['audio/mpeg'] // TODO: figure out what other audio formats Tone.js accepts
if (e.dataTransfer.items) {
let i = 0
for (let item of e.dataTransfer.items) {
@@ -2318,17 +2385,22 @@
reader.onload = function(event) {
let imgsrc = event.target.result; // This is the data URL
actions.addImageObject.create(
mouse.x, mouse.y, imgsrc, reader.ix, context.activeObject);
};
reader.onerror = function(error) {
console.error("Error reading file as data URL", error);
};
} else if (audioTypes.includes(file.type)) {
let reader = new FileReader();
// Read the file as a data URL
reader.readAsDataURL(file);
reader.onload = function(event) {
let audiosrc = event.target.result;
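// the data URL goes straight to Tone.Player.load() in addAudio.execute()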
actions.addAudio.create(audiosrc, context.activeObject)
}
}
i++;
}
@@ -3045,6 +3117,21 @@ function updateLayers() {
layerTrack.appendChild(highlightObj)
}
}
for (let audioLayer of context.activeObject.audioLayers) {
let layerHeader = document.createElement("div")
layerHeader.className = "layer-header"
layerHeader.classList.add("audio")
layerspanel.appendChild(layerHeader)
let layerTrack = document.createElement("div")
layerTrack.className = "layer-track"
layerTrack.classList.add("audio")
framescontainer.appendChild(layerTrack)
console.log(audioLayer)
for (let i in audioLayer.sounds) {
let sound = audioLayer.sounds[i]
layerTrack.appendChild(sound.img)
}
}
}
}


@@ -327,6 +327,11 @@ button {
border-bottom: 1px solid #bbb;
flex-shrink: 0;
}
.layer-header.audio {
background-color: #8281cc;
border-top: 1px solid #9a99db;
border-bottom: 1px solid #817db9;
}
.layer-track {
min-width: 100%;
height: calc( 2 * var(--lineheight));
@@ -547,6 +552,11 @@ button {
border-top: 1px solid #4f4f4f;
border-bottom: 1px solid #222222;
}
.layer-header.audio {
background-color: #23253b;
border-top: 1px solid #403f4e;
border-bottom: 1px solid #1f1e24;
}
.layer-track {
background-image:
linear-gradient(to right, transparent 23px, #1a1a1a 23px 25px), /* Dark mode frame dividers */
@@ -583,4 +593,7 @@ button {
.horiz_break {
background-color: #2f2f2f;
}
.audioWaveform {
filter: invert(1);
}
}


@@ -130,4 +130,84 @@ function camelToWords(camelCaseString) {
return words.replace(/\b\w/g, char => char.toUpperCase());
}
function generateWaveform(img, buffer, imgHeight, frameWidth, framesPerSecond) {
// Total duration of the audio in seconds
const duration = buffer.duration;
const canvasWidth = Math.ceil(frameWidth * framesPerSecond * duration);
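// width = (px per frame) * (frames per second) * seconds, so the image spans exactly the clip's length in timeline frames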
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
canvas.width = canvasWidth;
canvas.height = imgHeight;
// Get the audio buffer's data (mono or stereo channels)
const channels = buffer.numberOfChannels;
const leftChannelData = buffer.getChannelData(0); // Left channel
const rightChannelData = channels > 1 ? buffer.getChannelData(1) : null; // Right channel, if stereo
const width = canvas.width;
const step = Math.ceil(leftChannelData.length / width); // Step size for drawing
const halfHeight = canvas.height / 2;
ctx.fillStyle = '#000';
function drawChannel(channelData) {
const samples = [];
// Draw the waveform by taking the maximum value of samples in each window
for (let i = 0; i < width; i++) {
let maxSample = -Infinity;
// Calculate the maximum value within the window
for (let j = i * step; j < (i + 1) * step && j < channelData.length; j++) {
maxSample = Math.max(maxSample, Math.abs(channelData[j])); // Find the maximum absolute sample
}
// Normalize and scale the max sample to fit within the canvas height
const y = maxSample * halfHeight;
samples.push([i, y]);
}
// Fill the waveform
if (samples.length > 0) {
ctx.beginPath();
ctx.moveTo(samples[0][0], samples[0][1]);
for (let i = 0; i < samples.length; i++) {
ctx.lineTo(samples[i][0], samples[i][1]);
}
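// retrace the envelope mirrored across the baseline so fill() closes a symmetric shape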
for (let i = samples.length - 1; i >= 0; i--) {
ctx.lineTo(samples[i][0], -samples[i][1]);
}
ctx.fill();
}
}
if (channels > 1) {
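// stereo: stack the two channels, centered at 1/4 and 3/4 of the canvas height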
ctx.save();
ctx.translate(0, halfHeight*0.5);
drawChannel(leftChannelData);
ctx.restore();
ctx.save();
ctx.translate(0, halfHeight*1.5);
drawChannel(rightChannelData);
ctx.restore();
} else {
ctx.save();
ctx.translate(0, halfHeight);
drawChannel(leftChannelData);
ctx.restore();
}
const dataUrl = canvas.toDataURL("image/png");
img.src = dataUrl;
}
export {
titleCase,
getMousePositionFraction,
getKeyframesSurrounding,
invertPixels,
lerp,
lerpColor,
camelToWords,
generateWaveform
};