<script setup lang="ts">
import { useWaveform } from '@/audio/AudioWaveform';
import { unrefElement, useResizeObserver, useThrottleFn } from '@vueuse/core';
import { shallowRef, useTemplateRef, watchEffect } from 'vue';

const {
  buffer,
} = defineProps<{
  buffer: AudioBuffer,
}>();
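
// Hypothetical usage from a parent component (the tag name below is just an
// illustration; only the `buffer` prop is defined here):
//
//   <Waveform :buffer="decodedBuffer" />
//
// where `decodedBuffer` is an AudioBuffer, e.g. from AudioContext.decodeAudioData().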

const canvas = useTemplateRef('canvas');
const canvasWidth = shallowRef(0);

// TODO: only render what's visible on the timeline.
// Currently, at max zoom the canvas may exceed 32_000 px in width, which the browser refuses to render.

const waveform = useWaveform(() => buffer, canvasWidth);
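
// `useWaveform` is an app-local composable (implementation not shown here). Based
// on how it is consumed below, it exposes reactive `peaks` (a Float32Array of
// per-column peak amplitudes) and `isDone` (whether analysis has finished),
// recomputed for the width given by `canvasWidth`.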

const resizeObserver: globalThis.ResizeObserverCallback = (entries: ResizeObserverEntry[], observer: ResizeObserver) => {
  const c = unrefElement(canvas);
  if (!c) return;

  const ctx = c.getContext("2d");
  if (!ctx) return;

  const entry = entries.find(entry => entry.target === c);
  if (!entry) return;

  // get the size from the ResizeObserverEntry (contentRect) and handle
  // devicePixelRatio so the canvas looks sharp on HiDPI screens
  const rect = entry.contentRect || c.getBoundingClientRect();
  const cssWidth = rect.width;
  const cssHeight = rect.height;
  const dpr = window.devicePixelRatio || 1;

  // set internal canvas size in device pixels
  c.width = Math.max(1, Math.round(cssWidth * dpr));
  c.height = Math.max(1, Math.round(cssHeight * dpr));

  canvasWidth.value = c.width;
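  // Note: this is the width in device pixels; it is what useWaveform receives
  // through `canvasWidth`, so presumably one peak is computed per canvas column.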

  redraw(waveform.isDone.value, waveform.peaks.value);
};

let peakHeights = new Uint32Array(0);
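// peakHeights is reused between redraws and only reallocated below when the
// canvas width actually changes (presumably to avoid reallocating it every frame).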

const redraw = useThrottleFn((isDone: boolean, peaks: Float32Array) => {
  const c = unrefElement(canvas);
  if (!c) return;

  const ctx = c.getContext("2d");
  if (!ctx) return;

  const width = c.width;
  const halfHeight = Math.floor(c.height / 2);

  if (peakHeights.length !== width) {
    peakHeights = new Uint32Array(width);
  }

  const scale = 1.75;
  for (let x = 0; x < width; x += 1) {
    // audio tracks are normalized to a peak of -14 dBFS, so stretch them up so
    // they take up a reasonable amount of vertical space
    const peakHeight = Math.min(1, (peaks[x] ?? 0) * scale);
    const height = Math.round(peakHeight * halfHeight);
    peakHeights[x] = height;
  }
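
  // For reference: -14 dBFS is a linear amplitude of 10^(-14/20) ≈ 0.2, so a
  // peak at that level lands at roughly 0.2 * 1.75 ≈ 0.35 of halfHeight.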

  ctx.save();
  ctx.clearRect(0, 0, c.width, c.height);

  ctx.fillStyle = "#ffffffd8";
  ctx.strokeStyle = "transparent";

  // fill first, slanted outline next
  for (let x = 0; x < width; x += 1) {
    const height = peakHeights[x]!;
    // draw vertically centered
    const y = Math.round(halfHeight - height);
    ctx.fillRect(x, y, 1, height * 2);
  }

  // outline
  ctx.fillStyle = "transparent";
  ctx.strokeStyle = "#00000080";
  ctx.lineWidth = 1;

  ctx.beginPath();
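  // A single stroked path traces both halves of the envelope: sign = -1 walks
  // the upper edge above the center line, sign = +1 the mirrored lower edge.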
  for (const sign of [-1, 1]) {
    // start each half at the first column's peak height
    ctx.moveTo(0, sign * (peakHeights[0] ?? 0) + halfHeight);

    for (let x = 1; x < width; x += 1) {
      const height = peakHeights[x]!;
      const y = sign * height + halfHeight;
      ctx.lineTo(x, y);
    }
  }
  ctx.stroke();

  // middle line
  ctx.fillStyle = "#a1a998";
  ctx.fillRect(0, Math.round(halfHeight), c.width, 1);

  ctx.restore();
}, 0);

useResizeObserver(canvas, resizeObserver);
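
// Re-render whenever the analysis publishes new peaks or finishes. The 'sync'
// flush runs the effect immediately when those refs change instead of waiting
// for the next flush of the scheduler queue.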
watchEffect(() => {
  redraw(waveform.isDone.value, waveform.peaks.value);
}, { flush: 'sync' });
</script>

<template>
  <canvas ref="canvas" class="tw:size-full">
  </canvas>
</template>

<style scoped></style>