396 lines
13 KiB
TypeScript
396 lines
13 KiB
TypeScript
import { Switch } from "@headlessui/react";
|
|
import clsx from "clsx";
|
|
import React, { useEffect, useRef, useState } from "react";
|
|
import { BsPauseFill, BsPlayFill, BsScissors, BsTrash } from "react-icons/bs";
|
|
import { MdAllInclusive } from "react-icons/md";
|
|
import { BsFillFileEarmarkMusicFill } from "react-icons/bs";
|
|
import WaveSurfer from "wavesurfer.js";
|
|
// @ts-ignore
|
|
import RegionsPlugin from 'wavesurfer.js/dist/plugin/wavesurfer.regions.min.js';
|
|
import { toast } from "react-toastify";
|
|
|
|
// Props for the Waveform player/editor component.
interface Props {
  // URL (or object URL) of the audio to load into wavesurfer.
  audio: string;
  // Color of the unplayed portion of the waveform (passed to wavesurfer).
  waveColor: string;
  // Color of the played portion of the waveform (passed to wavesurfer).
  progressColor: string;
  // 'exercise' (default): compact play-only bar; 'edit': tall waveform with
  // drag-to-select region cutting enabled.
  variant?: 'exercise' | 'edit';
  // Optional setter used to report the active audio URL (full source or the
  // rendered cut) back to the parent — only meaningful for the 'edit' variant.
  setAudioUrl?: React.Dispatch<React.SetStateAction<string | undefined>>;
}
|
|
|
|
// A single selected region of the source audio.
interface AudioCut {
  // wavesurfer.js region id backing this cut (used to look it up in regions.list).
  id: string;
  // Region start offset, in seconds.
  start: number;
  // Region end offset, in seconds.
  end: number;
}
|
|
|
|
const Waveform = ({
|
|
audio,
|
|
waveColor,
|
|
progressColor,
|
|
variant = 'exercise',
|
|
setAudioUrl
|
|
}: Props) => {
|
|
const containerRef = useRef(null);
|
|
const previewContainerRef = useRef(null);
|
|
const waveSurferRef = useRef<WaveSurfer | null>(null);
|
|
const previewWaveSurferRef = useRef<WaveSurfer | null>(null);
|
|
const audioContextRef = useRef<AudioContext | null>(null);
|
|
const [isPlaying, setIsPlaying] = useState(false);
|
|
const [isPreviewPlaying, setIsPreviewPlaying] = useState(false);
|
|
const [currentCut, setCurrentCut] = useState<AudioCut | null>(null);
|
|
const [duration, setDuration] = useState<number>(0);
|
|
const [isProcessing, setIsProcessing] = useState(false);
|
|
const [cutAudioUrl, setCutAudioUrl] = useState<string | null>(null);
|
|
const [useFullAudio, setUseFullAudio] = useState(true);
|
|
|
|
const cleanupPreview = () => {
|
|
if (cutAudioUrl) {
|
|
URL.revokeObjectURL(cutAudioUrl);
|
|
setCutAudioUrl(null);
|
|
}
|
|
if (previewWaveSurferRef.current) {
|
|
previewWaveSurferRef.current.destroy();
|
|
previewWaveSurferRef.current = null;
|
|
}
|
|
setIsPreviewPlaying(false);
|
|
};
|
|
|
|
useEffect(() => {
|
|
const waveSurfer = WaveSurfer.create({
|
|
container: containerRef?.current || "",
|
|
responsive: true,
|
|
cursorWidth: 0,
|
|
height: variant === 'edit' ? 96 : 24,
|
|
waveColor,
|
|
progressColor,
|
|
barGap: 5,
|
|
barWidth: 8,
|
|
barRadius: 4,
|
|
fillParent: true,
|
|
hideScrollbar: true,
|
|
normalize: true,
|
|
autoCenter: true,
|
|
ignoreSilenceMode: true,
|
|
barMinHeight: 4,
|
|
plugins: variant === 'edit' ? [
|
|
RegionsPlugin.create({
|
|
dragSelection: true,
|
|
slop: 5
|
|
})
|
|
] : []
|
|
});
|
|
|
|
waveSurfer.load(audio);
|
|
|
|
waveSurfer.on("ready", () => {
|
|
waveSurferRef.current = waveSurfer;
|
|
setDuration(waveSurfer.getDuration());
|
|
});
|
|
|
|
waveSurfer.on("finish", () => setIsPlaying(false));
|
|
|
|
if (variant === 'edit') {
|
|
waveSurfer.on('region-created', (region) => {
|
|
const regions = waveSurfer.regions.list;
|
|
Object.keys(regions).forEach(id => {
|
|
if (id !== region.id) {
|
|
regions[id].remove();
|
|
}
|
|
});
|
|
cleanupPreview();
|
|
|
|
const newCut: AudioCut = {
|
|
id: region.id,
|
|
start: region.start,
|
|
end: region.end
|
|
};
|
|
setCurrentCut(newCut);
|
|
});
|
|
|
|
waveSurfer.on('region-updated', (region) => {
|
|
const updatedCut: AudioCut = {
|
|
id: region.id,
|
|
start: region.start,
|
|
end: region.end
|
|
};
|
|
setCurrentCut(updatedCut);
|
|
cleanupPreview();
|
|
});
|
|
}
|
|
|
|
return () => {
|
|
waveSurfer.destroy();
|
|
cleanupPreview();
|
|
if (audioContextRef.current?.state !== 'closed') {
|
|
audioContextRef.current?.close();
|
|
}
|
|
};
|
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
}, [audio, progressColor, waveColor, variant]);
|
|
|
|
useEffect(() => {
|
|
if (cutAudioUrl && previewContainerRef.current) {
|
|
const previewWaveSurfer = WaveSurfer.create({
|
|
container: previewContainerRef.current,
|
|
responsive: true,
|
|
cursorWidth: 0,
|
|
height: 48,
|
|
waveColor,
|
|
progressColor,
|
|
barGap: 5,
|
|
barWidth: 8,
|
|
barRadius: 4,
|
|
fillParent: true,
|
|
hideScrollbar: true,
|
|
normalize: true,
|
|
autoCenter: true,
|
|
barMinHeight: 4,
|
|
});
|
|
|
|
previewWaveSurfer.load(cutAudioUrl);
|
|
previewWaveSurfer.on("finish", () => setIsPreviewPlaying(false));
|
|
previewWaveSurferRef.current = previewWaveSurfer;
|
|
|
|
return () => {
|
|
previewWaveSurfer.destroy();
|
|
previewWaveSurferRef.current = null;
|
|
};
|
|
}
|
|
}, [cutAudioUrl, waveColor, progressColor]);
|
|
|
|
const handlePlayPause = () => {
|
|
setIsPlaying(prev => !prev);
|
|
waveSurferRef.current?.playPause();
|
|
};
|
|
|
|
const handlePreviewPlayPause = () => {
|
|
setIsPreviewPlaying(prev => !prev);
|
|
previewWaveSurferRef.current?.playPause();
|
|
};
|
|
|
|
const handleDeleteRegion = () => {
|
|
if (currentCut && waveSurferRef.current?.regions?.list[currentCut.id]) {
|
|
waveSurferRef.current.regions.list[currentCut.id].remove();
|
|
setCurrentCut(null);
|
|
cleanupPreview();
|
|
}
|
|
};
|
|
|
|
const applyCuts = async () => {
|
|
if (!waveSurferRef.current || !currentCut) return;
|
|
|
|
setIsProcessing(true);
|
|
try {
|
|
if (!audioContextRef.current) {
|
|
audioContextRef.current = new AudioContext();
|
|
}
|
|
|
|
const response = await fetch(audio);
|
|
const arrayBuffer = await response.arrayBuffer();
|
|
const originalBuffer = await audioContextRef.current.decodeAudioData(arrayBuffer);
|
|
|
|
const duration = currentCut.end - currentCut.start;
|
|
const newBuffer = audioContextRef.current.createBuffer(
|
|
originalBuffer.numberOfChannels,
|
|
Math.ceil(audioContextRef.current.sampleRate * duration),
|
|
audioContextRef.current.sampleRate
|
|
);
|
|
|
|
for (let channel = 0; channel < originalBuffer.numberOfChannels; channel++) {
|
|
const newChannelData = newBuffer.getChannelData(channel);
|
|
const originalChannelData = originalBuffer.getChannelData(channel);
|
|
|
|
const startSample = Math.floor(currentCut.start * audioContextRef.current.sampleRate);
|
|
const endSample = Math.floor(currentCut.end * audioContextRef.current.sampleRate);
|
|
const cutLength = endSample - startSample;
|
|
|
|
for (let i = 0; i < cutLength; i++) {
|
|
newChannelData[i] = originalChannelData[startSample + i];
|
|
}
|
|
}
|
|
|
|
const offlineContext = new OfflineAudioContext(
|
|
newBuffer.numberOfChannels,
|
|
newBuffer.length,
|
|
newBuffer.sampleRate
|
|
);
|
|
|
|
const source = offlineContext.createBufferSource();
|
|
source.buffer = newBuffer;
|
|
source.connect(offlineContext.destination);
|
|
source.start();
|
|
|
|
const renderedBuffer = await offlineContext.startRendering();
|
|
|
|
const wavBlob = await new Promise<Blob>((resolve) => {
|
|
const numberOfChannels = renderedBuffer.numberOfChannels;
|
|
const length = renderedBuffer.length * numberOfChannels * 2;
|
|
const buffer = new ArrayBuffer(44 + length);
|
|
const view = new DataView(buffer);
|
|
|
|
writeString(view, 0, 'RIFF');
|
|
view.setUint32(4, 36 + length, true);
|
|
writeString(view, 8, 'WAVE');
|
|
writeString(view, 12, 'fmt ');
|
|
view.setUint32(16, 16, true);
|
|
view.setUint16(20, 1, true);
|
|
view.setUint16(22, numberOfChannels, true);
|
|
view.setUint32(24, renderedBuffer.sampleRate, true);
|
|
view.setUint32(28, renderedBuffer.sampleRate * numberOfChannels * 2, true);
|
|
view.setUint16(32, numberOfChannels * 2, true);
|
|
view.setUint16(34, 16, true);
|
|
writeString(view, 36, 'data');
|
|
view.setUint32(40, length, true);
|
|
|
|
let offset = 44;
|
|
for (let i = 0; i < renderedBuffer.length; i++) {
|
|
for (let channel = 0; channel < numberOfChannels; channel++) {
|
|
const sample = renderedBuffer.getChannelData(channel)[i];
|
|
view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
|
|
offset += 2;
|
|
}
|
|
}
|
|
|
|
resolve(new Blob([buffer], { type: 'audio/wav' }));
|
|
});
|
|
|
|
const newUrl = URL.createObjectURL(wavBlob);
|
|
|
|
if (cutAudioUrl) {
|
|
URL.revokeObjectURL(cutAudioUrl);
|
|
}
|
|
setCutAudioUrl(newUrl);
|
|
setUseFullAudio(false);
|
|
setAudioUrl?.(newUrl);
|
|
|
|
} catch (error) {
|
|
console.error('Error applying cuts:', error);
|
|
} finally {
|
|
setIsProcessing(false);
|
|
}
|
|
};
|
|
|
|
const formatTime = (time: number) => {
|
|
const minutes = Math.floor(time / 60);
|
|
const seconds = Math.floor(time % 60);
|
|
return `${minutes}:${seconds.toString().padStart(2, '0')}`;
|
|
};
|
|
|
|
const writeString = (view: DataView, offset: number, string: string) => {
|
|
for (let i = 0; i < string.length; i++) {
|
|
view.setUint8(offset + i, string.charCodeAt(i));
|
|
}
|
|
};
|
|
|
|
const switchAudio = () => {
|
|
if (!cutAudioUrl) {
|
|
toast.info("Apply an audio cut first!");
|
|
} else {
|
|
setUseFullAudio(!useFullAudio);
|
|
setAudioUrl?.(useFullAudio ? audio : cutAudioUrl!)
|
|
}
|
|
}
|
|
|
|
return (
|
|
<div className="space-y-4">
|
|
<div className="flex items-center justify-between">
|
|
<div className="flex items-center gap-4">
|
|
{isPlaying ? (
|
|
<BsPauseFill
|
|
className="text-mti-gray-cool cursor-pointer w-5 h-5"
|
|
onClick={handlePlayPause}
|
|
/>
|
|
) : (
|
|
<BsPlayFill
|
|
className="text-mti-gray-cool cursor-pointer w-5 h-5"
|
|
onClick={handlePlayPause}
|
|
/>
|
|
)}
|
|
|
|
{variant === 'edit' && duration > 0 && (
|
|
<div className="text-sm text-gray-500">
|
|
Total Duration: {formatTime(duration)}
|
|
</div>
|
|
)}
|
|
</div>
|
|
{variant === 'edit' && (
|
|
<div className={clsx(
|
|
"flex items-center gap-3 px-3 py-1.5 text-sm text-white rounded-md w-36 justify-center",
|
|
useFullAudio ? "bg-green-600" : "bg-blue-600"
|
|
)}>
|
|
<BsFillFileEarmarkMusicFill className="w-4 h-4" />
|
|
<Switch
|
|
checked={useFullAudio}
|
|
onChange={() => switchAudio()}
|
|
className={clsx(
|
|
"relative inline-flex h-[30px] w-[58px] shrink-0 cursor-pointer rounded-full border-2 border-transparent transition-colors duration-200 ease-in-out focus:outline-none focus-visible:ring-2 focus-visible:ring-white/75",
|
|
useFullAudio ? "bg-green-200" : "bg-blue-200"
|
|
)}
|
|
>
|
|
<span
|
|
aria-hidden="true"
|
|
className={clsx(
|
|
"pointer-events-none inline-block h-[26px] w-[26px] transform rounded-full bg-white shadow-lg ring-0 transition duration-200 ease-in-out",
|
|
useFullAudio ? 'translate-x-7' : 'translate-x-0'
|
|
)}
|
|
/>
|
|
</Switch>
|
|
<BsScissors className="w-4 h-4" />
|
|
</div>
|
|
)}
|
|
</div>
|
|
|
|
<div className="w-full max-w-4xl h-fit" ref={containerRef} />
|
|
|
|
{variant === 'edit' && currentCut && (
|
|
<div className="space-y-2">
|
|
<div className="flex items-center justify-between">
|
|
<h3 className="font-medium text-gray-700">Selected Region</h3>
|
|
<button
|
|
onClick={applyCuts}
|
|
disabled={isProcessing}
|
|
className="flex items-center gap-2 px-3 py-1.5 text-sm text-white bg-blue-600 hover:bg-blue-700 disabled:bg-blue-300 rounded-md"
|
|
>
|
|
<BsScissors className="w-4 h-4" />
|
|
{isProcessing ? 'Processing...' : 'Apply Cut'}
|
|
</button>
|
|
</div>
|
|
<div className="flex items-center justify-between p-2 bg-gray-50 rounded-md">
|
|
<div className="text-sm text-gray-600">
|
|
{formatTime(currentCut.start)} - {formatTime(currentCut.end)}
|
|
</div>
|
|
<button
|
|
onClick={handleDeleteRegion}
|
|
className="p-1 text-red-500 hover:bg-red-50 rounded"
|
|
>
|
|
<BsTrash className="w-4 h-4" />
|
|
</button>
|
|
</div>
|
|
</div>
|
|
)}
|
|
|
|
{cutAudioUrl && (
|
|
<div className="mt-8 space-y-4 border-t pt-4">
|
|
<div className="flex items-center gap-4">
|
|
<h3 className="font-medium text-gray-700">Cut Preview</h3>
|
|
{isPreviewPlaying ? (
|
|
<BsPauseFill
|
|
className="text-mti-gray-cool cursor-pointer w-5 h-5"
|
|
onClick={handlePreviewPlayPause}
|
|
/>
|
|
) : (
|
|
<BsPlayFill
|
|
className="text-mti-gray-cool cursor-pointer w-5 h-5"
|
|
onClick={handlePreviewPlayPause}
|
|
/>
|
|
)}
|
|
</div>
|
|
<div className="w-full max-w-4xl h-fit" ref={previewContainerRef} />
|
|
</div>
|
|
)}
|
|
</div>
|
|
);
|
|
};
|
|
|
|
export default Waveform;
|