import { Switch } from "@headlessui/react";
import clsx from "clsx";
import React, { useEffect, useRef, useState } from "react";
import { BsPauseFill, BsPlayFill, BsScissors, BsTrash } from "react-icons/bs";
import { MdAllInclusive } from "react-icons/md";
import { BsFillFileEarmarkMusicFill } from "react-icons/bs";
import WaveSurfer from "wavesurfer.js";
// @ts-ignore — the regions plugin ships without type declarations
import RegionsPlugin from 'wavesurfer.js/dist/plugin/wavesurfer.regions.min.js';
import { toast } from "react-toastify";

interface Props {
  /** URL of the audio file to visualize. */
  audio: string;
  /** Waveform bar color. */
  waveColor: string;
  /** Played-portion color. */
  progressColor: string;
  /** 'exercise' = compact read-only player; 'edit' = tall waveform with region cutting. */
  variant?: 'exercise' | 'edit';
  /** Optional callback so a parent can receive the currently-active audio URL (full or cut). */
  setAudioUrl?: React.Dispatch<React.SetStateAction<string>>;
}

/** A single selected region on the waveform, in seconds. */
interface AudioCut {
  id: string;
  start: number;
  end: number;
}

/**
 * Audio waveform player built on wavesurfer.js.
 *
 * In 'edit' mode the user can drag-select one region, preview it, and render
 * it to a standalone WAV blob (via Web Audio), then toggle between the full
 * audio and the cut with a switch.
 */
const Waveform = ({ audio, waveColor, progressColor, variant = 'exercise', setAudioUrl }: Props) => {
  const containerRef = useRef<HTMLDivElement | null>(null);
  const previewContainerRef = useRef<HTMLDivElement | null>(null);
  const waveSurferRef = useRef<WaveSurfer | null>(null);
  const previewWaveSurferRef = useRef<WaveSurfer | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  // Mirrors cutAudioUrl so event handlers registered once (in the mount
  // effect) can always revoke the *latest* object URL. Without this, the
  // handlers close over the initial null value and object URLs leak.
  const cutAudioUrlRef = useRef<string | null>(null);

  const [isPlaying, setIsPlaying] = useState(false);
  const [isPreviewPlaying, setIsPreviewPlaying] = useState(false);
  const [currentCut, setCurrentCut] = useState<AudioCut | null>(null);
  const [duration, setDuration] = useState(0);
  const [isProcessing, setIsProcessing] = useState(false);
  const [cutAudioUrl, setCutAudioUrl] = useState<string | null>(null);
  const [useFullAudio, setUseFullAudio] = useState(true);

  /** Revoke the cut's object URL and tear down the preview wavesurfer. */
  const cleanupPreview = () => {
    if (cutAudioUrlRef.current) {
      URL.revokeObjectURL(cutAudioUrlRef.current);
      cutAudioUrlRef.current = null;
      setCutAudioUrl(null);
    }
    if (previewWaveSurferRef.current) {
      previewWaveSurferRef.current.destroy();
      previewWaveSurferRef.current = null;
    }
    setIsPreviewPlaying(false);
  };

  // Main waveform: created once per audio/color/variant combination.
  useEffect(() => {
    const waveSurfer = WaveSurfer.create({
      container: containerRef?.current || "",
      responsive: true,
      cursorWidth: 0,
      height: variant === 'edit' ? 96 : 24,
      waveColor,
      progressColor,
      barGap: 5,
      barWidth: 8,
      barRadius: 4,
      fillParent: true,
      hideScrollbar: true,
      normalize: true,
      autoCenter: true,
      ignoreSilenceMode: true,
      barMinHeight: 4,
      plugins: variant === 'edit'
        ? [RegionsPlugin.create({ dragSelection: true, slop: 5 })]
        : []
    });
    waveSurfer.load(audio);
    waveSurfer.on("ready", () => {
      waveSurferRef.current = waveSurfer;
      setDuration(waveSurfer.getDuration());
    });
    waveSurfer.on("finish", () => setIsPlaying(false));

    if (variant === 'edit') {
      // Enforce a single region: creating a new one removes all others.
      waveSurfer.on('region-created', (region) => {
        const regions = waveSurfer.regions.list;
        Object.keys(regions).forEach(id => {
          if (id !== region.id) {
            regions[id].remove();
          }
        });
        cleanupPreview();
        const newCut: AudioCut = { id: region.id, start: region.start, end: region.end };
        setCurrentCut(newCut);
      });
      waveSurfer.on('region-updated', (region) => {
        const updatedCut: AudioCut = { id: region.id, start: region.start, end: region.end };
        setCurrentCut(updatedCut);
        // Any previously rendered cut no longer matches the region bounds.
        cleanupPreview();
      });
    }

    return () => {
      waveSurfer.destroy();
      cleanupPreview();
      if (audioContextRef.current?.state !== 'closed') {
        audioContextRef.current?.close();
      }
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [audio, progressColor, waveColor, variant]);

  // Preview waveform: rebuilt whenever a new cut blob URL exists.
  useEffect(() => {
    if (cutAudioUrl && previewContainerRef.current) {
      const previewWaveSurfer = WaveSurfer.create({
        container: previewContainerRef.current,
        responsive: true,
        cursorWidth: 0,
        height: 48,
        waveColor,
        progressColor,
        barGap: 5,
        barWidth: 8,
        barRadius: 4,
        fillParent: true,
        hideScrollbar: true,
        normalize: true,
        autoCenter: true,
        barMinHeight: 4,
      });
      previewWaveSurfer.load(cutAudioUrl);
      previewWaveSurfer.on("finish", () => setIsPreviewPlaying(false));
      previewWaveSurferRef.current = previewWaveSurfer;
      return () => {
        previewWaveSurfer.destroy();
        previewWaveSurferRef.current = null;
      };
    }
  }, [cutAudioUrl, waveColor, progressColor]);

  const handlePlayPause = () => {
    setIsPlaying(prev => !prev);
    waveSurferRef.current?.playPause();
  };

  const handlePreviewPlayPause = () => {
    setIsPreviewPlaying(prev => !prev);
    previewWaveSurferRef.current?.playPause();
  };

  const handleDeleteRegion = () => {
    if (currentCut && waveSurferRef.current?.regions?.list[currentCut.id]) {
      waveSurferRef.current.regions.list[currentCut.id].remove();
      setCurrentCut(null);
      cleanupPreview();
    }
  };

  /** Write an ASCII string into a DataView at the given byte offset (WAV header fields). */
  const writeString = (view: DataView, offset: number, string: string) => {
    for (let i = 0; i < string.length; i++) {
      view.setUint8(offset + i, string.charCodeAt(i));
    }
  };

  /**
   * Render the selected region to a 16-bit PCM WAV blob, publish it as an
   * object URL, and make it the active audio.
   */
  const applyCuts = async () => {
    if (!waveSurferRef.current || !currentCut) return;
    setIsProcessing(true);
    try {
      if (!audioContextRef.current) {
        audioContextRef.current = new AudioContext();
      }
      // Re-fetch and decode the source so we cut from the raw samples.
      const response = await fetch(audio);
      const arrayBuffer = await response.arrayBuffer();
      const originalBuffer = await audioContextRef.current.decodeAudioData(arrayBuffer);

      const cutDuration = currentCut.end - currentCut.start;
      const newBuffer = audioContextRef.current.createBuffer(
        originalBuffer.numberOfChannels,
        Math.ceil(audioContextRef.current.sampleRate * cutDuration),
        audioContextRef.current.sampleRate
      );
      for (let channel = 0; channel < originalBuffer.numberOfChannels; channel++) {
        const newChannelData = newBuffer.getChannelData(channel);
        const originalChannelData = originalBuffer.getChannelData(channel);
        const startSample = Math.floor(currentCut.start * audioContextRef.current.sampleRate);
        const endSample = Math.floor(currentCut.end * audioContextRef.current.sampleRate);
        const cutLength = endSample - startSample;
        for (let i = 0; i < cutLength; i++) {
          newChannelData[i] = originalChannelData[startSample + i];
        }
      }

      const offlineContext = new OfflineAudioContext(
        newBuffer.numberOfChannels,
        newBuffer.length,
        newBuffer.sampleRate
      );
      const source = offlineContext.createBufferSource();
      source.buffer = newBuffer;
      source.connect(offlineContext.destination);
      source.start();
      const renderedBuffer = await offlineContext.startRendering();

      // Encode as canonical 44-byte-header PCM16 WAV.
      const wavBlob = await new Promise<Blob>((resolve) => {
        const numberOfChannels = renderedBuffer.numberOfChannels;
        const length = renderedBuffer.length * numberOfChannels * 2;
        const buffer = new ArrayBuffer(44 + length);
        const view = new DataView(buffer);
        writeString(view, 0, 'RIFF');
        view.setUint32(4, 36 + length, true);
        writeString(view, 8, 'WAVE');
        writeString(view, 12, 'fmt ');
        view.setUint32(16, 16, true);           // fmt chunk size
        view.setUint16(20, 1, true);            // PCM format
        view.setUint16(22, numberOfChannels, true);
        view.setUint32(24, renderedBuffer.sampleRate, true);
        view.setUint32(28, renderedBuffer.sampleRate * numberOfChannels * 2, true); // byte rate
        view.setUint16(32, numberOfChannels * 2, true);                              // block align
        view.setUint16(34, 16, true);           // bits per sample
        writeString(view, 36, 'data');
        view.setUint32(40, length, true);
        let offset = 44;
        for (let i = 0; i < renderedBuffer.length; i++) {
          for (let channel = 0; channel < numberOfChannels; channel++) {
            // Clamp to [-1, 1] before int16 conversion: normalized floats can
            // overshoot and would otherwise wrap around when truncated.
            const sample = Math.max(-1, Math.min(1, renderedBuffer.getChannelData(channel)[i]));
            view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
            offset += 2;
          }
        }
        resolve(new Blob([buffer], { type: 'audio/wav' }));
      });

      const newUrl = URL.createObjectURL(wavBlob);
      if (cutAudioUrlRef.current) {
        URL.revokeObjectURL(cutAudioUrlRef.current);
      }
      cutAudioUrlRef.current = newUrl;
      setCutAudioUrl(newUrl);
      setUseFullAudio(false);
      setAudioUrl?.(newUrl);
    } catch (error) {
      console.error('Error applying cuts:', error);
    } finally {
      setIsProcessing(false);
    }
  };

  /** Format seconds as m:ss. */
  const formatTime = (time: number) => {
    const minutes = Math.floor(time / 60);
    const seconds = Math.floor(time % 60);
    return `${minutes}:${seconds.toString().padStart(2, '0')}`;
  };

  /** Toggle between the full audio and the rendered cut (requires an applied cut). */
  const switchAudio = () => {
    if (!cutAudioUrl) {
      toast.info("Apply an audio cut first!");
    } else {
      setUseFullAudio(!useFullAudio);
      // useFullAudio still holds the OLD value here: old=true means we are
      // switching TO the cut, so publish cutAudioUrl (and vice versa).
      setAudioUrl?.(useFullAudio ? cutAudioUrl : audio);
    }
  };

  // NOTE(review): the original markup was lost in extraction; the structure,
  // classNames, and icon sizes below are a faithful-intent reconstruction from
  // the surviving text/expressions and imports — confirm against the design.
  return (
    <div className="flex w-full flex-col gap-3">
      <div className="flex w-full items-center gap-3">
        <button type="button" onClick={handlePlayPause} aria-label={isPlaying ? "Pause" : "Play"}>
          {isPlaying ? (
            <BsPauseFill size={24} />
          ) : (
            <BsPlayFill size={24} />
          )}
        </button>
        <div ref={containerRef} className="w-full" />
        {variant === 'edit' && duration > 0 && (
          <span className="whitespace-nowrap text-sm">
            Total Duration: {formatTime(duration)}
          </span>
        )}
      </div>

      {variant === 'edit' && (
        <Switch
          checked={useFullAudio}
          onChange={() => switchAudio()}
          className={clsx(
            "relative inline-flex h-[30px] w-[58px] shrink-0 cursor-pointer rounded-full border-2 border-transparent transition-colors duration-200 ease-in-out focus:outline-none focus-visible:ring-2 focus-visible:ring-white/75",
            useFullAudio ? "bg-green-200" : "bg-blue-200"
          )}
        >
          <span
            className={clsx(
              "pointer-events-none inline-flex h-[26px] w-[26px] transform items-center justify-center rounded-full bg-white shadow-lg ring-0 transition duration-200 ease-in-out",
              useFullAudio ? "translate-x-0" : "translate-x-7"
            )}
          >
            {useFullAudio ? <MdAllInclusive /> : <BsFillFileEarmarkMusicFill />}
          </span>
        </Switch>
      )}

      {variant === 'edit' && currentCut && (
        <div className="flex items-center justify-between gap-2 rounded-md border p-2">
          <div>
            <p className="text-sm font-medium">Selected Region</p>
            <p className="text-xs">
              {formatTime(currentCut.start)} - {formatTime(currentCut.end)}
            </p>
          </div>
          <div className="flex items-center gap-2">
            <button type="button" onClick={applyCuts} disabled={isProcessing} aria-label="Apply cut">
              <BsScissors size={20} />
            </button>
            <button type="button" onClick={handleDeleteRegion} aria-label="Delete region">
              <BsTrash size={20} />
            </button>
          </div>
        </div>
      )}

      {cutAudioUrl && (
        <div className="flex flex-col gap-2">
          <p className="text-sm font-medium">Cut Preview</p>
          <div className="flex w-full items-center gap-3">
            <button
              type="button"
              onClick={handlePreviewPlayPause}
              aria-label={isPreviewPlaying ? "Pause preview" : "Play preview"}
            >
              {isPreviewPlaying ? (
                <BsPauseFill size={24} />
              ) : (
                <BsPlayFill size={24} />
              )}
            </button>
            <div ref={previewContainerRef} className="w-full" />
          </div>
        </div>
      )}
    </div>
  );
};

export default Waveform;