import React, { useState, useEffect, useRef, ErrorInfo, ReactNode } from 'react';
import { motion, AnimatePresence } from 'motion/react';
import {
Music,
Upload,
FileAudio,
FileVideo,
Download,
History,
Play,
Pause,
Trash2,
LogOut,
LogIn,
Loader2,
CheckCircle2,
AlertCircle,
Music2,
Piano,
Settings2,
Copy,
Check,
Sun,
Moon,
Keyboard,
Save,
Key,
Languages,
Layers,
Eye,
EyeOff,
Gauge,
Mic2,
MicOff,
Volume2,
VolumeX,
FastForward,
RotateCcw,
Mic,
Square,
Share2,
HelpCircle,
ExternalLink,
Twitter,
Linkedin,
Facebook,
Scissors,
Edit,
Type
} from 'lucide-react';
import abcjs from 'abcjs';
import {
auth,
db,
googleProvider,
signInWithPopup,
signOut,
onAuthStateChanged,
collection,
addDoc,
query,
where,
onSnapshot,
serverTimestamp,
doc,
getDoc,
setDoc,
deleteDoc,
getDocFromServer,
OperationType,
handleFirestoreError,
User,
Timestamp
} from './lib/firebase';
import { transcribeMedia, TranscriptionResult, TranscriptionSettings } from './services/geminiService';
import { Button } from '@/components/ui/button';
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Label } from '@/components/ui/label';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { ScrollArea } from '@/components/ui/scroll-area';
import { Badge } from '@/components/ui/badge';
import { Separator } from '@/components/ui/separator';
import { Toaster } from '@/components/ui/sonner';
import { toast } from 'sonner';
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
// Helper to convert AudioBuffer to WAV Blob
// Helper to convert AudioBuffer to WAV Blob.
// Emits a 16-bit PCM RIFF/WAVE file: 44-byte header followed by
// little-endian interleaved samples (2 bytes per sample per channel).
const audioBufferToWav = (buffer: AudioBuffer): Blob => {
const channelCount = buffer.numberOfChannels;
const totalBytes = buffer.length * channelCount * 2 + 44;
const wavBuffer = new ArrayBuffer(totalBytes);
const view = new DataView(wavBuffer);
let cursor = 0;
// Little-endian writers that advance the byte cursor.
const writeU16 = (value: number) => {
view.setUint16(cursor, value, true);
cursor += 2;
};
const writeU32 = (value: number) => {
view.setUint32(cursor, value, true);
cursor += 4;
};
// WAVE header (four-char tags encoded as little-endian uint32s).
writeU32(0x46464952); // "RIFF"
writeU32(totalBytes - 8); // file length - 8
writeU32(0x45564157); // "WAVE"
writeU32(0x20746d66); // "fmt " chunk
writeU32(16); // fmt chunk size
writeU16(1); // PCM (uncompressed)
writeU16(channelCount);
writeU32(buffer.sampleRate);
writeU32(buffer.sampleRate * 2 * channelCount); // avg. bytes/sec
writeU16(channelCount * 2); // block-align
writeU16(16); // 16-bit (hardcoded)
writeU32(0x61746164); // "data" chunk
writeU32(totalBytes - cursor - 4); // data chunk length
// Grab each channel's sample array once, then interleave frame by frame.
const channelData: Float32Array[] = [];
for (let ch = 0; ch < channelCount; ch++) {
channelData.push(buffer.getChannelData(ch));
}
let frame = 0;
while (cursor < totalBytes) {
for (let ch = 0; ch < channelCount; ch++) {
// Clamp to [-1, 1] then scale to a signed 16-bit integer.
let sample = Math.max(-1, Math.min(1, channelData[ch][frame]));
sample = (sample < 0 ? sample * 0x8000 : sample * 0x7FFF) | 0;
view.setInt16(cursor, sample, true);
cursor += 2;
}
frame++; // next source frame
}
return new Blob([wavBuffer], { type: 'audio/wav' });
};
// Helper to convert File to Base64
// Helper to convert a File/Blob to a base64 string.
// Resolves with only the base64 payload of the data URL (the
// "data:<mime>;base64," prefix is stripped).
// FIX: the return type had been mangled to a bare `Promise` (a compile
// error — Promise requires a type argument); it resolves a string.
const fileToBase64 = (file: File | Blob): Promise<string> => {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
// reader.result is a data URL; keep everything after the first comma.
const base64String = reader.result as string;
resolve(base64String.split(',')[1]);
};
reader.onerror = (error) => reject(error);
});
};
// Top-level error boundary: catches render-time exceptions from the app
// tree and shows a reload prompt instead of a blank screen.
class ErrorBoundary extends React.Component {
// hasError flags a caught render error; error holds the thrown Error (or null).
state = { hasError: false, error: null };
// React lifecycle: switch to the fallback UI when a descendant throws.
static getDerivedStateFromError(error: Error) {
return { hasError: true, error };
}
// Log caught errors together with the component stack for debugging.
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
console.error("ErrorBoundary caught an error", error, errorInfo);
}
render() {
// Cast through any because this class declares no state/props generics.
const state = (this as any).state;
const props = (this as any).props;
if (state.hasError) {
let displayMessage = "Something went wrong.";
try {
// handleFirestoreError (see ./lib/firebase) appears to throw
// JSON-encoded messages; surface the structured context when present.
const parsed = JSON.parse(state.error?.message || "{}");
if (parsed.error) {
displayMessage = `Firestore Error: ${parsed.error} during ${parsed.operationType} on ${parsed.path}`;
}
} catch (e) {
// Not JSON — fall back to the raw message.
displayMessage = state.error?.message || displayMessage;
}
// NOTE(review): the JSX markup below appears to have been stripped
// during extraction (only text/attribute fragments remain) — restore
// the fallback-card markup from version control.
return (
Application Error
{displayMessage}
window.location.reload()}
className="w-full bg-[#141414] text-[#E4E3E0] rounded-none"
>
Reload Application
);
}
return props.children;
}
}
// Default export: wraps App in the ErrorBoundary.
// NOTE(review): the JSX children (presumably <ErrorBoundary><App /></ErrorBoundary>)
// appear to have been lost in extraction — restore from version control.
export default function AppWithErrorBoundary() {
return (
);
}
// Root application component: owns auth state, uploads, transcription
// results, notation/playback settings, history, and recording state.
// NOTE(review): the useState/useRef generic arguments (e.g. useState<User | null>)
// appear to have been stripped during extraction — confirm against VCS.
function App() {
// --- Auth / session ---
const [user, setUser] = useState(null); // Firebase user; null when signed out
const [loading, setLoading] = useState(true); // true until first auth callback
// --- Upload & transcription pipeline ---
const [isProcessing, setIsProcessing] = useState(false);
const [file, setFile] = useState(null); // currently selected media file
const [result, setResult] = useState(null); // latest TranscriptionResult
const [history, setHistory] = useState([]); // synced transcription docs
// --- General UI state ---
const [copied, setCopied] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [isDragging, setIsDragging] = useState(false);
const [theme, setTheme] = useState<'light' | 'dark'>('light');
const [showSettings, setShowSettings] = useState(false);
const [userApiKey, setUserApiKey] = useState(localStorage.getItem('gemini_api_key') || '');
const [searchTerm, setSearchTerm] = useState('');
// --- Playback & display options ---
const [playbackProgress, setPlaybackProgress] = useState(0);
const [showLyrics, setShowLyrics] = useState(true);
const [showChords, setShowChords] = useState(true);
const [playbackSpeed, setPlaybackSpeed] = useState(1);
const [midiInstrument, setMidiInstrument] = useState(0); // General MIDI program #
const [transpose, setTranspose] = useState(0); // semitones
const [instrumentVolume, setInstrumentVolume] = useState(0.8);
// --- Undo/redo stack of playback-settings snapshots ---
const [playbackHistory, setPlaybackHistory] = useState([]);
const [historyIndex, setHistoryIndex] = useState(-1);
// --- Karaoke / lyric tracking ---
const [isKaraokeMode, setIsKaraokeMode] = useState(false);
const [currentLyricIndex, setCurrentLyricIndex] = useState(-1);
const [currentLyricProgress, setCurrentLyricProgress] = useState(0);
const [isDemoMode, setIsDemoMode] = useState(false);
// --- Performance recording & misc toggles ---
const [isRecording, setIsRecording] = useState(false);
const [recordedBlob, setRecordedBlob] = useState(null);
const [showHowTo, setShowHowTo] = useState(false);
const [vocalRemoverEnabled, setVocalRemoverEnabled] = useState(false);
const [isEditingLyrics, setIsEditingLyrics] = useState(false);
const [editedLyrics, setEditedLyrics] = useState('');
const [lyricLookAhead, setLyricLookAhead] = useState(0.5); // seconds
// --- Performance suite (vocal-removed backing track) ---
const [processedAudioUrl, setProcessedAudioUrl] = useState(null);
const [isPerformanceAudioMode, setIsPerformanceAudioMode] = useState(false);
// --- Voice memos ---
const [voiceMemos, setVoiceMemos] = useState([]);
const [isRecordingMemo, setIsRecordingMemo] = useState(false);
const [showMemoRecorder, setShowMemoRecorder] = useState(false);
const performanceAudioRef = useRef(null); // HTMLAudioElement for backing track
const memoRecorderRef = useRef(null); // MediaRecorder for memos
const memoChunksRef = useRef([]); // Blob chunks of memo in progress
// Transcription engine settings, restored from localStorage.
const [transcriptionSettings, setTranscriptionSettings] = useState(() => {
const saved = localStorage.getItem('transcription_settings');
return saved ? JSON.parse(saved) : { sensitivity: 7, accuracy: 'balanced' };
});
// Metronome settings, restored from localStorage.
const [metronomeEnabled, setMetronomeEnabled] = useState(false);
const [metronomeSettings, setMetronomeSettings] = useState(() => {
const saved = localStorage.getItem('metronome_settings');
return saved ? JSON.parse(saved) : { volume: 0.5, timeSignature: '4/4', sound: 'digital' };
});
// --- Rendering / playback refs ---
const notationRef = useRef(null); // container div for abcjs output
const visualizerRef = useRef(null); // canvas for the fake equalizer
const synthControlRef = useRef(null); // active abcjs CreateSynth
const playbackTimerRef = useRef(null);
const mediaRecorderRef = useRef(null); // MediaRecorder for performances
const audioChunksRef = useRef([]);
// Persist Settings
// Mirror transcription/metronome settings to localStorage on every change.
useEffect(() => {
localStorage.setItem('transcription_settings', JSON.stringify(transcriptionSettings));
}, [transcriptionSettings]);
useEffect(() => {
localStorage.setItem('metronome_settings', JSON.stringify(metronomeSettings));
}, [metronomeSettings]);
// Playback History Management
// Push a snapshot of the playback settings onto the undo stack, discarding
// any redo states past the current index and capping the stack at 20.
const addToHistory = (speed: number, instrument: number, trans: number) => {
const snapshot = { speed, instrument, transpose: trans };
const updated = [...playbackHistory.slice(0, historyIndex + 1), snapshot];
if (updated.length > 20) updated.shift();
setPlaybackHistory(updated);
setHistoryIndex(updated.length - 1);
};
// Record a short "musical idea" voice memo from the microphone and sync it
// to the user's Firestore voice_memos collection. Memos whose base64 exceeds
// ~1MB (the Firestore document limit) are offered as a local download instead.
const startMemoRecording = async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const recorder = new MediaRecorder(stream);
memoRecorderRef.current = recorder;
memoChunksRef.current = [];
recorder.ondataavailable = (e) => {
if (e.data.size > 0) {
memoChunksRef.current.push(e.data);
}
};
// Assemble, encode, and persist once stopMemoRecording() fires.
recorder.onstop = async () => {
const blob = new Blob(memoChunksRef.current, { type: 'audio/webm' });
const base64 = await fileToBase64(blob);
// Check size (1MB limit for Firestore)
if (base64.length > 1048576) {
toast.error('Recording is too long for cloud sync (max ~1 min). Please download it instead.');
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `idea-${new Date().getTime()}.webm`;
a.click();
// NOTE(review): url is never revoked (URL.revokeObjectURL) — minor leak.
return;
}
if (user) {
const memosPath = 'voice_memos';
try {
await addDoc(collection(db, memosPath), {
userId: user.uid,
title: `Musical Idea ${new Date().toLocaleTimeString()}`,
audioData: base64,
duration: 0, // Could calculate if needed
createdAt: serverTimestamp()
});
toast.success('Musical idea captured and synced!');
} catch (error) {
handleFirestoreError(error, OperationType.CREATE, memosPath);
}
}
};
recorder.start();
setIsRecordingMemo(true);
toast.info('Recording started... Capture your melody!');
} catch (err) {
// Most commonly a denied getUserMedia permission prompt.
console.error('Failed to start recording', err);
toast.error('Microphone access denied or not available.');
}
};
// Stop an in-progress memo recording and release the microphone tracks.
const stopMemoRecording = () => {
const recorder = memoRecorderRef.current;
if (!recorder || !isRecordingMemo) return;
recorder.stop();
setIsRecordingMemo(false);
recorder.stream.getTracks().forEach(track => track.stop());
};
// Delete a synced voice memo document by id; failures are routed through
// the shared Firestore error handler with the document path for context.
const deleteVoiceMemo = async (id: string) => {
if (!user) return;
const docPath = `voice_memos/${id}`;
try {
await deleteDoc(doc(db, 'voice_memos', id));
toast.success('Memo deleted');
} catch (error) {
handleFirestoreError(error, OperationType.DELETE, docPath);
}
};
// Step backwards through saved playback-settings snapshots, restoring
// speed/instrument/transpose and halting any active playback.
const undoPlayback = () => {
if (historyIndex <= 0) return;
const snapshot = playbackHistory[historyIndex - 1];
setPlaybackSpeed(snapshot.speed);
setMidiInstrument(snapshot.instrument);
setTranspose(snapshot.transpose);
setHistoryIndex(historyIndex - 1);
stopPlayback();
};
// Step forwards again after an undo.
const redoPlayback = () => {
if (historyIndex >= playbackHistory.length - 1) return;
const snapshot = playbackHistory[historyIndex + 1];
setPlaybackSpeed(snapshot.speed);
setMidiInstrument(snapshot.instrument);
setTranspose(snapshot.transpose);
setHistoryIndex(historyIndex + 1);
stopPlayback();
};
// Visualizer Animation
// Decorative equalizer-style bars drawn on a canvas while playback runs.
// Bar heights ease toward random targets each frame — cosmetic only, not
// driven by the actual audio signal.
useEffect(() => {
if (!isPlaying || !visualizerRef.current) return;
const canvas = visualizerRef.current;
const ctx = canvas.getContext('2d');
if (!ctx) return;
let animationId: number;
const bars = 32;
const barWidth = canvas.width / bars;
const heights = new Array(bars).fill(0);
const render = () => {
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = theme === 'light' ? '#141414' : '#E4E3E0';
for (let i = 0; i < bars; i++) {
// Target height based on some pseudo-randomness or beat
const target = Math.random() * canvas.height * 0.8;
heights[i] += (target - heights[i]) * 0.2; // ease 20% toward the target
ctx.fillRect(i * barWidth + 1, canvas.height - heights[i], barWidth - 2, heights[i]);
}
animationId = requestAnimationFrame(render);
};
render();
// Stop the animation loop when playback ends or the theme flips.
return () => cancelAnimationFrame(animationId);
}, [isPlaying, theme]);
// Global keyboard shortcuts: Ctrl/Cmd+P toggles playback, Ctrl/Cmd+U opens
// the file picker, Ctrl/Cmd+S toggles the settings panel. Depends on
// isPlaying/result so togglePlayback is not called through a stale closure.
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (e.ctrlKey || e.metaKey) {
if (e.key === 'p') {
e.preventDefault();
togglePlayback();
}
if (e.key === 'u') {
e.preventDefault();
// Programmatically click the hidden file input.
document.getElementById('file-upload')?.click();
}
if (e.key === 's') {
e.preventDefault();
setShowSettings(prev => !prev);
}
}
};
window.addEventListener('keydown', handleKeyDown);
return () => window.removeEventListener('keydown', handleKeyDown);
}, [isPlaying, result]);
// Theme Sync
useEffect(() => {
const savedTheme = localStorage.getItem('theme') as 'light' | 'dark';
if (savedTheme) {
setTheme(savedTheme);
document.documentElement.classList.toggle('dark', savedTheme === 'dark');
}
}, []);
const toggleTheme = () => {
const newTheme = theme === 'light' ? 'dark' : 'light';
setTheme(newTheme);
localStorage.setItem('theme', newTheme);
document.documentElement.classList.toggle('dark', newTheme === 'dark');
};
// Recording Logic
// Capture a microphone performance as a webm Blob; the finished blob is
// held in state so the user can download it afterwards.
const startRecording = async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const recorder = new MediaRecorder(stream);
mediaRecorderRef.current = recorder;
audioChunksRef.current = [];
recorder.ondataavailable = (e) => {
if (e.data.size > 0) {
audioChunksRef.current.push(e.data);
}
};
// Assemble all chunks into one blob once stopRecording() fires.
recorder.onstop = () => {
const blob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
setRecordedBlob(blob);
toast.success('Recording complete! You can now download your performance.');
};
recorder.start();
setIsRecording(true);
toast.info('Recording started...');
} catch (err) {
// Most commonly a denied getUserMedia permission prompt.
console.error('Error accessing microphone:', err);
toast.error('Microphone access denied or not available.');
}
};
// Stop the performance recording and release the microphone tracks.
const stopRecording = () => {
const recorder = mediaRecorderRef.current;
if (!recorder || !isRecording) return;
recorder.stop();
recorder.stream.getTracks().forEach(track => track.stop());
setIsRecording(false);
};
// Offer the recorded performance as a .webm download.
const downloadRecording = () => {
if (!recordedBlob) return;
const objectUrl = URL.createObjectURL(recordedBlob);
const anchor = document.createElement('a');
anchor.href = objectUrl;
anchor.download = `performance-${Date.now()}.webm`;
anchor.click();
URL.revokeObjectURL(objectUrl);
};
// Copy a (mock) share link for the current result to the clipboard.
// In a real app this would persist the score to a public 'shares'
// collection and use the stored document id instead of a random token.
const handleShare = async () => {
if (!result) return;
try {
// FIX: String.prototype.substr is deprecated — slice(2, 11) yields the
// same up-to-9 base36 characters.
const shareUrl = `${window.location.origin}/share/${Math.random().toString(36).slice(2, 11)}`;
await navigator.clipboard.writeText(shareUrl);
toast.success('Share link copied to clipboard!');
} catch (err) {
// Clipboard access can be denied outside a user gesture / secure context.
toast.error('Failed to copy share link.');
}
};
// Persist the user-supplied Gemini API key locally and in state.
const saveApiKey = (key: string) => {
localStorage.setItem('gemini_api_key', key);
setUserApiKey(key);
toast.success('API Key saved locally');
};
// On mount: probe Firestore connectivity once, then subscribe to auth state.
// On sign-in we upsert the user's profile document and attach live listeners
// for the user's transcriptions and voice memos; listeners are detached on
// sign-out, on a subsequent auth change, and on unmount.
useEffect(() => {
const testConnection = async () => {
try {
await getDocFromServer(doc(db, 'test', 'connection'));
} catch (error) {
// Surface the common misconfiguration symptom only; other errors
// (e.g. permission-denied on the probe doc) are expected and ignored.
if(error instanceof Error && error.message.includes('the client is offline')) {
console.error("Please check your Firebase configuration. ");
}
}
};
testConnection();
let unsubTranscriptions: (() => void) | null = null;
let unsubMemos: (() => void) | null = null;
const unsubscribe = onAuthStateChanged(auth, (user) => {
setUser(user);
setLoading(false);
// Cleanup previous listeners if any
if (unsubTranscriptions) unsubTranscriptions();
if (unsubMemos) unsubMemos();
if (user) {
// Sync user to Firestore
const userPath = `users/${user.uid}`;
const userRef = doc(db, 'users', user.uid);
getDoc(userRef).then(userDoc => {
const userData: any = {
uid: user.uid,
email: user.email,
displayName: user.displayName,
photoURL: user.photoURL,
lastLoginAt: serverTimestamp()
};
// Only stamp createdAt on first sign-in.
if (!userDoc.exists()) {
userData.createdAt = serverTimestamp();
}
// merge:true preserves fields written by other sessions.
setDoc(userRef, userData, { merge: true })
.catch(err => handleFirestoreError(err, OperationType.WRITE, userPath));
}).catch(err => handleFirestoreError(err, OperationType.GET, userPath));
// Load history
const transcriptionsPath = 'transcriptions';
const q = query(collection(db, transcriptionsPath), where('userId', '==', user.uid));
unsubTranscriptions = onSnapshot(q, (snapshot) => {
const items = snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() }));
// Sorted client-side (newest first) rather than via orderBy —
// presumably to avoid needing a composite index; TODO confirm.
setHistory(items.sort((a: any, b: any) => b.createdAt?.seconds - a.createdAt?.seconds));
}, (error) => {
handleFirestoreError(error, OperationType.LIST, transcriptionsPath);
});
// Load voice memos
const memosPath = 'voice_memos';
const qMemos = query(collection(db, memosPath), where('userId', '==', user.uid));
unsubMemos = onSnapshot(qMemos, (snapshot) => {
const items = snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() }));
setVoiceMemos(items.sort((a: any, b: any) => b.createdAt?.seconds - a.createdAt?.seconds));
}, (error) => {
handleFirestoreError(error, OperationType.LIST, memosPath);
});
} else {
// Signed out: clear user-scoped data.
setHistory([]);
setVoiceMemos([]);
}
});
// Unmount: drop the auth subscription and any active snapshot listeners.
return () => {
unsubscribe();
if (unsubTranscriptions) unsubTranscriptions();
if (unsubMemos) unsubMemos();
};
}, []);
// Render the ABC notation whenever the result or any display setting changes.
// Voice-level MIDI directives (program/transpose) are injected after the
// first K: header line and after every V: line so multi-voice scores are
// fully covered.
useEffect(() => {
if (result && notationRef.current) {
let abc = result.abcNotation;
// Insert `directive` after the first K: line and after each V: line.
// (Shared by the instrument and transpose cases, which previously
// duplicated this loop verbatim.)
const injectDirective = (source: string, directive: string): string => {
const lines = source.split('\n');
const out: string[] = [];
let kFound = false;
for (const line of lines) {
out.push(line);
if (line.startsWith('K:') && !kFound) {
out.push(directive);
kFound = true;
} else if (line.startsWith('V:')) {
out.push(directive);
}
}
return out.join('\n');
};
// Apply instrument selection to all voices (0 = default piano, skip).
if (midiInstrument !== 0) {
abc = injectDirective(abc, `%%MIDI program ${midiInstrument}`);
}
// Apply transpose to all voices.
if (transpose !== 0) {
abc = injectDirective(abc, `%%MIDI transpose ${transpose}`);
}
// Apply speed change to the ABC tempo header.
if (playbackSpeed !== 1 && result.tempo) {
const newTempo = Math.round(result.tempo * playbackSpeed);
abc = abc.replace(/Q:.*\n/g, `Q:1/4=${newTempo}\n`);
}
if (!showChords) {
abc = abc.replace(/"[^"]*"/g, ''); // strip "chord" annotations
}
if (!showLyrics) {
abc = abc.replace(/\nw:.*\n/g, '\n'); // strip w: lyric lines
}
abcjs.renderAbc(notationRef.current, abc, {
responsive: 'resize',
paddingbottom: 30,
paddingtop: 30,
paddingleft: 30,
paddingright: 30,
add_classes: true
});
}
// BUG FIX: `transpose` was missing from the dependency list, so changing
// transposition did not re-render the notation until another setting changed.
}, [result, showChords, showLyrics, playbackSpeed, midiInstrument, transpose]);
// Sign in via the Google popup flow; outcome is surfaced as a toast.
const handleLogin = async () => {
try {
await signInWithPopup(auth, googleProvider);
toast.success('Successfully signed in');
} catch (error) {
console.error(error);
toast.error('Failed to sign in');
}
};
// Sign out: halt playback and clear the current session's file and result.
const handleLogout = async () => {
try {
stopPlayback();
await signOut(auth);
setFile(null);
setResult(null);
toast.success('Signed out');
} catch (error) {
console.error(error);
}
};
// Halt all playback — performance backing-track audio and/or the abcjs MIDI
// synth — and reset progress/lyric state.
const stopPlayback = () => {
if (performanceAudioRef.current) {
performanceAudioRef.current.pause();
performanceAudioRef.current = null;
}
if (synthControlRef.current) {
synthControlRef.current.stop();
if (synthControlRef.current.timingCallbacks) {
synthControlRef.current.timingCallbacks.stop();
}
}
// BUG FIX: these resets previously ran only when a MIDI synth was active,
// so stopping performance-audio playback left isPlaying/progress/lyric
// state stale (and the progress timer running).
setIsPlaying(false);
setPlaybackProgress(0);
setCurrentLyricIndex(-1);
if (playbackTimerRef.current) clearInterval(playbackTimerRef.current);
};
// Toggle playback of the current result. Two modes:
// 1) Performance mode — plays the vocal-removed backing track via an
//    HTMLAudioElement, with rAF-driven progress and karaoke lyric tracking.
// 2) Standard mode — synthesizes the (instrument/transpose/tempo adjusted)
//    ABC via abcjs CreateSynth, with TimingCallbacks driving progress,
//    lyric highlighting, and the metronome click.
const togglePlayback = async () => {
if (!result) return;
if (isPlaying) {
stopPlayback();
return;
}
try {
const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
// Performance Audio Mode (Original audio minus vocals)
if (isPerformanceAudioMode && processedAudioUrl) {
const audio = new Audio(processedAudioUrl);
audio.playbackRate = playbackSpeed;
audio.volume = instrumentVolume;
performanceAudioRef.current = audio;
audio.onended = () => {
setIsPlaying(false);
setPlaybackProgress(0);
setCurrentLyricIndex(-1);
};
await audio.play();
setIsPlaying(true);
// rAF loop: track progress + active lyric while this audio element
// is still the one referenced (stops naturally after stopPlayback).
const updatePerformanceProgress = () => {
if (!audio.paused && performanceAudioRef.current === audio) {
const currentTime = audio.currentTime;
const duration = audio.duration || 1;
setPlaybackProgress((currentTime / duration) * 100);
if (result.lyrics && result.lyrics.length > 0) {
// Look slightly ahead so the next line appears before its note.
const lookAheadTime = currentTime + lyricLookAhead;
const index = result.lyrics.findIndex((l, i) => {
const next = result.lyrics![i + 1];
return lookAheadTime >= l.startTime && (!next || lookAheadTime < next.startTime);
});
if (index !== -1) {
setCurrentLyricIndex(index);
const currentLine = result.lyrics[index];
const nextLine = result.lyrics[index + 1];
// Line duration: until the next line, or 5s for the last line.
const lineDuration = nextLine ? nextLine.startTime - currentLine.startTime : 5;
const elapsedInLine = Math.max(0, currentTime - currentLine.startTime);
setCurrentLyricProgress(Math.min((elapsedInLine / lineDuration) * 100, 100));
}
}
requestAnimationFrame(updatePerformanceProgress);
}
};
requestAnimationFrame(updatePerformanceProgress);
return;
}
// Standard MIDI Playback Mode
const midiBuffer = new abcjs.synth.CreateSynth();
let abc = result.abcNotation;
// Apply instrument selection more robustly to all voices
if (midiInstrument !== 0) {
const lines = abc.split('\n');
let newLines = [];
let kFound = false;
for (let line of lines) {
newLines.push(line);
if (line.startsWith('K:') && !kFound) {
newLines.push(`%%MIDI program ${midiInstrument}`);
kFound = true;
} else if (line.startsWith('V:')) {
newLines.push(`%%MIDI program ${midiInstrument}`);
}
}
abc = newLines.join('\n');
}
// Apply transpose to all voices
if (transpose !== 0) {
const lines = abc.split('\n');
let newLines = [];
let kFound = false;
for (let line of lines) {
newLines.push(line);
if (line.startsWith('K:') && !kFound) {
newLines.push(`%%MIDI transpose ${transpose}`);
kFound = true;
} else if (line.startsWith('V:')) {
newLines.push(`%%MIDI transpose ${transpose}`);
}
}
abc = newLines.join('\n');
}
// Bake the speed multiplier into the ABC tempo header.
if (playbackSpeed !== 1 && result.tempo) {
const newTempo = Math.round(result.tempo * playbackSpeed);
abc = abc.replace(/Q:.*\n/g, `Q:1/4=${newTempo}\n`);
}
// Render off-screen just to obtain the visualObj the synth needs.
const visualObj = (abcjs.renderAbc(document.createElement('div'), abc) as any)[0];
await midiBuffer.init({
visualObj,
options: {
onEnded: () => {
setIsPlaying(false);
setPlaybackProgress(0);
if (playbackTimerRef.current) clearInterval(playbackTimerRef.current);
}
}
});
await midiBuffer.prime();
// Set volume
if ((midiBuffer as any).setVolume) {
(midiBuffer as any).setVolume(instrumentVolume);
}
midiBuffer.start();
synthControlRef.current = midiBuffer;
setIsPlaying(true);
// Perfect Metronome Sync and Progress Tracking using abcjs timing callbacks
const beatsPerMeasure = parseInt(metronomeSettings.timeSignature.split('/')[0]) || 4;
// Short WebAudio click; the strong (downbeat) click is pitched higher.
const playClick = (isStrong: boolean) => {
if (!metronomeEnabled) return;
const osc = audioCtx.createOscillator();
const gain = audioCtx.createGain();
switch (metronomeSettings.sound) {
case 'woodblock':
osc.type = 'triangle';
osc.frequency.setValueAtTime(isStrong ? 1200 : 900, audioCtx.currentTime);
break;
case 'perc':
osc.type = 'square';
osc.frequency.setValueAtTime(isStrong ? 200 : 150, audioCtx.currentTime);
break;
default: // digital
osc.type = 'sine';
osc.frequency.setValueAtTime(isStrong ? 1000 : 800, audioCtx.currentTime);
}
gain.gain.setValueAtTime(metronomeSettings.volume, audioCtx.currentTime);
gain.gain.exponentialRampToValueAtTime(0.001, audioCtx.currentTime + 0.1);
osc.connect(gain);
gain.connect(audioCtx.destination);
osc.start();
osc.stop(audioCtx.currentTime + 0.1);
};
const timingCallbacks = new (abcjs as any).TimingCallbacks(visualObj, {
eventCallback: (event: any) => {
if (!event) return;
// Update progress
const totalTime = visualObj.getTotalTime();
const progress = (event.milliseconds / (totalTime * 1000)) * 100;
setPlaybackProgress(Math.min(progress, 100));
// Update lyrics
if (result.lyrics && result.lyrics.length > 0) {
const currentTime = event.milliseconds / 1000;
const lookAheadTime = currentTime + lyricLookAhead;
const index = result.lyrics.findIndex((l, i) => {
const next = result.lyrics![i + 1];
return lookAheadTime >= l.startTime && (!next || lookAheadTime < next.startTime);
});
if (index !== -1) {
setCurrentLyricIndex(index);
// Calculate progress within current line for word highlighting
const currentLine = result.lyrics[index];
const nextLine = result.lyrics[index + 1];
// Estimate duration: until next line or 5s max
const lineDuration = nextLine ? nextLine.startTime - currentLine.startTime : 5;
const elapsedInLine = Math.max(0, currentTime - currentLine.startTime);
setCurrentLyricProgress(Math.min((elapsedInLine / lineDuration) * 100, 100));
}
}
},
beatCallback: (beatNumber: number) => {
// Accent the first beat of each measure.
playClick(beatNumber % beatsPerMeasure === 0);
}
});
timingCallbacks.start();
// Store timingCallbacks to stop it later
(midiBuffer as any).timingCallbacks = timingCallbacks;
} catch (error) {
console.error(error);
toast.error('Audio playback failed');
}
};
// Remove a transcription from the archive; also clears the viewer if the
// deleted item is the one currently open.
const handleDeleteHistory = async (e: React.MouseEvent, id: string) => {
e.stopPropagation();
const docPath = `transcriptions/${id}`;
try {
await deleteDoc(doc(db, 'transcriptions', id));
toast.success('Transcription deleted');
if (result && (result as any).id === id) {
setResult(null);
}
} catch (error) {
handleFirestoreError(error, OperationType.DELETE, docPath);
}
};
// Drag-and-drop upload handlers: highlight the drop zone while dragging
// and accept only audio/video files on drop.
const handleDragOver = (e: React.DragEvent) => {
e.preventDefault();
setIsDragging(true);
};
const handleDragLeave = () => {
setIsDragging(false);
};
const handleDrop = (e: React.DragEvent) => {
e.preventDefault();
setIsDragging(false);
const dropped = e.dataTransfer.files && e.dataTransfer.files[0];
if (!dropped) return;
const isMedia = dropped.type.startsWith('audio/') || dropped.type.startsWith('video/');
if (!isMedia) {
toast.error('Please upload an audio or video file');
return;
}
// Accepting a new file resets any previous result / performance state.
setFile(dropped);
setResult(null);
setProcessedAudioUrl(null);
setIsPerformanceAudioMode(false);
};
// Load a built-in two-voice demo score (Ode to Joy duet) so visitors can
// try notation rendering, playback, and karaoke without uploading media.
const loadDemoScore = () => {
const demo: TranscriptionResult = {
abcNotation: `X:1
T:Ode to Joy (Duet Performance)
M:4/4
L:1/4
Q:1/4=120
K:C
V:1 name="Soprano" snm="S."
V:2 name="Alto" snm="A."
[V:1] "C"E E F G | "G"G F E D | "C"C C D E | "G"E>D D2 |
[V:2] "C"C C D E | "G"E D C B, | "C"A, A, B, C | "G"C>B, B,2 |
[V:1] "C"E E F G | "G"G F E D | "C"C C D E | "G"D>C C2 |
[V:2] "C"C C D E | "G"E D C B, | "C"A, A, B, C | "G"B,>A, A,2 |
[V:1] "G"D D E C | "G"D E/F/ E C | "G"D E/F/ E D | "C"C D G,2 |
[V:2] "G"B, B, C A, | "G"B, C/D/ C A, | "G"B, C/D/ C B, | "C"A, B, E,2 |
[V:1] "C"E E F G | "G"G F E D | "C"C C D E | "G"D>C C2 |]
[V:2] "C"C C D E | "G"E D C B, | "C"A, A, B, C | "G"B,>A, A,2 |]`,
instruments: ["Piano", "Violin", "Voice"],
tempo: 120,
key: "C",
chords: ["C", "G", "Am", "F"],
// Lyric lines staggered 4s apart to exercise karaoke highlighting.
lyrics: [
{ text: "Voice 1: Joyful, joyful, we adore Thee", startTime: 0, voice: 1 },
{ text: "Voice 2: God of glory, Lord of love", startTime: 4, voice: 2 },
{ text: "Voice 1: Hearts unfold like flowers before Thee", startTime: 8, voice: 1 },
{ text: "Voice 2: Opening to the sun above", startTime: 12, voice: 2 },
{ text: "Voice 1: Melt the clouds of sin and sadness", startTime: 16, voice: 1 },
{ text: "Voice 2: Drive the dark of doubt away", startTime: 20, voice: 2 },
{ text: "Voice 1: Giver of immortal gladness", startTime: 24, voice: 1 },
{ text: "Voice 2: Fill us with the light of day!", startTime: 28, voice: 2 }
]
};
setResult(demo);
setIsDemoMode(true);
// Reset playback position so the demo starts cleanly.
setCurrentLyricIndex(-1);
setPlaybackProgress(0);
toast.success("Extended Duet Performance loaded! Try the dedicated Performance button.");
};
// Validate and stage a file chosen via the hidden file input (20MB limit).
const handleFileChange = (e: React.ChangeEvent) => {
const picked = e.target.files && e.target.files[0];
if (!picked) return;
const MAX_SIZE = 20 * 1024 * 1024; // 20MB
if (picked.size > MAX_SIZE) {
toast.error('File too large. Max size is 20MB.');
e.target.value = ''; // Reset input
return;
}
setFile(picked);
setResult(null);
toast.info(`File selected: ${picked.name}`);
};
// Suppress center-panned vocals via stereo phase inversion: the right
// channel is inverted, high-pass filtered at 150 Hz (so bass/kick survive),
// and mixed back with the left — centered content (typically the lead
// vocal) cancels out. Resolves with a 16-bit WAV Blob of the result.
// FIX: the return type had been mangled to a bare `Promise` (a compile
// error); the function resolves a Blob.
const removeVocalsFromBuffer = async (file: File): Promise<Blob> => {
const audioCtx = new (window.AudioContext || (window as any).webkitAudioContext)();
const arrayBuffer = await file.arrayBuffer();
const audioBuffer = await audioCtx.decodeAudioData(arrayBuffer);
// FIX: the decode context was never closed, leaking an AudioContext per
// call; release it as soon as decoding is done.
audioCtx.close();
if (audioBuffer.numberOfChannels < 2) {
// Phase inversion needs two channels to subtract against each other.
throw new Error('Vocal removal works best on stereo files.');
}
const offlineCtx = new OfflineAudioContext(
audioBuffer.numberOfChannels,
audioBuffer.length,
audioBuffer.sampleRate
);
const source = offlineCtx.createBufferSource();
source.buffer = audioBuffer;
const splitter = offlineCtx.createChannelSplitter(2);
const merger = offlineCtx.createChannelMerger(2);
const invertGain = offlineCtx.createGain();
invertGain.gain.value = -1; // phase-invert the right channel
const highPass = offlineCtx.createBiquadFilter();
highPass.type = 'highpass';
highPass.frequency.value = 150; // keep low end from cancelling
// Left channel passes straight through to both outputs...
source.connect(splitter);
splitter.connect(merger, 0, 0);
splitter.connect(merger, 0, 1);
// ...while the inverted, high-passed right channel is mixed into both.
splitter.connect(invertGain, 1);
invertGain.connect(highPass);
highPass.connect(merger, 0, 0);
highPass.connect(merger, 0, 1);
merger.connect(offlineCtx.destination);
source.start();
const renderedBuffer = await offlineCtx.startRendering();
return audioBufferToWav(renderedBuffer);
};
// Performance Suite pipeline: strip vocals from the uploaded file, keep the
// processed audio as the backing track, then transcribe it (for lyrics and
// timing) and persist the transcription to Firestore.
const handlePerformanceVocalRemoval = async () => {
if (!file || !user) return;
setIsProcessing(true);
setIsPerformanceAudioMode(true);
try {
toast.info('Performance Suite: Isolating backing track...');
const processedBlob = await removeVocalsFromBuffer(file);
const url = URL.createObjectURL(processedBlob);
setProcessedAudioUrl(url);
toast.info('Analyzing for lyrics and timing...');
const base64 = await fileToBase64(processedBlob);
const transcription = await transcribeMedia(base64, 'audio/wav', userApiKey, transcriptionSettings);
setResult(transcription);
toast.success('Performance track ready! Vocals removed.');
// Save to Firestore with mode
const transcriptionsPath = 'transcriptions';
try {
await addDoc(collection(db, transcriptionsPath), {
userId: user.uid,
fileName: file.name,
fileType: file.type,
abcNotation: transcription.abcNotation,
instruments: transcription.instruments,
tempo: transcription.tempo,
key: transcription.key,
chords: transcription.chords,
lyrics: transcription.lyrics,
mode: 'performance-suite',
createdAt: serverTimestamp()
});
} catch (error) {
// Persist failure is non-fatal; the in-memory result is still usable.
handleFirestoreError(error, OperationType.CREATE, transcriptionsPath);
}
} catch (error: any) {
console.error(error);
toast.error(error.message || 'Performance processing failed');
// Drop back out of performance mode if processing failed.
setIsPerformanceAudioMode(false);
} finally {
setIsProcessing(false);
}
};
// Music Lab pipeline: validate file size, optionally suppress vocals, send
// the media to Gemini for transcription, and persist the result.
const handleProcess = async () => {
if (!file || !user) return;
// Improved Input Validation
const MAX_SIZE = 20 * 1024 * 1024; // 20MB
if (file.size > MAX_SIZE) {
toast.error('File too large. Max size is 20MB.');
return;
}
setIsProcessing(true);
setIsPerformanceAudioMode(false);
try {
let processingFile: File | Blob = file;
// Vocal Removal Logic
if (vocalRemoverEnabled) {
toast.info('AI Vocal Remover: Isolating instruments...');
try {
processingFile = await removeVocalsFromBuffer(file);
toast.success('Vocals suppressed! Sending to AI...');
} catch (err: any) {
// Best-effort: fall back to the original file on failure.
console.error('Vocal removal failed:', err);
toast.error(err.message || 'Vocal removal failed. Using original file.');
}
}
const base64 = await fileToBase64(processingFile);
const transcription = await transcribeMedia(base64, processingFile.type, userApiKey, transcriptionSettings);
setResult(transcription);
// Save to Firestore
const transcriptionsPath = 'transcriptions';
try {
await addDoc(collection(db, transcriptionsPath), {
userId: user.uid,
fileName: file.name,
fileType: file.type,
abcNotation: transcription.abcNotation,
instruments: transcription.instruments,
tempo: transcription.tempo,
key: transcription.key,
chords: transcription.chords,
lyrics: transcription.lyrics,
mode: 'music-lab',
createdAt: serverTimestamp()
});
} catch (error) {
// Persist failure is non-fatal; the in-memory result is still shown.
handleFirestoreError(error, OperationType.CREATE, transcriptionsPath);
}
toast.success('Transcription complete!');
} catch (error: any) {
console.error(error);
const msg = error.message || 'Failed to process file';
toast.error(msg);
} finally {
setIsProcessing(false);
}
};
// Copy the raw ABC notation and flash the "copied" indicator for 2s.
const copyToClipboard = () => {
if (!result) return;
navigator.clipboard.writeText(result.abcNotation);
setCopied(true);
toast.success('ABC notation copied to clipboard');
setTimeout(() => setCopied(false), 2000);
};
// Download the ABC notation as a plain-text .abc file.
const downloadABC = () => {
if (!result) return;
const blob = new Blob([result.abcNotation], { type: 'text/plain' });
const objectUrl = URL.createObjectURL(blob);
const anchor = document.createElement('a');
anchor.href = objectUrl;
anchor.download = `${file?.name || 'score'}.abc`;
anchor.click();
URL.revokeObjectURL(objectUrl);
};
// Convert the ABC notation to a MIDI file via abcjs and download it.
const downloadMIDI = async () => {
if (!result) return;
try {
// abcjs synth can generate MIDI data from ABC source.
const midiData = (abcjs.synth as any).getMidiFile(result.abcNotation);
const blob = new Blob([midiData], { type: 'audio/midi' });
const objectUrl = URL.createObjectURL(blob);
const anchor = document.createElement('a');
anchor.href = objectUrl;
anchor.download = `${file?.name || 'score'}.mid`;
anchor.click();
URL.revokeObjectURL(objectUrl);
toast.success('MIDI file generated');
} catch (error) {
console.error(error);
toast.error('Failed to generate MIDI');
}
};
// Case-insensitive archive filter: a history entry matches when the search
// term appears in its file name, musical key, or transcription mode.
// Lower-case the query once, instead of once per field per item.
const searchQuery = searchTerm.toLowerCase();
const filteredHistory = history.filter(item =>
item.fileName?.toLowerCase().includes(searchQuery) ||
item.key?.toLowerCase().includes(searchQuery) ||
item.mode?.toLowerCase().includes(searchQuery)
);
if (loading) {
return (
);
}
return (
{/* Header */}
ScoreScribe AI
The Ultimate Music Lab & Performance Suite
}>
setShowMemoRecorder(true)}
className="rounded-none text-[10px] font-mono uppercase tracking-widest flex items-center gap-2 text-purple-600"
>
Quick Idea
Cloud Sync Storage
Recordings are stored securely in your private Firebase account. You can download them anytime from the Archive.
setShowHowTo(true)}
className="rounded-none text-[10px] font-mono uppercase tracking-widest flex items-center gap-2"
>
How-To
{theme === 'light' ? : }
setShowSettings(true)} className="rounded-none">
{user ? (
{user.displayName}
{user.email}
Sign Out
) : (
Sign In
)}
{/* Settings Modal */}
{showSettings && (
setShowSettings(false)}
>
e.stopPropagation()}
>
Settings
setShowSettings(false)}>✕
Transcription Engine
Accuracy Preference
{['fast', 'balanced', 'high'].map((acc) => (
setTranscriptionSettings(prev => ({ ...prev, accuracy: acc as any }))}
className={`rounded-none text-[10px] font-mono uppercase h-8 ${transcriptionSettings.accuracy === acc ? 'bg-[#141414] text-[#E4E3E0] dark:bg-[#E4E3E0] dark:text-[#141414]' : ''}`}
>
{acc}
))}
Performance Settings
Lyric Look-ahead
{lyricLookAhead}s
setLyricLookAhead(parseFloat(e.target.value))}
className="w-full accent-[#141414] dark:accent-[#E4E3E0]"
/>
How early the next lyric appears before its note.
Keyboard Shortcuts
Play/Pause: Ctrl+P
Upload: Ctrl+U
Settings: Ctrl+S
)}
{/* Left Column: Controls & History */}
Input Source
{file ? (
<>
{file.type.startsWith('video') ?
: }
{file.name}
{(file.size / (1024 * 1024)).toFixed(2)} MB
) : (
<>
Drop audio or video file here
)}
AI Vocal Remover
How it works:
Uses center-channel phase cancellation to suppress lead vocals. Best for stereo studio recordings.
Note: For professional source separation (like Moises/Lalal), dedicated AI models are required. This is a high-performance client-side approximation.
Phase cancellation + Bass preservation
setVocalRemoverEnabled(!vocalRemoverEnabled)}
className={`w-10 h-5 rounded-full relative transition-colors ${vocalRemoverEnabled ? 'bg-purple-600' : 'bg-gray-300'}`}
>
{!user && (
Please sign in to transcribe files.
)}
{isProcessing && !isPerformanceAudioMode ? (
<>
Analyzing...
) : (
<>
Transcribe to Score
)}
}>
{isProcessing && isPerformanceAudioMode ? (
<>
Isolating...
) : (
<>
Performance Suite - Vocal Remover
)}
Performance Suite
Removes vocals from your original file and uses the processed audio as the backing track for Karaoke. This provides a high-quality studio experience compared to MIDI synthesis.
— or —
Explore with Demo Score
Archive
Scores
Ideas
setSearchTerm(e.target.value)}
className="h-8 text-[10px] font-mono rounded-none border-[#141414]/20 dark:border-[#E4E3E0]/20 bg-transparent"
/>
{filteredHistory.length === 0 ? (
{searchTerm ? 'No matches found' : 'No history found'}
) : (
filteredHistory.map((item) => (
{
stopPlayback();
setResult(item);
setIsPerformanceAudioMode(item.mode === 'performance-suite');
if (item.mode !== 'performance-suite') {
setProcessedAudioUrl(null);
}
}}
className="p-4 border-b border-[#141414] dark:border-[#E4E3E0] hover:bg-[#141414] dark:hover:bg-[#E4E3E0] hover:text-[#E4E3E0] dark:hover:text-[#141414] cursor-pointer transition-colors group relative"
>
{item.fileName}
{item.createdAt?.toDate().toLocaleDateString()}
{item.key}
{item.tempo} BPM
{item.mode && (
{item.mode === 'performance-suite' ? 'Performance' : 'Music Lab'}
)}
handleDeleteHistory(e, item.id)}
className="absolute right-4 bottom-4 opacity-0 group-hover:opacity-100 transition-opacity p-1 hover:bg-red-500 hover:text-white"
>
))
)}
{voiceMemos.length === 0 ? (
No musical ideas captured yet.
) : (
voiceMemos.map((memo) => (
{memo.title}
{memo.createdAt?.toDate().toLocaleDateString()}
{
const a = document.createElement('a');
a.href = `data:audio/webm;base64,${memo.audioData}`;
a.download = `${memo.title}.webm`;
a.click();
}}
className="p-1 hover:bg-purple-600 hover:text-white transition-colors"
title="Download"
>
deleteVoiceMemo(memo.id)}
className="p-1 hover:bg-red-500 hover:text-white transition-colors"
title="Delete"
>
))
)}
{/* Right Column: Results */}
Musical Score
Notation Base: ABC 2.1
We use ABC Notation, a text-based standard for music. It allows our AI to generate complex scores efficiently.
Why so rigid?
AI transcription often quantizes notes to standard durations. We've updated our engine to capture more "soul" using ties, triplets, and articulations, but complex human timing (rubato) is still a frontier!
Rendered ABC Notation
{result && (
{isPlaying ? : }
{isPlaying ? 'Stop' : 'Play'}
{copied ? : }
Copy
ABC
MIDI
Share
)}
{result && (
Speed
{[0.5, 0.75, 1, 1.25, 1.5].map(s => (
{
stopPlayback();
setPlaybackSpeed(s);
addToHistory(s, midiInstrument, transpose);
}}
className={`px-2 py-1 text-[10px] font-mono border-r last:border-r-0 border-[#141414] dark:border-[#E4E3E0] transition-colors ${
playbackSpeed === s ? 'bg-[#141414] text-[#E4E3E0] dark:bg-[#E4E3E0] dark:text-[#141414]' : 'hover:bg-[#141414]/10 dark:hover:bg-[#E4E3E0]/10'
}`}
>
{s}x
))}
Transpose
{
const val = transpose - 1;
setTranspose(val);
addToHistory(playbackSpeed, midiInstrument, val);
stopPlayback();
}}
className="px-2 py-1 text-[10px] font-mono border-r border-[#141414] dark:border-[#E4E3E0] hover:bg-[#141414]/10 dark:hover:bg-[#E4E3E0]/10"
>
-1
{transpose > 0 ? `+${transpose}` : transpose}
{
const val = transpose + 1;
setTranspose(val);
addToHistory(playbackSpeed, midiInstrument, val);
stopPlayback();
}}
className="px-2 py-1 text-[10px] font-mono border-l border-[#141414] dark:border-[#E4E3E0] hover:bg-[#141414]/10 dark:hover:bg-[#E4E3E0]/10"
>
+1
= playbackHistory.length - 1}
onClick={redoPlayback}
className="h-7 w-7 p-0 rounded-none border border-[#141414] dark:border-[#E4E3E0] rotate-180"
>
Record
{!isRecording ? (
Start
) : (
Stop
)}
{recordedBlob && (
Save
)}
setShowLyrics(!showLyrics)}
className={`h-7 text-[10px] font-mono rounded-none border-[#141414] dark:border-[#E4E3E0] ${showLyrics ? 'bg-[#141414] text-[#E4E3E0] dark:bg-[#E4E3E0] dark:text-[#141414]' : ''}`}
>
{showLyrics ? : }
Lyrics
{
if (result?.lyrics) {
setEditedLyrics(result.lyrics.map(l => l.text).join('\n'));
setIsEditingLyrics(true);
}
}}
className="h-7 text-[10px] font-mono rounded-none border-[#141414] dark:border-[#E4E3E0] hover:bg-purple-50 hover:text-purple-600"
>
Edit
setShowChords(!showChords)}
className={`h-7 text-[10px] font-mono rounded-none border-[#141414] dark:border-[#E4E3E0] ${showChords ? 'bg-[#141414] text-[#E4E3E0] dark:bg-[#E4E3E0] dark:text-[#141414]' : ''}`}
>
{showChords ? : }
Chords
setIsKaraokeMode(!isKaraokeMode)}
className={`h-7 text-[10px] font-mono rounded-none border-[#141414] dark:border-[#E4E3E0] ${isKaraokeMode ? 'bg-purple-600 text-white border-purple-600' : 'border-purple-600/50 text-purple-600 hover:bg-purple-50'}`}
>
{isKaraokeMode ? 'Exit Karaoke' : 'Enter Karaoke Mode'}
)}
{!result && !isProcessing && (
No score generated yet
Upload a file to begin the transcription process
— or try it now —
Load Interactive Duet Demo
)}
{isProcessing && (
AI Analysis in Progress
Decomposing frequencies, identifying instruments, and mapping notation...
)}
{result && (
{isKaraokeMode ? (
{isPerformanceAudioMode ? 'Performance Suite Active' : 'Karaoke Mode'}
{isPerformanceAudioMode && (
Backing Track: Original Audio
)}
Voice 1
Voice 2
{
stopPlayback();
setCurrentLyricIndex(-1);
setPlaybackProgress(0);
toast.info('Performance reset');
}} className="h-5 px-2 text-[8px] font-mono uppercase border border-[#E4E3E0]/20 text-[#E4E3E0] hover:bg-white/10 rounded-none ml-2" > Reset
{file?.name || 'Current Session'}
Key: {result.key}
Tempo: {Math.round(result.tempo * playbackSpeed)} BPM
{!isPlaying && (
Start Performance
)}
{isPlaying && result.lyrics && result.lyrics.length > 0 ? (
{result.lyrics[currentLyricIndex]?.text || '...'}
{result.lyrics[currentLyricIndex]?.text || '...'}
{result.lyrics[currentLyricIndex + 1]?.text || ''}
) : isPlaying && (
No lyrics detected for this track
)}
{showChords && result.chords && (
{result.chords.map((chord, i) => (
{chord}
))}
)}
{isPlaying && (
)}
) : (
<>
Mode
{isPerformanceAudioMode ? 'Performance Active' : 'Practice Active'}
{isPerformanceAudioMode ? 'Performance Mode' : 'Practice Mode'}
{isPerformanceAudioMode
? 'You are using the original audio (minus vocals) as your backing track. MIDI instrument overrides are disabled.'
: 'You are currently in the Music Lab. Use the controls above to change instruments, adjust tempo, or transpose keys for practice.'}
Tempo
{Math.round(result.tempo * playbackSpeed)} BPM
Instruments
{result.instruments.map((inst, i) => (
{inst}
Detected Instrument
Our AI identified "{inst}" in the source recording. In Practice Mode, you can override this with the MIDI selector above.
))}
{showChords && (
Chords
{result.chords?.map((chord, i) => (
{chord}
))}
)}
{showLyrics && result.lyrics && result.lyrics.length > 0 && (
Lyrics Highlighter
{result.lyrics.map((line, i) => (
{line.text}
{currentLyricIndex === i && (
{line.text}
)}
))}
)}
{isPlaying && (
<>
)}
)}
)}
Note: AI-generated scores may require manual refinement. MIDI output is generated based on ABC notation.
{/* Footer */}
{/* Quick Idea Recorder Modal */}
{showMemoRecorder && (
!isRecordingMemo && setShowMemoRecorder(false)}
>
e.stopPropagation()}
>
Capture Musical Idea
{!isRecordingMemo && setShowMemoRecorder(false)}>✕ }
{isRecordingMemo ? (
) : (
)}
{isRecordingMemo ? 'Recording in progress...' : 'Ready to record'}
{isRecordingMemo ? 'Capture your melody, riff, or hum' : 'Press the button below to start'}
{isRecordingMemo ? (
<>
Stop & Sync
) : (
<>
Start Recording
)}
{isRecordingMemo && (
Max 1 minute for cloud sync
)}
Ideas are synced to your private archive and can be accessed from the "Archive" tab.
)}
{/* Lyric Editor Modal */}
{isEditingLyrics && (
setIsEditingLyrics(false)}
>
e.stopPropagation()}
>
Edit Lyrics
setIsEditingLyrics(false)}>✕
One line per entry. Timings will be preserved.