'use client';
import * as fal from '@fal-ai/serverless-client';
import { useCallback, useMemo, useState } from 'react';
fal.config({
  // credentials: 'FAL_KEY_ID:FAL_KEY_SECRET',
  requestMiddleware: fal.withProxy({
    targetUrl: '/api/fal/proxy',
  }),
});
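// The proxy middleware above keeps the FAL credentials on the server instead of the
// browser. A minimal sketch of the matching route handler, assuming a Next.js App
// Router project and the @fal-ai/serverless-proxy package (the file path below is an
// assumption derived from the targetUrl, not part of this file):
//
//   // app/api/fal/proxy/route.ts
//   import { route } from '@fal-ai/serverless-proxy/nextjs';
//   export const { GET, POST } = route;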
type ErrorProps = {
  error: any;
};

function Error(props: ErrorProps) {
  if (!props.error) {
    return null;
  }
  return (
    <div role="alert">
      <strong>Error</strong> {props.error.message}
    </div>
  );
}
type RecorderOptions = {
  maxDuration?: number;
};

function useMediaRecorder({ maxDuration = 10000 }: RecorderOptions = {}) {
  const [isRecording, setIsRecording] = useState(false);
  const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder | null>(
    null
  );
  const record = useCallback(async () => {
    setIsRecording(true);
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const audioChunks: BlobPart[] = [];
    const recorder = new MediaRecorder(stream);
    setMediaRecorder(recorder);
    return new Promise<File>((resolve, reject) => {
      try {
        // Collect audio data as it becomes available
        recorder.addEventListener('dataavailable', (event) => {
          audioChunks.push(event.data);
        });
        // When recording stops, wrap the chunks in a File and resolve
        recorder.addEventListener('stop', async () => {
          const fileOptions = { type: 'audio/wav' };
          const audioBlob = new Blob(audioChunks, fileOptions);
          const audioFile = new File(
            [audioBlob],
            `recording_${Date.now()}.wav`,
            fileOptions
          );
          setIsRecording(false);
          resolve(audioFile);
        });
        // Stop automatically after maxDuration, unless already stopped manually
        setTimeout(() => {
          if (recorder.state !== 'inactive') {
            recorder.stop();
            recorder.stream.getTracks().forEach((track) => track.stop());
          }
        }, maxDuration);
        recorder.start();
      } catch (error) {
        reject(error);
      }
    });
  }, [maxDuration]);
  const stopRecording = useCallback(() => {
    setIsRecording(false);
    mediaRecorder?.stop();
    mediaRecorder?.stream.getTracks().forEach((track) => track.stop());
  }, [mediaRecorder]);

  return { record, stopRecording, isRecording };
}
export default function WhisperDemo() {
  const [loading, setLoading] = useState(false);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const [error, setError] = useState<any>(null);
  const [logs, setLogs] = useState<string[]>([]);
  const [audioFile, setAudioFile] = useState<File | null>(null);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const [result, setResult] = useState<any>(null);
  const [elapsedTime, setElapsedTime] = useState(0);
  const { record, stopRecording, isRecording } = useMediaRecorder();

  const reset = () => {
    setLoading(false);
    setError(null);
    setLogs([]);
    setElapsedTime(0);
    setResult(null);
  };
  // Create a local object URL so the recorded audio can be played back
  const audioFileLocalUrl = useMemo(() => {
    if (!audioFile) {
      return null;
    }
    return URL.createObjectURL(audioFile);
  }, [audioFile]);
  const transcribeAudio = async (audioFile: File) => {
    reset();
    setLoading(true);
    const start = Date.now();
    try {
      const result = await fal.subscribe('110602490-whisper', {
        input: {
          file_name: 'recording.wav',
          // The fal client accepts a File here and uploads it automatically
          audio_url: audioFile,
        },
        pollInterval: 1000,
        logs: true,
        onQueueUpdate(update) {
          setElapsedTime(Date.now() - start);
          if (
            update.status === 'IN_PROGRESS' ||
            update.status === 'COMPLETED'
          ) {
            setLogs((update.logs || []).map((log) => log.message));
          }
        },
      });
      setResult(result);
    } catch (error: any) {
      setError(error);
    } finally {
      setLoading(false);
      setElapsedTime(Date.now() - start);
    }
  };
  return (
    <main>
      <h1>
        Hello <code>fal</code> and <code>whisper</code>
      </h1>
      <button
        disabled={loading}
        onClick={() => (isRecording ? stopRecording() : record().then(setAudioFile).catch(setError))}
      >
        {isRecording ? 'Stop recording' : 'Record'}
      </button>
      <button
        disabled={loading || !audioFile}
        onClick={() => audioFile && transcribeAudio(audioFile)}
      >
        {loading ? 'Transcribing...' : 'Transcribe'}
      </button>
      {audioFileLocalUrl && <audio controls src={audioFileLocalUrl} />}
      <Error error={error} />
      <h2>JSON Result</h2>
      <p>{`Elapsed Time (seconds): ${(elapsedTime / 1000).toFixed(2)}`}</p>
      <pre>{result ? JSON.stringify(result, null, 2) : '// result pending...'}</pre>
      <h2>Logs</h2>
      <pre>{logs.filter(Boolean).join('\n')}</pre>
    </main>
  );
}