'use client';

import * as fal from '@fal-ai/serverless-client';
import { useState } from 'react';

// Send client-side fal requests through the app's proxy route so the
// API credentials stay on the server.
fal.config({
  proxyUrl: '/api/fal/proxy',
});

type LlavaInput = {
  prompt: string;
  image_url: string;
  max_new_tokens?: number;
  temperature?: number;
  top_p?: number;
};

type LlavaOutput = {
  output: string;
  partial: boolean;
  stats: {
    num_input_tokens: number;
    num_output_tokens: number;
  };
};

export default function StreamingDemo() {
  const [answer, setAnswer] = useState('');
  const [streamStatus, setStreamStatus] = useState('idle');

  const runInference = async () => {
    const stream = await fal.stream<LlavaInput, LlavaOutput>(
      'fal-ai/llavav15-13b',
      {
        input: {
          prompt: 'Do you know who drew this picture and what is the name of it?',
          image_url: 'https://llava-vl.github.io/static/images/monalisa.jpg',
          max_new_tokens: 100,
          temperature: 0.2,
          top_p: 1,
        },
      }
    );
    setStreamStatus('running');

    // Show each partial answer as soon as it arrives.
    for await (const partial of stream) {
      setAnswer(partial.output);
    }

    // done() resolves once the stream has finished, with the final result.
    const result = await stream.done();
    setStreamStatus('done');
    setAnswer(result.output);
  };

  return (
    <main>
      <h1>
        Hello <code>fal</code> +{' '}
        <code>streaming</code>
      </h1>

      {/* Kick off the streaming request */}
      <button type="button" onClick={runInference}>
        Run inference
      </button>

      <h2>Answer</h2>
      <p>
        streaming: <code>{streamStatus}</code>
      </p>
      <p>{answer}</p>
    </main>
  );
}
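
// The proxyUrl above assumes a matching route handler that forwards requests
// to fal with the server-side FAL_KEY credential. A minimal sketch of that
// handler, using the companion @fal-ai/serverless-proxy package (file path
// assumed: app/api/fal/proxy/route.ts):
//
//   import { route } from '@fal-ai/serverless-proxy/nextjs';
//
//   // Expose the proxy's GET and POST handlers on this route.
//   export const { GET, POST } = route;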