From 92e554a527cb4d5dfcdbe7dac339d318ad79502f Mon Sep 17 00:00:00 2001
From: badayvedat
Date: Tue, 23 Apr 2024 05:19:46 +0300
Subject: [PATCH] feat: add comfy server examples

---
 .../app/comfy/image_to_image/page.tsx         | 205 ++++++++++++++++++
 .../app/comfy/image_to_image/workflow.tsx     | 102 +++++++++
 .../app/comfy/image_to_video/page.tsx         | 183 ++++++++++++++++
 .../app/comfy/image_to_video/workflow.tsx     |  91 ++++++++
 .../demo-nextjs-app-router/app/comfy/page.tsx |  40 ++++
 .../app/comfy/text_to_image/page.tsx          | 173 +++++++++++++++
 .../app/comfy/text_to_image/workflow.tsx      | 103 +++++++++
 .../pages/new_file.tsx                        |  86 ++++++++
 .../pages/workflow.tsx                        |  91 ++++++++
 9 files changed, 1074 insertions(+)
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/image_to_image/page.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/image_to_image/workflow.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/image_to_video/page.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/image_to_video/workflow.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/page.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/text_to_image/page.tsx
 create mode 100644 apps/demo-nextjs-app-router/app/comfy/text_to_image/workflow.tsx
 create mode 100644 apps/demo-nextjs-page-router/pages/new_file.tsx
 create mode 100644 apps/demo-nextjs-page-router/pages/workflow.tsx

diff --git a/apps/demo-nextjs-app-router/app/comfy/image_to_image/page.tsx b/apps/demo-nextjs-app-router/app/comfy/image_to_image/page.tsx
new file mode 100644
index 0000000..cadc6b6
--- /dev/null
+++ b/apps/demo-nextjs-app-router/app/comfy/image_to_image/page.tsx
@@ -0,0 +1,205 @@
+'use client';
+
+import * as fal from '@fal-ai/serverless-client';
+import { useMemo, useState } from 'react';
+import getWorkflow from './workflow';
+
+// @snippet:start(client.config)
+fal.config({
+  proxyUrl: '/api/fal/proxy', // the built-in Next.js proxy
+  // proxyUrl: 'http://localhost:3333/api/fal/proxy', // or your own external proxy
+});
+// @snippet:end
+
+// @snippet:start(client.result.type)
+type Image = {
+  filename: string;
+  subfolder: string;
+  type: string;
+  url: string;
+};
+
+type Result = {
+  url: string;
+  outputs: Record[];
+  images: Image[];
+};
+// @snippet:end
+
+type ErrorProps = {
+  error: any;
+};
+
+function Error(props: ErrorProps) {
+  if (!props.error) {
+    return null;
+  }
+  return (
+
+ Error {props.error.message} +
+ ); +} + +const DEFAULT_PROMPT = + 'photograph of victorian woman with wings, sky clouds, meadow grass'; + +export function Index() { + // @snippet:start("client.ui.state") + // Input state + const [prompt, setPrompt] = useState(DEFAULT_PROMPT); + const [imageFile, setImageFile] = useState(null); + // Result state + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [result, setResult] = useState(null); + const [logs, setLogs] = useState([]); + const [elapsedTime, setElapsedTime] = useState(0); + // @snippet:end + const video = useMemo(() => { + if (!result) { + return null; + } + return result; + }, [result]); + + const reset = () => { + setLoading(false); + setError(null); + setResult(null); + setLogs([]); + setElapsedTime(0); + }; + + const getImageURL = (result: Result) => { + return result.outputs[9].images[0]; + }; + + const generateVideo = async () => { + reset(); + // @snippet:start("client.queue.subscribe") + setLoading(true); + const start = Date.now(); + try { + const result: Result = await fal.subscribe('fal-ai/comfy-server', { + input: getWorkflow({ + prompt: prompt, + loadimage_1: imageFile, + }), + pollInterval: 3000, // Default is 1000 (every 1s) + logs: true, + onQueueUpdate(update) { + setElapsedTime(Date.now() - start); + if ( + update.status === 'IN_PROGRESS' || + update.status === 'COMPLETED' + ) { + setLogs((update.logs || []).map((log) => log.message)); + } + }, + }); + setResult(getImageURL(result)); + } catch (error: any) { + setError(error); + } finally { + setLoading(false); + setElapsedTime(Date.now() - start); + } + // @snippet:end + }; + return ( +
+
+

+ Comfy SD1.5 - Image to Image +

+
+ +
+
+ {imageFile && ( + + )} +
+ + setImageFile(e.target.files?.[0] ?? null)} + /> +
+
+
+ + setPrompt(e.target.value)} + onBlur={(e) => setPrompt(e.target.value.trim())} + /> +
+ + + + + +
+
+ {video && ( + // eslint-disable-next-line @next/next/no-img-element + + )} +
+
+

JSON Result

+

+ {`Elapsed Time (seconds): ${(elapsedTime / 1000).toFixed(2)}`} +

+
+              {result
+                ? JSON.stringify(result, null, 2)
+                : '// result pending...'}
+            
+
+ +
+

Logs

+
+              {logs.filter(Boolean).join('\n')}
+            
+
+
+
+
+ ); +} + +export default Index; diff --git a/apps/demo-nextjs-app-router/app/comfy/image_to_image/workflow.tsx b/apps/demo-nextjs-app-router/app/comfy/image_to_image/workflow.tsx new file mode 100644 index 0000000..2e098f0 --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/image_to_image/workflow.tsx @@ -0,0 +1,102 @@ +// This workflow is generated with ComfyUI-fal +const WORKFLOW = { + prompt: { + '3': { + inputs: { + seed: 280823642470253, + steps: 20, + cfg: 8, + sampler_name: 'dpmpp_2m', + scheduler: 'normal', + denoise: 0.8700000000000001, + model: ['14', 0], + positive: ['6', 0], + negative: ['7', 0], + latent_image: ['12', 0], + }, + class_type: 'KSampler', + }, + '6': { + inputs: { + text: ['15', 0], + clip: ['14', 1], + }, + class_type: 'CLIPTextEncode', + }, + '7': { + inputs: { + text: 'watermark, text\n', + clip: ['14', 1], + }, + class_type: 'CLIPTextEncode', + }, + '8': { + inputs: { + samples: ['3', 0], + vae: ['14', 2], + }, + class_type: 'VAEDecode', + }, + '9': { + inputs: { + filename_prefix: 'ComfyUI', + images: ['8', 0], + }, + class_type: 'SaveImage', + }, + '10': { + inputs: { + image: 'example.png', + upload: 'image', + }, + class_type: 'LoadImage', + }, + '12': { + inputs: { + pixels: ['10', 0], + vae: ['14', 2], + }, + class_type: 'VAEEncode', + }, + '14': { + inputs: { + ckpt_name: 'v1-5-pruned-emaonly.ckpt', + }, + class_type: 'CheckpointLoaderSimple', + }, + '15': { + inputs: { + name: 'prompt', + value: + 'photograph of victorian woman with wings, sky clouds, meadow grass\n', + }, + class_type: 'StringInput_fal', + }, + }, + extra_data: {}, + fal_inputs_dev_info: { + loadimage_1: { + key: ['10', 'inputs', 'image'], + class_type: 'LoadImage', + }, + prompt: { + key: ['15', 'inputs', 'value'], + class_type: 'StringInput_fal', + }, + }, + fal_inputs: { + loadimage_1: 'example_url', + prompt: + 'photograph of victorian woman with wings, sky clouds, meadow grass\n', + }, +}; + +export default function getWorkflow(object: any) { + let newWorkflow = JSON.parse(JSON.stringify(WORKFLOW)); + newWorkflow.fal_inputs = { + ...newWorkflow.fal_inputs, + ...object, + }; + + return newWorkflow; +} diff --git a/apps/demo-nextjs-app-router/app/comfy/image_to_video/page.tsx b/apps/demo-nextjs-app-router/app/comfy/image_to_video/page.tsx new file mode 100644 index 0000000..98239a1 --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/image_to_video/page.tsx @@ -0,0 +1,183 @@ +'use client'; + +import * as fal from '@fal-ai/serverless-client'; +import { useMemo, useState } from 'react'; +import getWorkflow from './workflow'; + +// @snippet:start(client.config) +fal.config({ + proxyUrl: '/api/fal/proxy', // the built-int nextjs proxy + // proxyUrl: 'http://localhost:3333/api/fal/proxy', // or your own external proxy +}); +// @snippet:end + +// @snippet:start(client.result.type) +type Image = { + filename: string; + subfolder: string; + type: string; + url: string; +}; + +type Result = { + url: string; + outputs: Record[]; + images: Image[]; +}; +// @snippet:end + +type ErrorProps = { + error: any; +}; + +function Error(props: ErrorProps) { + if (!props.error) { + return null; + } + return ( +
+ Error {props.error.message} +
+ ); +} + +export function Index() { + // @snippet:start("client.ui.state") + // Input state + const [imageFile, setImageFile] = useState(null); + // Result state + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [result, setResult] = useState(null); + const [logs, setLogs] = useState([]); + const [elapsedTime, setElapsedTime] = useState(0); + // @snippet:end + const video = useMemo(() => { + if (!result) { + return null; + } + return result; + }, [result]); + + const reset = () => { + setLoading(false); + setError(null); + setResult(null); + setLogs([]); + setElapsedTime(0); + }; + + const getImageURL = (result: Result) => { + return result.outputs[10].images[0]; + }; + + const generateVideo = async () => { + reset(); + // @snippet:start("client.queue.subscribe") + setLoading(true); + const start = Date.now(); + try { + const result: Result = await fal.subscribe('fal-ai/comfy-server', { + input: getWorkflow({ + loadimage_1: imageFile, + }), + pollInterval: 3000, // Default is 1000 (every 1s) + logs: true, + onQueueUpdate(update) { + setElapsedTime(Date.now() - start); + if ( + update.status === 'IN_PROGRESS' || + update.status === 'COMPLETED' + ) { + setLogs((update.logs || []).map((log) => log.message)); + } + }, + }); + setResult(getImageURL(result)); + } catch (error: any) { + setError(error); + } finally { + setLoading(false); + setElapsedTime(Date.now() - start); + } + // @snippet:end + }; + return ( +
+
+

Comfy SVD - Image to Video

+
+ +
+
+ {imageFile && ( + + )} +
+ + setImageFile(e.target.files?.[0] ?? null)} + /> +
+
+ + + + + +
+
+ {video && ( + // eslint-disable-next-line @next/next/no-img-element + + )} +
+
+

JSON Result

+

+ {`Elapsed Time (seconds): ${(elapsedTime / 1000).toFixed(2)}`} +

+
+              {result
+                ? JSON.stringify(result, null, 2)
+                : '// result pending...'}
+            
+
+ +
+

Logs

+
+              {logs.filter(Boolean).join('\n')}
+            
+
+
+
+
+ ); +} + +export default Index; diff --git a/apps/demo-nextjs-app-router/app/comfy/image_to_video/workflow.tsx b/apps/demo-nextjs-app-router/app/comfy/image_to_video/workflow.tsx new file mode 100644 index 0000000..d4ea952 --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/image_to_video/workflow.tsx @@ -0,0 +1,91 @@ +const WORKFLOW = { + prompt: { + '3': { + inputs: { + seed: 351912937281939, + steps: 20, + cfg: 2.5, + sampler_name: 'euler', + scheduler: 'karras', + denoise: 1, + model: ['14', 0], + positive: ['12', 0], + negative: ['12', 1], + latent_image: ['12', 2], + }, + class_type: 'KSampler', + }, + '8': { + inputs: { + samples: ['3', 0], + vae: ['15', 2], + }, + class_type: 'VAEDecode', + }, + '10': { + inputs: { + filename_prefix: 'ComfyUI', + fps: 10, + lossless: false, + quality: 85, + method: 'default', + images: ['8', 0], + }, + class_type: 'SaveAnimatedWEBP', + }, + '12': { + inputs: { + width: 1024, + height: 576, + video_frames: 14, + motion_bucket_id: 127, + fps: 6, + augmentation_level: 0, + clip_vision: ['15', 1], + init_image: ['23', 0], + vae: ['15', 2], + }, + class_type: 'SVD_img2vid_Conditioning', + }, + '14': { + inputs: { + min_cfg: 1, + model: ['15', 0], + }, + class_type: 'VideoLinearCFGGuidance', + }, + '15': { + inputs: { + ckpt_name: 'svd.safetensors', + }, + class_type: 'ImageOnlyCheckpointLoader', + }, + '23': { + inputs: { + image: '18.png', + upload: 'image', + }, + class_type: 'LoadImage', + }, + }, + extra_data: {}, + fal_inputs_dev_info: { + loadimage_1: { + key: ['23', 'inputs', 'image'], + class_type: 'LoadImage', + }, + }, + fal_inputs: { + loadimage_1: 'example_url', + }, +}; + +export default function getWorkflow(object: any) { + let newWorkflow = JSON.parse(JSON.stringify(WORKFLOW)); + newWorkflow.fal_inputs = { + ...newWorkflow.fal_inputs, + ...object, + }; + + return newWorkflow; +} diff --git a/apps/demo-nextjs-app-router/app/comfy/page.tsx b/apps/demo-nextjs-app-router/app/comfy/page.tsx new file mode 100644 index 0000000..70e6d93 --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/page.tsx @@ -0,0 +1,40 @@ +'use client'; + +import { useRouter } from 'next/navigation'; + +export default function Index() { + const router = useRouter(); // Use correct router + return ( +
+
+

+ Serverless Comfy Workflow Examples powered by{' '} + fal +

+

+ Learn how to use the fal-js client to execute Comfy workflows. +

+
+ + + +
+
+
+ ); +} diff --git a/apps/demo-nextjs-app-router/app/comfy/text_to_image/page.tsx b/apps/demo-nextjs-app-router/app/comfy/text_to_image/page.tsx new file mode 100644 index 0000000..31d070e --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/text_to_image/page.tsx @@ -0,0 +1,173 @@ +'use client'; + +import * as fal from '@fal-ai/serverless-client'; +import { useMemo, useState } from 'react'; +import getWorkflow from './workflow'; + +// @snippet:start(client.config) +fal.config({ + proxyUrl: '/api/fal/proxy', // the built-int nextjs proxy + // proxyUrl: 'http://localhost:3333/api/fal/proxy', // or your own external proxy +}); +// @snippet:end + +// @snippet:start(client.result.type) +type Image = { + filename: string; + subfolder: string; + type: string; + url: string; +}; + +type Result = { + url: string; + outputs: Record[]; + images: Image[]; +}; +// @snippet:end + +type ErrorProps = { + error: any; +}; + +function Error(props: ErrorProps) { + if (!props.error) { + return null; + } + return ( +
+ Error {props.error.message} +
+ ); +} + +const DEFAULT_PROMPT = + 'a city landscape of a cyberpunk metropolis, raining, purple, pink and teal neon lights, highly detailed, uhd'; + +export function Index() { + // @snippet:start("client.ui.state") + // Input state + const [prompt, setPrompt] = useState(DEFAULT_PROMPT); + // Result state + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [result, setResult] = useState(null); + const [logs, setLogs] = useState([]); + const [elapsedTime, setElapsedTime] = useState(0); + // @snippet:end + const image = useMemo(() => { + if (!result) { + return null; + } + return result; + }, [result]); + + const reset = () => { + setLoading(false); + setError(null); + setResult(null); + setLogs([]); + setElapsedTime(0); + }; + + const getImageURL = (result: Result) => { + return result.outputs[9].images[0]; + }; + + const generateImage = async () => { + reset(); + // @snippet:start("client.queue.subscribe") + setLoading(true); + const start = Date.now(); + try { + const result: Result = await fal.subscribe('fal-ai/comfy-server', { + input: getWorkflow({}), + pollInterval: 3000, // Default is 1000 (every 1s) + logs: true, + onQueueUpdate(update) { + setElapsedTime(Date.now() - start); + if ( + update.status === 'IN_PROGRESS' || + update.status === 'COMPLETED' + ) { + setLogs((update.logs || []).map((log) => log.message)); + } + }, + }); + setResult(getImageURL(result)); + } catch (error: any) { + setError(error); + } finally { + setLoading(false); + setElapsedTime(Date.now() - start); + } + // @snippet:end + }; + return ( +
+
+

Comfy SDXL - Text to Image

+
+ + setPrompt(e.target.value)} + onBlur={(e) => setPrompt(e.target.value.trim())} + /> +
+ + + + + +
+
+ {image && ( + // eslint-disable-next-line @next/next/no-img-element + + )} +
+
+

JSON Result

+

+ {`Elapsed Time (seconds): ${(elapsedTime / 1000).toFixed(2)}`} +

+
+              {result
+                ? JSON.stringify(result, null, 2)
+                : '// result pending...'}
+            
+
+ +
+

Logs

+
+              {logs.filter(Boolean).join('\n')}
+            
+
+
+
+
+ ); +} + +export default Index; diff --git a/apps/demo-nextjs-app-router/app/comfy/text_to_image/workflow.tsx b/apps/demo-nextjs-app-router/app/comfy/text_to_image/workflow.tsx new file mode 100644 index 0000000..3299e72 --- /dev/null +++ b/apps/demo-nextjs-app-router/app/comfy/text_to_image/workflow.tsx @@ -0,0 +1,103 @@ +// This workflow is generated with ComfyUI-fal +const WORKFLOW = { + prompt: { + '3': { + inputs: { + seed: 704126934460886, + steps: 20, + cfg: 8, + sampler_name: 'euler', + scheduler: 'normal', + denoise: 1, + model: ['4', 0], + positive: ['6', 0], + negative: ['7', 0], + latent_image: ['5', 0], + }, + class_type: 'KSampler', + }, + '4': { + inputs: { + ckpt_name: 'sd_xl_1.0.safetensors', + }, + class_type: 'CheckpointLoaderSimple', + }, + '5': { + inputs: { + width: 1024, + height: 1024, + batch_size: 1, + }, + class_type: 'EmptyLatentImage', + }, + '6': { + inputs: { + text: ['10', 0], + clip: ['4', 1], + }, + class_type: 'CLIPTextEncode', + }, + '7': { + inputs: { + text: ['11', 0], + clip: ['4', 1], + }, + class_type: 'CLIPTextEncode', + }, + '8': { + inputs: { + samples: ['3', 0], + vae: ['4', 2], + }, + class_type: 'VAEDecode', + }, + '9': { + inputs: { + filename_prefix: 'ComfyUI', + images: ['8', 0], + }, + class_type: 'SaveImage', + }, + '10': { + inputs: { + name: 'prompt', + value: + 'beautiful scenery nature glass bottle landscape, , purple galaxy bottle,', + }, + class_type: 'StringInput_fal', + }, + '11': { + inputs: { + name: 'negative_prompt', + value: 'text, watermark', + }, + class_type: 'StringInput_fal', + }, + }, + extra_data: {}, + fal_inputs_dev_info: { + prompt: { + key: ['10', 'inputs', 'value'], + class_type: 'StringInput_fal', + }, + negative_prompt: { + key: ['11', 'inputs', 'value'], + class_type: 'StringInput_fal', + }, + }, + fal_inputs: { + prompt: + 'beautiful scenery nature glass bottle landscape, , purple galaxy bottle,', + negative_prompt: 'text, watermark', + }, +}; + +export default function getWorkflow(object: any) { + let newWorkflow = JSON.parse(JSON.stringify(WORKFLOW)); + newWorkflow.fal_inputs = { + ...newWorkflow.fal_inputs, + ...object, + }; + + return newWorkflow; +} diff --git a/apps/demo-nextjs-page-router/pages/new_file.tsx b/apps/demo-nextjs-page-router/pages/new_file.tsx new file mode 100644 index 0000000..a80c309 --- /dev/null +++ b/apps/demo-nextjs-page-router/pages/new_file.tsx @@ -0,0 +1,86 @@ +import * as fal from '@fal-ai/serverless-client'; + +// This is a simple example of how to use the fal-js SDK to execute a workflow. 
+const result = fal.subscribe('fal-ai/comfy-server', {
+  input: getWorkflow({}),
+  logs: true,
+  onQueueUpdate: (update) => {
+    if (update.status === 'IN_PROGRESS') {
+      (update.logs || []).map((log) => log.message).forEach(console.log);
+    }
+  },
+});
+
+// This workflow is generated with ComfyUI-fal
+const WORKFLOW = {
+  prompt: {
+    '3': {
+      inputs: {
+        seed: 156680208700286,
+        steps: 20,
+        cfg: 8,
+        sampler_name: 'euler',
+        scheduler: 'normal',
+        denoise: 1,
+        model: ['4', 0],
+        positive: ['6', 0],
+        negative: ['7', 0],
+        latent_image: ['5', 0],
+      },
+      class_type: 'KSampler',
+    },
+    '4': {
+      inputs: {
+        ckpt_name: 'v1-5-pruned-emaonly.ckpt',
+      },
+      class_type: 'CheckpointLoaderSimple',
+    },
+    '5': {
+      inputs: {
+        width: 512,
+        height: 512,
+        batch_size: 1,
+      },
+      class_type: 'EmptyLatentImage',
+    },
+    '6': {
+      inputs: {
+        clip: ['4', 1],
+      },
+      class_type: 'CLIPTextEncode',
+    },
+    '7': {
+      inputs: {
+        clip: ['4', 1],
+      },
+      class_type: 'CLIPTextEncode',
+    },
+    '8': {
+      inputs: {
+        samples: ['3', 0],
+        vae: ['4', 2],
+      },
+      class_type: 'VAEDecode',
+    },
+    '9': {
+      inputs: {
+        filename_prefix: 'ComfyUI',
+        images: ['8', 0],
+      },
+      class_type: 'SaveImage',
+    },
+  },
+  extra_data: {},
+  fal_inputs_dev_info: {},
+  fal_inputs: {},
+};
+
+function getWorkflow(object: any) {
+  let newWorkflow = JSON.parse(JSON.stringify(WORKFLOW));
+  newWorkflow.fal_inputs = {
+    ...newWorkflow.fal_inputs,
+    ...object,
+  };
+
+  return newWorkflow;
+}
diff --git a/apps/demo-nextjs-page-router/pages/workflow.tsx b/apps/demo-nextjs-page-router/pages/workflow.tsx
new file mode 100644
index 0000000..22ac84a
--- /dev/null
+++ b/apps/demo-nextjs-page-router/pages/workflow.tsx
@@ -0,0 +1,91 @@
+// This workflow is generated with ComfyUI-fal
+const WORKFLOW = {
+  prompt: {
+    '3': {
+      inputs: {
+        seed: 156680208700286,
+        steps: 20,
+        cfg: 8,
+        sampler_name: 'euler',
+        scheduler: 'normal',
+        denoise: 1,
+        model: ['4', 0],
+        positive: ['6', 0],
+        negative: ['7', 0],
+        latent_image: ['5', 0],
+      },
+      class_type: 'KSampler',
+    },
+    '4': {
+      inputs: {
+        ckpt_name: 'v1-5-pruned-emaonly.ckpt',
+      },
+      class_type: 'CheckpointLoaderSimple',
+    },
+    '5': {
+      inputs: {
+        width: 512,
+        height: 512,
+        batch_size: 1,
+      },
+      class_type: 'EmptyLatentImage',
+    },
+    '6': {
+      inputs: {
+        text: ['10', 0],
+        clip: ['4', 1],
+      },
+      class_type: 'CLIPTextEncode',
+    },
+    '7': {
+      inputs: {
+        text: 'text, watermark',
+        clip: ['4', 1],
+      },
+      class_type: 'CLIPTextEncode',
+    },
+    '8': {
+      inputs: {
+        samples: ['3', 0],
+        vae: ['4', 2],
+      },
+      class_type: 'VAEDecode',
+    },
+    '9': {
+      inputs: {
+        filename_prefix: 'ComfyUI',
+        images: ['8', 0],
+      },
+      class_type: 'SaveImage',
+    },
+    '10': {
+      inputs: {
+        name: 'cliptextencode_text',
+        value:
+          'beautiful scenery nature glass bottle landscape, , purple galaxy bottle,',
+      },
+      class_type: 'StringInput_fal',
+    },
+  },
+  extra_data: {},
+  fal_inputs_dev_info: {
+    cliptextencode_text: {
+      key: ['10', 'inputs', 'value'],
+      class_type: 'StringInput_fal',
+    },
+  },
+  fal_inputs: {
+    cliptextencode_text:
+      'beautiful scenery nature glass bottle landscape, , purple galaxy bottle,',
+  },
+};
+
+export function getWorkflow(object: any) {
+  let newWorkflow = JSON.parse(JSON.stringify(WORKFLOW));
+  newWorkflow.fal_inputs = {
+    ...newWorkflow.fal_inputs,
+    ...object,
+  };
+
+  return newWorkflow;
+}
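
For readers who want to run one of these workflows outside a React page, here is a minimal sketch of the same call from a plain Node/TypeScript script. It assumes the getWorkflow helper from app/comfy/text_to_image/workflow.tsx above is colocated with the script, that credentials come from a FAL_KEY environment variable (a placeholder name) rather than the Next.js proxy, and that the prompt string is an arbitrary example; the endpoint name, the subscribe options, and the outputs[9] lookup mirror the pages in this patch.

import * as fal from '@fal-ai/serverless-client';
// Assumption: this script sits next to app/comfy/text_to_image/workflow.tsx from the patch above.
import getWorkflow from './workflow';

// Assumption: outside the browser we authenticate directly with an API key
// instead of going through the Next.js proxy; FAL_KEY is a placeholder env var name.
if (process.env.FAL_KEY) {
  fal.config({ credentials: process.env.FAL_KEY });
}

async function run() {
  const result: any = await fal.subscribe('fal-ai/comfy-server', {
    input: getWorkflow({
      prompt: 'a watercolor painting of a lighthouse at dawn', // arbitrary example prompt
    }),
    logs: true,
    onQueueUpdate(update) {
      if (update.status === 'IN_PROGRESS' || update.status === 'COMPLETED') {
        (update.logs || []).map((log) => log.message).forEach(console.log);
      }
    },
  });
  // The text_to_image example reads the SaveImage node's output at index 9.
  console.log(result.outputs[9].images[0].url);
}

run().catch(console.error);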