- {
- "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
- "revision": 0,
- "last_node_id": 75,
- "last_link_id": 132,
- "nodes": [
- {
- "id": 39,
- "type": "VAELoader",
- "pos": [
- 20,
- 340
- ],
- "size": [
- 330,
- 60
- ],
- "flags": {},
- "order": 0,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "vae_name",
- "name": "vae_name",
- "type": "COMBO",
- "widget": {
- "name": "vae_name"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "VAE",
- "name": "VAE",
- "type": "VAE",
- "slot_index": 0,
- "links": [
- 76
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "VAELoader",
- "models": [
- {
- "name": "qwen_image_vae.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
- "directory": "vae"
- }
- ],
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "qwen_image_vae.safetensors"
- ]
- },
- {
- "id": 38,
- "type": "CLIPLoader",
- "pos": [
- 20,
- 190
- ],
- "size": [
- 330,
- 110
- ],
- "flags": {},
- "order": 1,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "clip_name",
- "name": "clip_name",
- "type": "COMBO",
- "widget": {
- "name": "clip_name"
- },
- "link": null
- },
- {
- "localized_name": "type",
- "name": "type",
- "type": "COMBO",
- "widget": {
- "name": "type"
- },
- "link": null
- },
- {
- "localized_name": "device",
- "name": "device",
- "shape": 7,
- "type": "COMBO",
- "widget": {
- "name": "device"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "CLIP",
- "name": "CLIP",
- "type": "CLIP",
- "slot_index": 0,
- "links": [
- 74,
- 75
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "CLIPLoader",
- "models": [
- {
- "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
- "directory": "text_encoders"
- }
- ],
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "qwen_2.5_vl_7b_fp8_scaled.safetensors",
- "qwen_image",
- "default"
- ]
- },
- {
- "id": 58,
- "type": "EmptySD3LatentImage",
- "pos": [
- 50,
- 510
- ],
- "size": [
- 270,
- 106
- ],
- "flags": {},
- "order": 2,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "width",
- "name": "width",
- "type": "INT",
- "widget": {
- "name": "width"
- },
- "link": null
- },
- {
- "localized_name": "height",
- "name": "height",
- "type": "INT",
- "widget": {
- "name": "height"
- },
- "link": null
- },
- {
- "localized_name": "batch_size",
- "name": "batch_size",
- "type": "INT",
- "widget": {
- "name": "batch_size"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "LATENT",
- "name": "LATENT",
- "type": "LATENT",
- "links": [
- 107
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "EmptySD3LatentImage",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- 1328,
- 1328,
- 1
- ]
- },
- {
- "id": 6,
- "type": "CLIPTextEncode",
- "pos": [
- 390,
- 240
- ],
- "size": [
- 422.84503173828125,
- 164.31304931640625
- ],
- "flags": {},
- "order": 9,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "clip",
- "name": "clip",
- "type": "CLIP",
- "link": 74
- },
- {
- "localized_name": "text",
- "name": "text",
- "type": "STRING",
- "widget": {
- "name": "text"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "CONDITIONING",
- "name": "CONDITIONING",
- "type": "CONDITIONING",
- "slot_index": 0,
- "links": [
- 46
- ]
- }
- ],
- "title": "CLIP Text Encode (Positive Prompt)",
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "CLIPTextEncode",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "\"A vibrant, warm neon-lit street scene in Hong Kong at the afternoon, with a mix of colorful Chinese and English signs glowing brightly. The atmosphere is lively, cinematic, and rain-washed with reflections on the pavement. The colors are vivid, full of pink, blue, red, and green hues. Crowded buildings with overlapping neon signs. 1980s Hong Kong style. Signs include:\n\"龍鳳冰室\" \"金華燒臘\" \"HAPPY HAIR\" \"鴻運茶餐廳\" \"EASY BAR\" \"永發魚蛋粉\" \"添記粥麵\" \"SUNSHINE MOTEL\" \"美都餐室\" \"富記糖水\" \"太平館\" \"雅芳髮型屋\" \"STAR KTV\" \"銀河娛樂城\" \"百樂門舞廳\" \"BUBBLE CAFE\" \"萬豪麻雀館\" \"CITY LIGHTS BAR\" \"瑞祥香燭莊\" \"文記文具\" \"GOLDEN JADE HOTEL\" \"LOVELY BEAUTY\" \"合興百貨\" \"興旺電器\" And the background is warm yellow street and with all stores' lights on."
- ],
- "color": "#232",
- "bgcolor": "#353"
- },
- {
- "id": 7,
- "type": "CLIPTextEncode",
- "pos": [
- 390,
- 440
- ],
- "size": [
- 425.27801513671875,
- 180.6060791015625
- ],
- "flags": {},
- "order": 10,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "clip",
- "name": "clip",
- "type": "CLIP",
- "link": 75
- },
- {
- "localized_name": "text",
- "name": "text",
- "type": "STRING",
- "widget": {
- "name": "text"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "CONDITIONING",
- "name": "CONDITIONING",
- "type": "CONDITIONING",
- "slot_index": 0,
- "links": [
- 52
- ]
- }
- ],
- "title": "CLIP Text Encode (Negative Prompt)",
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "CLIPTextEncode",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- ""
- ],
- "color": "#322",
- "bgcolor": "#533"
- },
- {
- "id": 60,
- "type": "SaveImage",
- "pos": [
- 1170,
- 10
- ],
- "size": [
- 490,
- 600
- ],
- "flags": {},
- "order": 16,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "images",
- "name": "images",
- "type": "IMAGE",
- "link": 110
- },
- {
- "localized_name": "filename_prefix",
- "name": "filename_prefix",
- "type": "STRING",
- "widget": {
- "name": "filename_prefix"
- },
- "link": null
- }
- ],
- "outputs": [],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "SaveImage",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "ComfyUI"
- ]
- },
- {
- "id": 66,
- "type": "ModelSamplingAuraFlow",
- "pos": [
- 850,
- 10
- ],
- "size": [
- 300,
- 58
- ],
- "flags": {},
- "order": 13,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "model",
- "name": "model",
- "type": "MODEL",
- "link": 130
- },
- {
- "localized_name": "shift",
- "name": "shift",
- "type": "FLOAT",
- "widget": {
- "name": "shift"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "MODEL",
- "name": "MODEL",
- "type": "MODEL",
- "links": [
- 125
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "ModelSamplingAuraFlow",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- 3.1000000000000005
- ]
- },
- {
- "id": 69,
- "type": "MarkdownNote",
- "pos": [
- -540,
- -220
- ],
- "size": [
- 390,
- 180
- ],
- "flags": {},
- "order": 3,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "title": "VRAM Usage",
- "properties": {
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "## GPU:RTX4090D 24GB\n\n| Configuration | VRAM Usage | 1st Generation | 2nd Generation |\n|---------------------|---------------|---------------|-----------------|\n| Fp8_e4m3fn | 86% | ≈ 94s | ≈ 71s |\n| With 8steps LoRA | 86% | ≈ 55s | ≈ 34s |\n| Distill fp8_e4m3fn | 86% | ≈ 69s | ≈ 36s |"
- ],
- "color": "#432",
- "bgcolor": "#653"
- },
- {
- "id": 71,
- "type": "Note",
- "pos": [
- 850,
- -120
- ],
- "size": [
- 300,
- 88
- ],
- "flags": {},
- "order": 4,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "properties": {
- "ue_properties": {
- "widget_ue_connectable": {},
- "version": "7.2.1",
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "Increase the shift if you get too many blurry/dark/bad images. Decrease if you want to try increasing detail."
- ],
- "color": "#432",
- "bgcolor": "#653"
- },
- {
- "id": 8,
- "type": "VAEDecode",
- "pos": [
- 1170,
- -90
- ],
- "size": [
- 210,
- 46
- ],
- "flags": {
- "collapsed": false
- },
- "order": 15,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "samples",
- "name": "samples",
- "type": "LATENT",
- "link": 128
- },
- {
- "localized_name": "vae",
- "name": "vae",
- "type": "VAE",
- "link": 76
- }
- ],
- "outputs": [
- {
- "localized_name": "IMAGE",
- "name": "IMAGE",
- "type": "IMAGE",
- "slot_index": 0,
- "links": [
- 110
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "VAEDecode",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": []
- },
- {
- "id": 67,
- "type": "MarkdownNote",
- "pos": [
- -540,
- 10
- ],
- "size": [
- 540,
- 630
- ],
- "flags": {},
- "order": 5,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "title": "Model links",
- "properties": {
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\nQwen_image_distill\n\n- [qwen_image_distill_full_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_fp8_e4m3fn.safetensors)\n- [qwen_image_distill_full_bf16.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/non_official/diffusion_models/qwen_image_distill_full_bf16.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
- ],
- "color": "#432",
- "bgcolor": "#653"
- },
- {
- "id": 70,
- "type": "Note",
- "pos": [
- 850,
- 910
- ],
- "size": [
- 310,
- 120
- ],
- "flags": {},
- "order": 6,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "title": "For fp8 without 8steps LoRA",
- "properties": {
- "ue_properties": {
- "widget_ue_connectable": {},
- "version": "7.2.1",
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
- ],
- "color": "#432",
- "bgcolor": "#653"
- },
- {
- "id": 74,
- "type": "MarkdownNote",
- "pos": [
- 850,
- 660
- ],
- "size": [
- 310,
- 210
- ],
- "flags": {},
- "order": 7,
- "mode": 0,
- "inputs": [],
- "outputs": [],
- "title": "KSampler settings",
- "properties": {
- "ue_properties": {
- "widget_ue_connectable": {},
- "version": "7.2.1",
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn(Qwen team's suggestion) | 40 | 2.5 |\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 8steps LoRA | 8 | 1.0 |\n| distill fp8_e4m3fn | 10 | 1.0 |"
- ],
- "color": "#432",
- "bgcolor": "#653"
- },
- {
- "id": 73,
- "type": "LoraLoaderModelOnly",
- "pos": [
- 460,
- 60
- ],
- "size": [
- 270,
- 82
- ],
- "flags": {},
- "order": 12,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "model",
- "name": "model",
- "type": "MODEL",
- "link": 132
- },
- {
- "localized_name": "lora_name",
- "name": "lora_name",
- "type": "COMBO",
- "widget": {
- "name": "lora_name"
- },
- "link": null
- },
- {
- "localized_name": "strength_model",
- "name": "strength_model",
- "type": "FLOAT",
- "widget": {
- "name": "strength_model"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "MODEL",
- "name": "MODEL",
- "type": "MODEL",
- "links": [
- 130
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.49",
- "Node name for S&R": "LoraLoaderModelOnly",
- "models": [
- {
- "name": "Qwen-Image-Lightning-8steps-V1.0.safetensors",
- "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors",
- "directory": "loras"
- }
- ],
- "ue_properties": {
- "widget_ue_connectable": {},
- "version": "7.2.1",
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "Qwen-Image-Lightning-8steps-V1.0.safetensors",
- 1
- ]
- },
- {
- "id": 3,
- "type": "KSampler",
- "pos": [
- 850,
- 120
- ],
- "size": [
- 300,
- 474
- ],
- "flags": {},
- "order": 14,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "model",
- "name": "model",
- "type": "MODEL",
- "link": 125
- },
- {
- "localized_name": "positive",
- "name": "positive",
- "type": "CONDITIONING",
- "link": 46
- },
- {
- "localized_name": "negative",
- "name": "negative",
- "type": "CONDITIONING",
- "link": 52
- },
- {
- "localized_name": "latent_image",
- "name": "latent_image",
- "type": "LATENT",
- "link": 107
- },
- {
- "localized_name": "seed",
- "name": "seed",
- "type": "INT",
- "widget": {
- "name": "seed"
- },
- "link": null
- },
- {
- "localized_name": "steps",
- "name": "steps",
- "type": "INT",
- "widget": {
- "name": "steps"
- },
- "link": null
- },
- {
- "localized_name": "cfg",
- "name": "cfg",
- "type": "FLOAT",
- "widget": {
- "name": "cfg"
- },
- "link": null
- },
- {
- "localized_name": "sampler_name",
- "name": "sampler_name",
- "type": "COMBO",
- "widget": {
- "name": "sampler_name"
- },
- "link": null
- },
- {
- "localized_name": "scheduler",
- "name": "scheduler",
- "type": "COMBO",
- "widget": {
- "name": "scheduler"
- },
- "link": null
- },
- {
- "localized_name": "denoise",
- "name": "denoise",
- "type": "FLOAT",
- "widget": {
- "name": "denoise"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "LATENT",
- "name": "LATENT",
- "type": "LATENT",
- "slot_index": 0,
- "links": [
- 128
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "KSampler",
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- 1105613325489210,
- "randomize",
- 8,
- 2.5,
- "euler",
- "simple",
- 1
- ]
- },
- {
- "id": 37,
- "type": "UNETLoader",
- "pos": [
- 20,
- 50
- ],
- "size": [
- 330,
- 90
- ],
- "flags": {},
- "order": 8,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "unet_name",
- "name": "unet_name",
- "type": "COMBO",
- "widget": {
- "name": "unet_name"
- },
- "link": null
- },
- {
- "localized_name": "weight_dtype",
- "name": "weight_dtype",
- "type": "COMBO",
- "widget": {
- "name": "weight_dtype"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "MODEL",
- "name": "MODEL",
- "type": "MODEL",
- "slot_index": 0,
- "links": [
- 131
- ]
- }
- ],
- "properties": {
- "cnr_id": "comfy-core",
- "ver": "0.3.48",
- "Node name for S&R": "UNETLoader",
- "models": [
- {
- "name": "qwen_image_fp8_e4m3fn.safetensors",
- "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
- "directory": "diffusion_models"
- }
- ],
- "enableTabs": false,
- "tabWidth": 65,
- "tabXOffset": 10,
- "hasSecondTab": false,
- "secondTabText": "Send Back",
- "secondTabOffset": 80,
- "secondTabWidth": 65,
- "ue_properties": {
- "version": "7.2.1",
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- }
- },
- "widgets_values": [
- "qwen_image_fp8_e4m3fn.safetensors",
- "default"
- ]
- },
- {
- "id": 75,
- "type": "PathchSageAttentionKJ",
- "pos": [
- 310,
- -110
- ],
- "size": [
- 270,
- 58
- ],
- "flags": {},
- "order": 11,
- "mode": 0,
- "inputs": [
- {
- "localized_name": "model",
- "name": "model",
- "type": "MODEL",
- "link": 131
- },
- {
- "localized_name": "sage_attention",
- "name": "sage_attention",
- "type": "COMBO",
- "widget": {
- "name": "sage_attention"
- },
- "link": null
- }
- ],
- "outputs": [
- {
- "localized_name": "MODEL",
- "name": "MODEL",
- "type": "MODEL",
- "links": [
- 132
- ]
- }
- ],
- "properties": {
- "ue_properties": {
- "widget_ue_connectable": {},
- "input_ue_unconnectable": {}
- },
- "cnr_id": "comfyui-kjnodes",
- "ver": "1.1.7",
- "Node name for S&R": "PathchSageAttentionKJ"
- },
- "widgets_values": [
- "sageattn_qk_int8_pv_fp16_triton"
- ]
- }
- ],
- "links": [
- [
- 46,
- 6,
- 0,
- 3,
- 1,
- "CONDITIONING"
- ],
- [
- 52,
- 7,
- 0,
- 3,
- 2,
- "CONDITIONING"
- ],
- [
- 74,
- 38,
- 0,
- 6,
- 0,
- "CLIP"
- ],
- [
- 75,
- 38,
- 0,
- 7,
- 0,
- "CLIP"
- ],
- [
- 76,
- 39,
- 0,
- 8,
- 1,
- "VAE"
- ],
- [
- 107,
- 58,
- 0,
- 3,
- 3,
- "LATENT"
- ],
- [
- 110,
- 8,
- 0,
- 60,
- 0,
- "IMAGE"
- ],
- [
- 125,
- 66,
- 0,
- 3,
- 0,
- "MODEL"
- ],
- [
- 128,
- 3,
- 0,
- 8,
- 0,
- "LATENT"
- ],
- [
- 130,
- 73,
- 0,
- 66,
- 0,
- "MODEL"
- ],
- [
- 131,
- 37,
- 0,
- 75,
- 0,
- "MODEL"
- ],
- [
- 132,
- 75,
- 0,
- 73,
- 0,
- "MODEL"
- ]
- ],
- "groups": [
- {
- "id": 1,
- "title": "Step1 - Load models",
- "bounding": [
- 10,
- -20,
- 350,
- 433.6000061035156
- ],
- "color": "#3f789e",
- "font_size": 24,
- "flags": {}
- },
- {
- "id": 2,
- "title": "Step2 - Image size",
- "bounding": [
- 10,
- 430,
- 350,
- 210
- ],
- "color": "#3f789e",
- "font_size": 24,
- "flags": {}
- },
- {
- "id": 3,
- "title": "Step3 - Prompt",
- "bounding": [
- 380,
- 160,
- 450,
- 470
- ],
- "color": "#3f789e",
- "font_size": 24,
- "flags": {}
- },
- {
- "id": 4,
- "title": "Lightx2v 8steps LoRA",
- "bounding": [
- 380,
- -20,
- 450,
- 170
- ],
- "color": "#3f789e",
- "font_size": 24,
- "flags": {}
- }
- ],
- "config": {},
- "extra": {
- "ds": {
- "scale": 1.0361523283384082,
- "offset": [
- 614.8058780089232,
- 194.24948440035107
- ]
- },
- "frontendVersion": "1.26.6",
- "ue_links": [],
- "links_added_by_ue": [],
- "VHS_latentpreview": false,
- "VHS_latentpreviewrate": 0,
- "VHS_MetadataImage": true,
- "VHS_KeepIntermediate": true
- },
- "version": 0.4
- }