Qwen / Wan 2K T2I Workflow

{
  "id": "00000000-0000-0000-0000-000000000000",
  "revision": 0,
  "last_node_id": 106,
  "last_link_id": 62,
  "nodes": [
    {
      "id": 38,
      "type": "CLIPLoader",
      "pos": [-579.4774169921875, 262.53173828125],
      "size": [270, 106],
      "flags": {},
      "order": 0,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "CLIP", "type": "CLIP", "links": [37, 38]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPLoader"},
      "widgets_values": ["qwen_2.5_vl_7b_fp8_scaled.safetensors", "qwen_image", "default"]
    },
    {
      "id": 92,
      "type": "VAEDecode",
      "pos": [2955.876953125, 377.4643249511719],
      "size": [140, 46],
      "flags": {},
      "order": 26,
      "mode": 0,
      "inputs": [
        {"name": "samples", "type": "LATENT", "link": 52},
        {"name": "vae", "type": "VAE", "link": 53}
      ],
      "outputs": [{"name": "IMAGE", "type": "IMAGE", "links": [55]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "VAEDecode"}
    },
    {
      "id": 86,
      "type": "ClownsharKSampler_Beta",
      "pos": [2569.0810546875, -62.45936584472656],
      "size": [270, 418],
      "flags": {},
      "order": 25,
      "mode": 0,
      "inputs": [
        {"name": "model", "shape": 7, "type": "MODEL", "link": 46},
        {"name": "positive", "shape": 7, "type": "CONDITIONING", "link": 47},
        {"name": "negative", "shape": 7, "type": "CONDITIONING", "link": 48},
        {"name": "latent_image", "shape": 7, "type": "LATENT", "link": 49},
        {"name": "sigmas", "shape": 7, "type": "SIGMAS", "link": null},
        {"name": "guides", "shape": 7, "type": "GUIDES", "link": null},
        {"name": "options", "shape": 7, "type": "OPTIONS", "link": null}
      ],
      "outputs": [
        {"name": "output", "type": "LATENT", "links": [52]},
        {"name": "denoised", "type": "LATENT", "links": null},
        {"name": "options", "type": "OPTIONS", "links": null}
      ],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "ClownsharKSampler_Beta"},
      "widgets_values": [0.5000000000000001, "res_2s", "beta57", 4, -1, 0.30000000000000004, 1.0000000000000002, 0, "randomize", "standard", true]
    },
    {
      "id": 88,
      "type": "CLIPTextEncode",
      "pos": [2081.955810546875, -42.529197692871094],
      "size": [400, 200],
      "flags": {},
      "order": 18,
      "mode": 0,
      "inputs": [
        {"name": "clip", "type": "CLIP", "link": 51},
        {"name": "text", "type": "STRING", "widget": {"name": "text"}, "link": 50}
      ],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [47]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [""]
    },
    {
      "id": 8,
      "type": "VAEDecode",
      "pos": [1084.8045654296875, 433.3717956542969],
      "size": [140, 46],
      "flags": {},
      "order": 21,
      "mode": 0,
      "inputs": [
        {"name": "samples", "type": "LATENT", "link": 39},
        {"name": "vae", "type": "VAE", "link": 40}
      ],
      "outputs": [{"name": "IMAGE", "type": "IMAGE", "links": [43]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "VAEDecode"}
    },
    {
      "id": 69,
      "type": "UnetLoaderGGUF",
      "pos": [-569.9808349609375, 155.05517578125],
      "size": [270, 58],
      "flags": {},
      "order": 1,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [44]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "UnetLoaderGGUF"},
      "widgets_values": ["qwen-image-Q4_K_M.gguf"]
    },
    {
      "id": 66,
      "type": "ModelSamplingAuraFlow",
      "pos": [-242.3684844970703, 145.2268524169922],
      "size": [270, 58],
      "flags": {},
      "order": 10,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 44}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [45]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "ModelSamplingAuraFlow"},
      "widgets_values": [3.1000000000000005]
    },
    {
      "id": 70,
      "type": "PathchSageAttentionKJ",
      "pos": [99.9563980102539, 146.25221252441406],
      "size": [270, 58],
      "flags": {},
      "order": 16,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 45}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [32]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "PathchSageAttentionKJ"},
      "widgets_values": ["auto"]
    },
    {
      "id": 3,
      "type": "KSampler",
      "pos": [696.3853149414062, 134.33392333984375],
      "size": [270, 262],
      "flags": {},
      "order": 19,
      "mode": 0,
      "inputs": [
        {"name": "model", "type": "MODEL", "link": 32},
        {"name": "positive", "type": "CONDITIONING", "link": 33},
        {"name": "negative", "type": "CONDITIONING", "link": 34},
        {"name": "latent_image", "type": "LATENT", "link": 35}
      ],
      "outputs": [{"name": "LATENT", "type": "LATENT", "links": [39, 56]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "KSampler"},
      "widgets_values": [670526180370430, "randomize", 7, 2.5, "res_2s", "bong_tangent", 1]
    },
    {
      "id": 7,
      "type": "CLIPTextEncode",
      "pos": [228.08265686035156, 703.6488037109375],
      "size": [400, 200],
      "flags": {},
      "order": 9,
      "mode": 0,
      "inputs": [{"name": "clip", "type": "CLIP", "link": 38}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [34]}],
      "title": "CLIP Text Encode (Negative Prompt)",
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [" "]
    },
    {
      "id": 6,
      "type": "CLIPTextEncode",
      "pos": [227.79251098632812, 436.7325439453125],
      "size": [400, 200],
      "flags": {},
      "order": 14,
      "mode": 0,
      "inputs": [
        {"name": "clip", "type": "CLIP", "link": 37},
        {"name": "text", "type": "STRING", "widget": {"name": "text"}, "link": 36}
      ],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [33]}],
      "title": "CLIP Text Encode (Positive Prompt)",
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [""]
    },
    {
      "id": 58,
      "type": "EmptySD3LatentImage",
      "pos": [217.9580078125, 261.56292724609375],
      "size": [270, 106],
      "flags": {},
      "order": 11,
      "mode": 0,
      "inputs": [
        {"name": "width", "type": "INT", "widget": {"name": "width"}, "link": 41},
        {"name": "height", "type": "INT", "widget": {"name": "height"}, "link": 42}
      ],
      "outputs": [{"name": "LATENT", "type": "LATENT", "links": [35]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "EmptySD3LatentImage"},
      "widgets_values": [1024, 1024, 1]
    },
    {
      "id": 39,
      "type": "VAELoader",
      "pos": [718.4498291015625, 501.9953308105469],
      "size": [270, 58],
      "flags": {},
      "order": 2,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "VAE", "type": "VAE", "links": [40]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "VAELoader"},
      "widgets_values": ["qwen_image_vae.safetensors"]
    },
    {
      "id": 83,
      "type": "FluxResolutionNode",
      "pos": [-144.7438201904297, 718.0783081054688],
      "size": [274.6666564941406, 190],
      "flags": {},
      "order": 3,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {"name": "width", "type": "INT", "links": [41]},
        {"name": "height", "type": "INT", "links": [42]},
        {"name": "resolution", "type": "STRING", "links": null},
        {"name": "preview", "type": "IMAGE", "links": null}
      ],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "FluxResolutionNode"},
      "widgets_values": ["1.0", "1:1 (Perfect Square)", false, "1:1"]
    },
    {
      "id": 94,
      "type": "CLIPTextEncode",
      "pos": [2088.688232421875, 209.5572509765625],
      "size": [400, 200],
      "flags": {},
      "order": 13,
      "mode": 0,
      "inputs": [{"name": "clip", "type": "CLIP", "link": 54}],
      "outputs": [{"name": "CONDITIONING", "type": "CONDITIONING", "links": [48]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPTextEncode"},
      "widgets_values": [" 色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走"]
    },
    {
      "id": 84,
      "type": "UnetLoaderGGUF",
      "pos": [725.1625366210938, -460.2761535644531],
      "size": [270, 58],
      "flags": {},
      "order": 4,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [57]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "UnetLoaderGGUF"},
      "widgets_values": ["wan2.2_t2v_low_noise_14B_Q4_K_M.gguf"]
    },
    {
      "id": 99,
      "type": "PathchSageAttentionKJ",
      "pos": [1039.8890380859375, -469.2546691894531],
      "size": [270, 58],
      "flags": {},
      "order": 12,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 57}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [58]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "PathchSageAttentionKJ"},
      "widgets_values": ["auto"]
    },
    {
      "id": 100,
      "type": "LoraLoaderModelOnly",
      "pos": [1358.0697021484375, -492.5220642089844],
      "size": [270, 82],
      "flags": {},
      "order": 17,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 58}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [59]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "LoraLoaderModelOnly"},
      "widgets_values": ["wan/Wan2.1_T2V_14B_FusionX_LoRA.safetensors", 0.4000000000000001]
    },
    {
      "id": 101,
      "type": "LoraLoaderModelOnly",
      "pos": [1362.7198486328125, -349.1102600097656],
      "size": [270, 82],
      "flags": {},
      "order": 20,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 59}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [60]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "LoraLoaderModelOnly"},
      "widgets_values": ["wan/Wan21_T2V_14B_lightx2v_cfg_step_distill_lora_rank32.safetensors", 0.4000000000000001]
    },
    {
      "id": 103,
      "type": "LoraLoaderModelOnly",
      "pos": [1670.96240234375, -480.95989990234375],
      "size": [270, 82],
      "flags": {},
      "order": 23,
      "mode": 0,
      "inputs": [{"name": "model", "type": "MODEL", "link": 60}],
      "outputs": [{"name": "MODEL", "type": "MODEL", "links": [46]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "LoraLoaderModelOnly"},
      "widgets_values": ["wan/realism/WAN2.2-LowNoise_SmartphoneSnapshotPhotoReality_v3_by-AI_Characters.safetensors", 0.30000000000000004]
    },
    {
      "id": 98,
      "type": "LatentUpscaleBy",
      "pos": [2098.93310546875, 476.1046142578125],
      "size": [270, 82],
      "flags": {},
      "order": 22,
      "mode": 0,
      "inputs": [{"name": "samples", "type": "LATENT", "link": 56}],
      "outputs": [{"name": "LATENT", "type": "LATENT", "links": [49]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "LatentUpscaleBy"},
      "widgets_values": ["nearest-exact", 2.0000000000000004]
    },
    {
      "id": 105,
      "type": "JWStringConcat",
      "pos": [1733.1439208984375, -22.232358932495117],
      "size": [270, 82],
      "flags": {},
      "order": 15,
      "mode": 0,
      "inputs": [
        {"name": "a", "type": "STRING", "widget": {"name": "a"}, "link": 61},
        {"name": "b", "type": "STRING", "widget": {"name": "b"}, "link": 62}
      ],
      "outputs": [{"name": "STRING", "type": "STRING", "links": [50]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "JWStringConcat"},
      "widgets_values": ["", ""]
    },
    {
      "id": 97,
      "type": "CLIPLoaderGGUF",
      "pos": [708.9107666015625, -335.6412048339844],
      "size": [270, 82],
      "flags": {},
      "order": 5,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "CLIP", "type": "CLIP", "links": [51, 54]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "CLIPLoaderGGUF"},
      "widgets_values": ["umt5xxl-encoder-q4_0.gguf", "wan"]
    },
    {
      "id": 106,
      "type": "Textbox",
      "pos": [-246.62086486816406, -149.1493682861328],
      "size": [340.40911865234375, 194.5826416015625],
      "flags": {},
      "order": 6,
      "mode": 0,
      "inputs": [{"name": "passthrough", "shape": 7, "type": "STRING", "link": null}],
      "outputs": [{"name": "text", "type": "STRING", "links": [36, 61]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "Textbox"},
      "widgets_values": [
        "{\n \"image_id\": \"portrait_blue_veil_001\",\n \"metadata\": {\n \"title\": \"Portrait of Woman in Blue Veil\",\n \"description\": \"A dramatic low-key portrait of a young woman with long dark hair, draped in a sheer blue fabric, set against a deep blue background. The fabric softly wraps her head and shoulders, creating a mysterious and ethereal effect. Her expression is calm and intense, gazing directly at the viewer.\",\n \"date_captured\": \"unknown\",\n \"location\": \"studio\",\n \"orientation\": \"portrait\",\n \"aspect_ratio\": \"4:5\",\n \"color_palette\": [\"#1C2B3A\", \"#4B6A88\", \"#2E4A66\", \"#0F1A24\", \"#7F92A0\"],\n \"dominant_color\": \"#1C2B3A\",\n \"lighting\": {\n \"style\": \"low-key\",\n \"light_direction\": \"left-back\",\n \"color_temperature\": \"cool\",\n \"shadows\": \"soft shadows on right side of face\",\n \"highlight_area\": \"left cheek and fabric\"\n }\n },\n \"subject\": {\n \"type\": \"human\",\n \"age_range\": \"young adult\",\n \"gender\": \"female\",\n \"ethnicity\": \"unspecified\",\n \"features\": {\n \"hair_color\": \"dark brown\",\n \"eye_color\": \"brown\",\n \"skin_tone\": \"light to medium\",\n \"expression\": \"neutral to serious\",\n \"pose\": \"frontal, shoulders turned slightly right\"\n },\n \"clothing\": {\n \"type\": \"veil or sheer fabric\",\n \"color\": \"blue\",\n \"coverage\": \"head and shoulders\"\n }\n },\n \"style\": {\n \"photographic_style\": \"portraiture\",\n \"mood\": \"mysterious, introspective, ethereal\",\n \"visual_elements\": [\n \"monochromatic palette\",\n \"soft focus in shadows\",\n \"textured fabric interplay with light\"\n ]\n },\n \"technical\": {\n \"camera_settings\": {\n \"f_stop\": \"f/2.8 (approx)\",\n \"shutter_speed\": \"1/125s (approx)\",\n \"iso\": \"low ISO for minimal grain\",\n \"lens\": \"50mm or 85mm prime (approx)\"\n },\n \"post_processing\": {\n \"color_grading\": \"cool tones with high contrast\",\n \"retouching\": \"minimal\",\n \"vignette\": \"slight\"\n }\n },\n \"tags\": [\n \"portrait photography\",\n \"blue tones\",\n \"female subject\",\n \"ethereal aesthetic\",\n \"moody lighting\",\n \"veil\",\n \"studio photo\",\n \"direct eye contact\",\n \"low-key lighting\",\n \"mysterious\"\n ],\n \"usage_rights\": {\n \"copyright_owner\": \"unknown\",\n \"license\": \"unspecified\",\n \"model_release\": \"not confirmed\"\n }\n}\n"
      ]
    },
    {
      "id": 104,
      "type": "Textbox",
      "pos": [-253.27684020996094, -422.0785827636719],
      "size": [400, 200],
      "flags": {},
      "order": 7,
      "mode": 0,
      "inputs": [{"name": "passthrough", "shape": 7, "type": "STRING", "link": null}],
      "outputs": [{"name": "text", "type": "STRING", "links": [62]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "Textbox"},
      "widgets_values": ["early 2010s snapshot photo captured with a phone and uploaded to facebook, featuring dynamic natural lighting, and a neutral white color balance with washed out colors"]
    },
    {
      "id": 60,
      "type": "SaveImage",
      "pos": [1451.554443359375, 649.984130859375],
      "size": [263.4991760253906, 282],
      "flags": {"collapsed": true},
      "order": 24,
      "mode": 0,
      "inputs": [{"name": "images", "type": "IMAGE", "link": 43}],
      "outputs": [],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "SaveImage"},
      "widgets_values": [" 8-7/qwen.pipeline-v0.0-RC0/img"]
    },
    {
      "id": 95,
      "type": "SaveImage",
      "pos": [1634.560546875, 632.31298828125],
      "size": [270, 270],
      "flags": {},
      "order": 27,
      "mode": 0,
      "inputs": [{"name": "images", "type": "IMAGE", "link": 55}],
      "outputs": [],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "SaveImage"},
      "widgets_values": [" 8-7/qwen.pipeline-v0.0-RC0/upscaled"]
    },
    {
      "id": 90,
      "type": "VAELoader",
      "pos": [2571.396484375, 433.063232421875],
      "size": [270, 58],
      "flags": {},
      "order": 8,
      "mode": 0,
      "inputs": [],
      "outputs": [{"name": "VAE", "type": "VAE", "links": [53]}],
      "properties": {"widget_ue_connectable": {}, "Node name for S&R": "VAELoader"},
      "widgets_values": ["Wan2_1_VAE_bf16.safetensors"]
    }
  ],
  "links": [
    [32, 70, 0, 3, 0, "MODEL"],
    [33, 6, 0, 3, 1, "CONDITIONING"],
    [34, 7, 0, 3, 2, "CONDITIONING"],
    [35, 58, 0, 3, 3, "LATENT"],
    [36, 106, 0, 6, 1, "STRING"],
    [37, 38, 0, 6, 0, "CLIP"],
    [38, 38, 0, 7, 0, "CLIP"],
    [39, 3, 0, 8, 0, "LATENT"],
    [40, 39, 0, 8, 1, "VAE"],
    [41, 83, 0, 58, 0, "INT"],
    [42, 83, 1, 58, 1, "INT"],
    [43, 8, 0, 60, 0, "IMAGE"],
    [44, 69, 0, 66, 0, "MODEL"],
    [45, 66, 0, 70, 0, "MODEL"],
    [46, 103, 0, 86, 0, "MODEL"],
    [47, 88, 0, 86, 1, "CONDITIONING"],
    [48, 94, 0, 86, 2, "CONDITIONING"],
    [49, 98, 0, 86, 3, "LATENT"],
    [50, 105, 0, 88, 1, "STRING"],
    [51, 97, 0, 88, 0, "CLIP"],
    [52, 86, 0, 92, 0, "LATENT"],
    [53, 90, 0, 92, 1, "VAE"],
    [54, 97, 0, 94, 0, "CLIP"],
    [55, 92, 0, 95, 0, "IMAGE"],
    [56, 3, 0, 98, 0, "LATENT"],
    [57, 84, 0, 99, 0, "MODEL"],
    [58, 99, 0, 100, 0, "MODEL"],
    [59, 100, 0, 101, 0, "MODEL"],
    [60, 101, 0, 103, 0, "MODEL"],
    [61, 106, 0, 105, 0, "STRING"],
    [62, 104, 0, 105, 1, "STRING"]
  ],
  "groups": [],
  "config": {},
  "extra": {
    "ue_links": [],
    "ds": {
      "scale": 0.9229599817706593,
      "offset": [1332.021701789349, 645.6990862086404]
    },
    "frontendVersion": "1.23.4",
    "VHS_latentpreview": false,
    "VHS_latentpreviewrate": 0,
    "VHS_MetadataImage": true,
    "VHS_KeepIntermediate": true
  },
  "version": 0.4
}
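
Optional helper, not part of the workflow itself: a minimal Python sketch that assumes you have saved the JSON above to a file (the name qwen_wan_t2i.json is hypothetical). It uses only the standard library to list every loader node and the model, text-encoder, VAE, and LoRA files the graph references, so you can check your ComfyUI model folders before importing.

import json

# Loader node types whose first string widget value is a file the graph expects.
LOADER_TYPES = {
    "CLIPLoader",
    "CLIPLoaderGGUF",
    "UnetLoaderGGUF",
    "VAELoader",
    "LoraLoaderModelOnly",
}

# Hypothetical filename: save the paste above as this file first.
with open("qwen_wan_t2i.json", encoding="utf-8") as f:
    workflow = json.load(f)

print(f"{len(workflow['nodes'])} nodes, {len(workflow['links'])} links")

for node in workflow["nodes"]:
    if node["type"] in LOADER_TYPES:
        # For these loaders the first string in widgets_values is the filename.
        filename = next(v for v in node["widgets_values"] if isinstance(v, str))
        print(f"node {node['id']:>3}  {node['type']:<20} -> {filename}")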