{ "id": 1394407, "modelId": 1237290, "name": "v1.0", "createdAt": "2025-02-10T01:08:15.801Z", "updatedAt": "2025-02-10T01:11:46.172Z", "status": "Published", "publishedAt": "2025-02-10T01:11:46.159Z", "trainedWords": [ "carisha" ], "trainingStatus": null, "trainingDetails": null, "baseModel": "Flux.1 D", "baseModelType": null, "earlyAccessEndsAt": null, "earlyAccessConfig": null, "description": null, "uploadType": "Created", "usageControl": "Download", "air": "urn:air:flux1:lora:civitai:1237290@1394407", "stats": { "downloadCount": 270, "ratingCount": 0, "rating": 0, "thumbsUpCount": 26 }, "model": { "name": "Carisha - Slovak adult model [FLUX]", "type": "LORA", "nsfw": false, "poi": true }, "files": [ { "id": 1296818, "sizeKB": 32875.20703125, "name": "Carisha_rank8_bf16-step03000.safetensors", "type": "Model", "pickleScanResult": "Success", "pickleScanMessage": "No Pickle imports", "virusScanResult": "Success", "virusScanMessage": null, "scannedAt": "2025-02-10T01:11:07.975Z", "metadata": { "format": "SafeTensor", "size": null, "fp": null }, "hashes": { "AutoV1": "E7CC590A", "AutoV2": "005911C6FF", "SHA256": "005911C6FF085C4A3E9CABB29C1DC87D7E59388B845F4D8A0F0DD77EFC6B1FE0", "CRC32": "29F792DD", "BLAKE3": "87A8E363824246D1C1341363EBD7E6ED3B9653452AC6657DB174B6FCC0347C98", "AutoV3": "8A28DD2DE012" }, "primary": true, "downloadUrl": "https://civitai.com/api/download/models/1394407" } ], "images": [ { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3a7ac832-80fd-409d-9740-5a63fe74309e/width=832/56868828.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UIECK;8}5*Mz*FRRrsM|XNxaVuR+otozV]nk", "type": "image", "metadata": { "hash": "UIECK;8}5*Mz*FRRrsM|XNxaVuR+otozV]nk", "size": 1506571, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 1124074905919508, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"closeup photo of carisha, a woman with blonde hair, brown eyes, in the forest. She wears a salmon t-shirt. The leaves of the forest create dappled light on her. She is hiding partially behind a narrow tree. 
She has a happy expression, smiling mischievously at the viewer\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_183202\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 1124074905919508}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [1124074905919508]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", 
\"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 451481000293243, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [1124074905919508, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [1049717502302660, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": 
{\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"closeup photo of carisha, a woman with blonde hair, brown eyes, in the forest. She wears a salmon t-shirt. The leaves of the forest create dappled light on her. She is hiding partially behind a narrow tree. 
She has a happy expression, smiling mischievously at the viewer\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "closeup photo of carisha, a woman with blonde hair, brown eyes, in the forest. She wears a salmon t-shirt. The leaves of the forest create dappled light on her. She is hiding partially behind a narrow tree. She has a happy expression, smiling mischievously at the viewer", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9a518bdc-56fb-49b9-bea4-959384b02bba/width=832/56868829.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "U8E_:M?^ovIo?sb^D%M{OkkD-o-:F{-pRQM_", "type": "image", "metadata": { "hash": "U8E_:M?^ovIo?sb^D%M{OkkD-o-:F{-pRQM_", "size": 1451317, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 921815968279263, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha, a woman with blonde hair, brown eyes, with striking features, standing proudly in front of a traditional Bavarian backdrop, her long braid cascading down her back like a waterfall. She wears a intricately detailed dirndl attire, complete with delicate lace and embroidery, perfectly capturing the essence of Oktoberfest. Side view. The camera captures her from a 45-degree angle, emphasizing her slender neck and shoulders as she gazes directly at the viewer. Soft, warm light from a nearby lantern casts a flattering glow on her skin, accentuating her high cheekbones and full lips. In the background, the real Oktoberfest festivities unfold: revelers in traditional attire dance and sing around tables laden with steins of beer, while others play lively tunes on accordions and brass instruments. 
The atmosphere is electric, with vibrant colors and sharp lines that create a sense of depth and dimensionality. As the camera lingers, it becomes clear that this woman is not just any festival-goer, she's the star of the show. Her dirndl attire is immaculately detailed, from the intricate patterns on her apron to the delicate flowers in her hair. Every stitch, every thread, every detail is meticulously rendered, creating a photorealistic masterpiece. In a stunning double exposure effect, the woman's image is superimposed over a blurred background of revelers and festivities, creating an otherworldly sense of presence. The result is a truly cinematic scene that transports the viewer to the heart of Oktoberfest. This image is a true masterpiece, showcasing the artist's skill and attention to detail in capturing the essence of Oktoberfest. With its ultra-sharp lines, intricate details, and photorealistic quality, it's sure to be an award-winning work that leaves a lasting impression on all who see it.\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_185405\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1.05}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 921815968279263}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [921815968279263]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": 
[{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 70824410404959, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [921815968279263, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], 
\"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [1089935520894654, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": 
\"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1.05, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"photo of carisha, a woman with blonde hair, brown eyes, with striking features, standing proudly in front of a traditional Bavarian backdrop, her long braid cascading down her back like a waterfall. She wears a intricately detailed dirndl attire, complete with delicate lace and embroidery, perfectly capturing the essence of Oktoberfest. Side view. The camera captures her from a 45-degree angle, emphasizing her slender neck and shoulders as she gazes directly at the viewer. Soft, warm light from a nearby lantern casts a flattering glow on her skin, accentuating her high cheekbones and full lips. In the background, the real Oktoberfest festivities unfold: revelers in traditional attire dance and sing around tables laden with steins of beer, while others play lively tunes on accordions and brass instruments. The atmosphere is electric, with vibrant colors and sharp lines that create a sense of depth and dimensionality. As the camera lingers, it becomes clear that this woman is not just any festival-goer, she's the star of the show. Her dirndl attire is immaculately detailed, from the intricate patterns on her apron to the delicate flowers in her hair. Every stitch, every thread, every detail is meticulously rendered, creating a photorealistic masterpiece. In a stunning double exposure effect, the woman's image is superimposed over a blurred background of revelers and festivities, creating an otherworldly sense of presence. The result is a truly cinematic scene that transports the viewer to the heart of Oktoberfest. This image is a true masterpiece, showcasing the artist's skill and attention to detail in capturing the essence of Oktoberfest. 
With its ultra-sharp lines, intricate details, and photorealistic quality, it's sure to be an award-winning work that leaves a lasting impression on all who see it.\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha, a woman with blonde hair, brown eyes, with striking features, standing proudly in front of a traditional Bavarian backdrop, her long braid cascading down her back like a waterfall. She wears a intricately detailed dirndl attire, complete with delicate lace and embroidery, perfectly capturing the essence of Oktoberfest. Side view. The camera captures her from a 45-degree angle, emphasizing her slender neck and shoulders as she gazes directly at the viewer. Soft, warm light from a nearby lantern casts a flattering glow on her skin, accentuating her high cheekbones and full lips. In the background, the real Oktoberfest festivities unfold: revelers in traditional attire dance and sing around tables laden with steins of beer, while others play lively tunes on accordions and brass instruments. The atmosphere is electric, with vibrant colors and sharp lines that create a sense of depth and dimensionality. As the camera lingers, it becomes clear that this woman is not just any festival-goer, she's the star of the show. Her dirndl attire is immaculately detailed, from the intricate patterns on her apron to the delicate flowers in her hair. Every stitch, every thread, every detail is meticulously rendered, creating a photorealistic masterpiece. In a stunning double exposure effect, the woman's image is superimposed over a blurred background of revelers and festivities, creating an otherworldly sense of presence. The result is a truly cinematic scene that transports the viewer to the heart of Oktoberfest. This image is a true masterpiece, showcasing the artist's skill and attention to detail in capturing the essence of Oktoberfest. 
With its ultra-sharp lines, intricate details, and photorealistic quality, it's sure to be an award-winning work that leaves a lasting impression on all who see it.", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4914983e-8269-4c95-8420-db919dd4573a/width=832/56868751.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UKFr9O~V9]E1D*IUIpR+9va~$*xZE1M{xZju", "type": "image", "metadata": { "hash": "UKFr9O~V9]E1D*IUIpR+9va~$*xZE1M{xZju", "size": 1561225, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 67039400128730, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"closeup photo of carisha, a woman with long blonde hair and brown eyes, wearing a shirt, looking at the viewer with a smile\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_173936\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 67039400128730}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [67039400128730]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, 
\"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 681013407909189, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [67039400128730, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 
26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [19400959146344, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": 
\"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"closeup photo of carisha, a woman with long blonde hair and brown eyes, wearing a shirt, looking at the viewer with a smile\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "closeup photo of carisha, a woman with long blonde hair and brown eyes, wearing a shirt, looking at the viewer with a smile", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/82e2a9a2-aca0-4a97-a640-b679c8a63325/width=832/56868752.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UIG*+dkC-$%1?wWqIqt7yEogD+bI~pozs,j?", "type": "image", "metadata": { "hash": "UIG*+dkC-$%1?wWqIqt7yEogD+bI~pozs,j?", "size": 1698356, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 415240059864150, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": 
\"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha, a woman with brown and and with long, wavy hair adorned with flowers. She wears a modest, flowy, patterned dress and is standing in a sunlit meadow, surrounded by wildflowers. The image has a warm, faded tone, evoking a peaceful, bohemian vibe, 1970s nostalgia, with a slight film grain for that vintage, hippie look.\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_180352\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 415240059864150}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [415240059864150]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 
13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 872114290619192, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [415240059864150, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [935186576478102, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, 
{\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, 
\"widgets_values\": [\"photo of carisha, a woman with brown and and with long, wavy hair adorned with flowers. She wears a modest, flowy, patterned dress and is standing in a sunlit meadow, surrounded by wildflowers. The image has a warm, faded tone, evoking a peaceful, bohemian vibe, 1970s nostalgia, with a slight film grain for that vintage, hippie look.\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha, a woman with brown and and with long, wavy hair adorned with flowers. She wears a modest, flowy, patterned dress and is standing in a sunlit meadow, surrounded by wildflowers. The image has a warm, faded tone, evoking a peaceful, bohemian vibe, 1970s nostalgia, with a slight film grain for that vintage, hippie look.", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a6cac0c6-651e-4b97-84d4-6ea883409fe1/width=832/56868698.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UHJs~~?Ey156*0jbD%NG0-t64pxY?Z%KIUng", "type": "image", "metadata": { "hash": "UHJs~~?Ey156*0jbD%NG0-t64pxY?Z%KIUng", "size": 1567608, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 1053861404917665, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha, standing in a sunlit garden filled with blooming yellow flowers. She has bright, blonde wavy hair that glows in the sunlight, framing her freckled face and wide, joyful smile. Her large, sparkling green eyes convey a sense of warmth and happiness. The woman is wearing a loose, airy yellow blouse that gently reflects the surrounding light, adding softness to the scene. She holds a small bouquet of yellow blossoms in her hand, complementing the background of lush greenery and golden blooms. 
The composition is filled with dynamic brushstrokes and warm tones, capturing the playful energy of the moment. The focus is on the interplay of light and colors, creating a radiant and lively atmosphere. \", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_180558\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 1053861404917665}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [1053861404917665]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", 
\"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 296571186221844, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [1053861404917665, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [51029103743813, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, 
\"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"photo of carisha, standing in a sunlit garden filled with blooming yellow flowers. 
She has bright, blonde wavy hair that glows in the sunlight, framing her freckled face and wide, joyful smile. Her large, sparkling green eyes convey a sense of warmth and happiness. The woman is wearing a loose, airy yellow blouse that gently reflects the surrounding light, adding softness to the scene. She holds a small bouquet of yellow blossoms in her hand, complementing the background of lush greenery and golden blooms. The composition is filled with dynamic brushstrokes and warm tones, capturing the playful energy of the moment. The focus is on the interplay of light and colors, creating a radiant and lively atmosphere. \"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha, standing in a sunlit garden filled with blooming yellow flowers. She has bright, blonde wavy hair that glows in the sunlight, framing her freckled face and wide, joyful smile. Her large, sparkling green eyes convey a sense of warmth and happiness. The woman is wearing a loose, airy yellow blouse that gently reflects the surrounding light, adding softness to the scene. She holds a small bouquet of yellow blossoms in her hand, complementing the background of lush greenery and golden blooms. The composition is filled with dynamic brushstrokes and warm tones, capturing the playful energy of the moment. The focus is on the interplay of light and colors, creating a radiant and lively atmosphere. 
", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/44658024-452b-4291-9867-152ab9f4aa74/width=832/56868750.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UFFFHV]~_N4.~pE2EMM|9aa#RPxuI;RP-Uxu", "type": "image", "metadata": { "hash": "UFFFHV]~_N4.~pE2EMM|9aa#RPxuI;RP-Uxu", "size": 1479555, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 658106992801418, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha as a sexy business woman. She has beautiful blonde hair with an asymmetrical hair cut. She has large breasts. sleeveless turtleneck. brown eyes. long eyelashes. slim waist. Her eyes show a lot of lust towards the viewer. Her head is tilted forward, head at an angle. She is inside in a office environment.\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_180222\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 658106992801418}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [658106992801418]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": 
\"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 574381639448066, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [658106992801418, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": 
[944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [1083962778012264, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": 
[\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"photo of carisha as a sexy business woman. She has beautiful blonde hair with an asymmetrical hair cut. She has large breasts. sleeveless turtleneck. brown eyes. long eyelashes. slim waist. Her eyes show a lot of lust towards the viewer. Her head is tilted forward, head at an angle. She is inside in a office environment.\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha as a sexy business woman. She has beautiful blonde hair with an asymmetrical hair cut. She has large breasts. sleeveless turtleneck. brown eyes. long eyelashes. slim waist. Her eyes show a lot of lust towards the viewer. Her head is tilted forward, head at an angle. 
She is inside in a office environment.", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/456f6643-cd52-41cd-b50e-8f1c7b3eaec5/width=832/56868837.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "U7Dbvb#60ea1~URPD*IV00t8?Gi^RQ=|^*xt", "type": "image", "metadata": { "hash": "U7Dbvb#60ea1~URPD*IV00t8?Gi^RQ=|^*xt", "size": 1398223, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 225338963370165, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha, a woman with blonde hair, brown eyes, as a waitress, in a stylish uniform, working in a caf\\u00e9\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_180833\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 225338963370165}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [225338963370165]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": 
[1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 870018727435540, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [225338963370165, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], 
\"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [690406144326165, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 
94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"photo of carisha, a woman with blonde hair, brown eyes, as a waitress, in a stylish uniform, working in a caf\\u00e9\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha, a woman with blonde hair, brown eyes, as a waitress, in a stylish uniform, working in a caf\u00e9", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b2a58df5-5a49-4fd4-96a3-b4259595c67c/width=832/56868833.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "U99u1%_NL}Md_}wHT|pc4nI=NGa00gE1rCH?", "type": "image", "metadata": { "hash": "U99u1%_NL}Md_}wHT|pc4nI=NGa00gE1rCH?", "size": 1364329, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 150044244114163, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"photo of carisha, a woman with blonde hair, brown eyes, inside a 
gym, wearing a longsleeve ocean blue gym hoodie, and yoga pants. She is standing in front of gym equipment, parted lips, smile, hair in a ponytail\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_181918\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 150044244114163}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [150044244114163]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", 
\"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 68699959389967, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [150044244114163, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [975110353138552, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": 
\"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"photo of carisha, a woman with blonde hair, brown eyes, inside a gym, wearing a longsleeve ocean blue gym hoodie, and yoga pants. 
She is standing in front of gym equipment, parted lips, smile, hair in a ponytail\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "photo of carisha, a woman with blonde hair, brown eyes, inside a gym, wearing a longsleeve ocean blue gym hoodie, and yoga pants. She is standing in front of gym equipment, parted lips, smile, hair in a ponytail", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bcb98364-3c13-4ef6-81bf-98bef29dabbb/width=832/56868832.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "UGA]sf=|i_t8}?%2t7RjrrRii^xFROs8$$$$", "type": "image", "metadata": { "hash": "UGA]sf=|i_t8}?%2t7RjrrRii^xFROs8$$$$", "size": 1487971, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 630223177167048, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"closeup photo of carisha, a woman with blonde hair, brown eyes, wearing a green turtleneck, tight jeans and high heels. 
She holds a beer, leaning against the bar in a pub.\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_182156\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 630223177167048}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [630223177167048]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 
55}, {\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 957751772802631, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [630223177167048, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [772048303659484, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name 
for S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"closeup photo of carisha, a woman with blonde hair, brown eyes, wearing a green turtleneck, tight jeans and high heels. 
She holds a beer, leaning against the bar in a pub.\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "closeup photo of carisha, a woman with blonde hair, brown eyes, wearing a green turtleneck, tight jeans and high heels. She holds a beer, leaning against the bar in a pub.", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null }, { "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/57b3aa92-6273-49b7-9bd7-98ec174e0e03/width=832/56868836.jpeg", "nsfwLevel": 1, "width": 832, "height": 1216, "hash": "USFOV@$#nzjE};t7S1S3xZt7afV@xtX8jbV@", "type": "image", "metadata": { "hash": "USFOV@$#nzjE};t7S1S3xZt7afV@xtX8jbV@", "size": 1539116, "width": 832, "height": 1216 }, "minor": false, "poi": true, "meta": { "seed": 867596028827431, "vaes": [ "ae.safetensors" ], "comfy": "{\"prompt\": {\"3\": {\"inputs\": {\"seed\": [\"33\", 0], \"steps\": 25, \"cfg\": 1.0, \"sampler_name\": \"euler\", \"scheduler\": \"normal\", \"denoise\": 1.0, \"model\": [\"25\", 0], \"positive\": [\"6\", 0], \"negative\": [\"7\", 0], \"latent_image\": [\"34\", 0]}, \"class_type\": \"KSampler\", \"_meta\": {\"title\": \"KSampler\"}}, \"6\": {\"inputs\": {\"text\": \"closeup photo of carisha, a woman with blonde hair, brown eyes, with long hair wearing a greenish-blue hoodie. she is hiking in an autumn forest on a sunny day. 
she is looking at the viewer and smiling.\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Positive)\"}}, \"7\": {\"inputs\": {\"text\": \"\", \"clip\": [\"25\", 1]}, \"class_type\": \"CLIPTextEncode\", \"_meta\": {\"title\": \"CLIP Text Encode (Negative)\"}}, \"8\": {\"inputs\": {\"samples\": [\"3\", 0], \"vae\": [\"30\", 0]}, \"class_type\": \"VAEDecode\", \"_meta\": {\"title\": \"VAE Decode\"}}, \"9\": {\"inputs\": {\"filename_prefix\": \"2025-02-09/carisha_182302\", \"images\": [\"8\", 0]}, \"class_type\": \"SaveImage\", \"_meta\": {\"title\": \"Save Image\"}}, \"17\": {\"inputs\": {\"model_name\": \"4x_NMKD-Siax_200k.pth\", \"+\": null}, \"class_type\": \"UpscaleModelLoader\", \"_meta\": {\"title\": \"Load Upscale Model\"}}, \"25\": {\"inputs\": {\"PowerLoraLoaderHeaderWidget\": {\"type\": \"PowerLoraLoaderHeaderWidget\"}, \"lora_1\": {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1}, \"\\u2795 Add Lora\": \"\", \"model\": [\"29\", 0], \"clip\": [\"28\", 0]}, \"class_type\": \"Power Lora Loader (rgthree)\", \"_meta\": {\"title\": \"Power Lora Loader (rgthree)\"}}, \"28\": {\"inputs\": {\"clip_name1\": \"t5xxl_fp8_e4m3fn.safetensors\", \"clip_name2\": \"clip_l.safetensors\", \"type\": \"flux\", \"+\": null}, \"class_type\": \"DualCLIPLoader\", \"_meta\": {\"title\": \"DualCLIPLoader\"}}, \"29\": {\"inputs\": {\"unet_name\": \"flux1-dev-fp8.safetensors\", \"weight_dtype\": \"fp8_e4m3fn_fast\", \"+\": null}, \"class_type\": \"UNETLoader\", \"_meta\": {\"title\": \"Load Diffusion Model\"}}, \"30\": {\"inputs\": {\"vae_name\": \"ae.safetensors\", \"+\": null}, \"class_type\": \"VAELoader\", \"_meta\": {\"title\": \"Load VAE\"}}, \"33\": {\"inputs\": {\"seed\": 867596028827431}, \"class_type\": \"Seed (rgthree)\", \"_meta\": {\"title\": \"Seed (rgthree)\"}, \"is_changed\": [867596028827431]}, \"34\": {\"inputs\": {\"resolution\": \"832x1216 (0.68)\", \"batch_size\": 1, \"width_override\": 0, \"height_override\": 0}, \"class_type\": \"SDXLEmptyLatentSizePicker+\", \"_meta\": {\"title\": \"\\ud83d\\udd27 Empty Latent Size Picker\"}}}, \"workflow\": {\"last_node_id\": 39, \"last_link_id\": 64, \"nodes\": [{\"id\": 8, \"type\": \"VAEDecode\", \"pos\": [1211.7613525390625, 207.52772521972656], \"size\": [210, 46], \"flags\": {\"collapsed\": true}, \"order\": 11, \"mode\": 0, \"inputs\": [{\"name\": \"samples\", \"type\": \"LATENT\", \"link\": 7}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 54}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [9, 24], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"VAEDecode\"}, \"widgets_values\": []}, {\"id\": 18, \"type\": \"SaveImage\", \"pos\": [2197, 223.68397521972656], \"size\": [1249.5550537109375, 1464.7823486328125], \"flags\": {}, \"order\": 14, \"mode\": 4, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 30}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/image_%KSampler.seed%\"]}, {\"id\": 16, \"type\": \"UltimateSDUpscale\", \"pos\": [1868.0947265625, 227], \"size\": [315, 826], \"flags\": {}, \"order\": 13, \"mode\": 4, \"inputs\": [{\"name\": \"image\", \"type\": \"IMAGE\", \"link\": 24}, {\"name\": \"model\", \"type\": \"MODEL\", \"link\": 38}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 27}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 31}, {\"name\": \"vae\", \"type\": \"VAE\", \"link\": 55}, 
{\"name\": \"upscale_model\", \"type\": \"UPSCALE_MODEL\", \"link\": 25}, {\"name\": \"tile_width\", \"type\": \"INT\", \"link\": 50, \"widget\": {\"name\": \"tile_width\"}}, {\"name\": \"tile_height\", \"type\": \"INT\", \"link\": 51, \"widget\": {\"name\": \"tile_height\"}}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 48, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"IMAGE\", \"type\": \"IMAGE\", \"links\": [30], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UltimateSDUpscale\"}, \"widgets_values\": [4, 1004509612896576, \"randomize\", 2, 9, \"euler\", \"normal\", 0.15, \"Linear\", 768, 1024, 8, 32, \"None\", 1, 64, 8, 16, true, false]}, {\"id\": 17, \"type\": \"UpscaleModelLoader\", \"pos\": [1628.652587890625, 202.11581420898438], \"size\": [228.74729919433594, 82], \"flags\": {\"collapsed\": true}, \"order\": 0, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"UPSCALE_MODEL\", \"type\": \"UPSCALE_MODEL\", \"links\": [25]}], \"properties\": {\"Node name for S&R\": \"UpscaleModelLoader\"}, \"widgets_values\": [\"4x_NMKD-Siax_200k.pth\", null]}, {\"id\": 34, \"type\": \"SDXLEmptyLatentSizePicker+\", \"pos\": [275.15325927734375, 800.7346801757812], \"size\": [259.20001220703125, 170], \"flags\": {}, \"order\": 1, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [49], \"slot_index\": 0}, {\"name\": \"width\", \"type\": \"INT\", \"links\": [50], \"slot_index\": 1}, {\"name\": \"height\", \"type\": \"INT\", \"links\": [51], \"slot_index\": 2}], \"properties\": {\"Node name for S&R\": \"SDXLEmptyLatentSizePicker+\"}, \"widgets_values\": [\"832x1216 (0.68)\", 1, 0, 0]}, {\"id\": 33, \"type\": \"Seed (rgthree)\", \"pos\": [549.7734985351562, 806.7191772460938], \"size\": [244.56271362304688, 159.79544067382812], \"flags\": {}, \"order\": 2, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"SEED\", \"type\": \"INT\", \"links\": [47, 48], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}], \"properties\": {}, \"widgets_values\": [867596028827431, null, null, null]}, {\"id\": 38, \"type\": \"Reroute\", \"pos\": [944.2876586914062, 218.80259704589844], \"size\": [75, 26], \"flags\": {}, \"order\": 6, \"mode\": 0, \"inputs\": [{\"name\": \"\", \"type\": \"*\", \"link\": 61}], \"outputs\": [{\"name\": \"\", \"type\": \"VAE\", \"links\": [54, 55], \"slot_index\": 0}], \"properties\": {\"showOutputText\": false, \"horizontal\": false}}, {\"id\": 3, \"type\": \"KSampler\", \"pos\": [844, 232], \"size\": [349.8854064941406, 747.8931274414062], \"flags\": {}, \"order\": 10, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 37}, {\"name\": \"positive\", \"type\": \"CONDITIONING\", \"link\": 4}, {\"name\": \"negative\", \"type\": \"CONDITIONING\", \"link\": 21}, {\"name\": \"latent_image\", \"type\": \"LATENT\", \"link\": 49}, {\"name\": \"seed\", \"type\": \"INT\", \"link\": 47, \"widget\": {\"name\": \"seed\"}}], \"outputs\": [{\"name\": \"LATENT\", \"type\": \"LATENT\", \"links\": [7], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"KSampler\"}, \"widgets_values\": [723416045864649, \"randomize\", 25, 1, \"euler\", \"normal\", 1]}, {\"id\": 30, \"type\": \"VAELoader\", \"pos\": [-522.5015869140625, 230.6754608154297], \"size\": [318.4554138183594, 84.38363647460938], \"flags\": {\"collapsed\": true}, \"order\": 3, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"VAE\", \"type\": \"VAE\", \"links\": [61], \"slot_index\": 0}], \"properties\": {\"Node name for 
S&R\": \"VAELoader\"}, \"widgets_values\": [\"ae.safetensors\", null]}, {\"id\": 29, \"type\": \"UNETLoader\", \"pos\": [-545.8380126953125, 272.9270935058594], \"size\": [343.6680603027344, 106], \"flags\": {\"collapsed\": true}, \"order\": 4, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [64], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"UNETLoader\"}, \"widgets_values\": [\"flux1-dev-fp8.safetensors\", \"fp8_e4m3fn_fast\", null]}, {\"id\": 28, \"type\": \"DualCLIPLoader\", \"pos\": [-531.428955078125, 319.86724853515625], \"size\": [347.35186767578125, 130], \"flags\": {\"collapsed\": true}, \"order\": 5, \"mode\": 0, \"inputs\": [], \"outputs\": [{\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [62], \"slot_index\": 0}], \"properties\": {\"Node name for S&R\": \"DualCLIPLoader\"}, \"widgets_values\": [\"t5xxl_fp8_e4m3fn.safetensors\", \"clip_l.safetensors\", \"flux\", null]}, {\"id\": 25, \"type\": \"Power Lora Loader (rgthree)\", \"pos\": [-313.511962890625, 235.59800720214844], \"size\": [645.4066772460938, 233.36923217773438], \"flags\": {}, \"order\": 7, \"mode\": 0, \"inputs\": [{\"name\": \"model\", \"type\": \"MODEL\", \"link\": 64, \"dir\": 3}, {\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 62, \"dir\": 3}], \"outputs\": [{\"name\": \"MODEL\", \"type\": \"MODEL\", \"links\": [37, 38], \"slot_index\": 0, \"shape\": 3, \"dir\": 4}, {\"name\": \"CLIP\", \"type\": \"CLIP\", \"links\": [35, 36], \"slot_index\": 1, \"shape\": 3, \"dir\": 4}], \"properties\": {\"Show Strengths\": \"Single Strength\"}, \"widgets_values\": [null, {\"type\": \"PowerLoraLoaderHeaderWidget\"}, {\"on\": true, \"lora\": \"!Myloras\\\\Carisha\\\\Carisha_rank8_bf16-step03000.safetensors\", \"strength\": 1, \"strengthTwo\": null}, null, \"\"]}, {\"id\": 9, \"type\": \"SaveImage\", \"pos\": [1215, 192.27305603027344], \"size\": [629.7261352539062, 859.7188720703125], \"flags\": {}, \"order\": 12, \"mode\": 0, \"inputs\": [{\"name\": \"images\", \"type\": \"IMAGE\", \"link\": 9}], \"outputs\": [], \"properties\": {}, \"widgets_values\": [\"%date:yyyy-MM-dd%/carisha_%date:hhmmss%\"]}, {\"id\": 7, \"type\": \"CLIPTextEncode\", \"pos\": [536.8186645507812, 755.2752075195312], \"size\": [442.9576416015625, 94.61177062988281], \"flags\": {\"collapsed\": true}, \"order\": 9, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 36}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [21, 31], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Negative)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"\"], \"color\": \"#322\", \"bgcolor\": \"#533\"}, {\"id\": 6, \"type\": \"CLIPTextEncode\", \"pos\": [343.7825012207031, 215.52549743652344], \"size\": [482.2969665527344, 494.79656982421875], \"flags\": {}, \"order\": 8, \"mode\": 0, \"inputs\": [{\"name\": \"clip\", \"type\": \"CLIP\", \"link\": 35}], \"outputs\": [{\"name\": \"CONDITIONING\", \"type\": \"CONDITIONING\", \"links\": [4, 27], \"slot_index\": 0}], \"title\": \"CLIP Text Encode (Positive)\", \"properties\": {\"Node name for S&R\": \"CLIPTextEncode\"}, \"widgets_values\": [\"closeup photo of carisha, a woman with blonde hair, brown eyes, with long hair wearing a greenish-blue hoodie. she is hiking in an autumn forest on a sunny day. 
she is looking at the viewer and smiling.\"], \"color\": \"#232\", \"bgcolor\": \"#353\"}], \"links\": [[4, 6, 0, 3, 1, \"CONDITIONING\"], [7, 3, 0, 8, 0, \"LATENT\"], [9, 8, 0, 9, 0, \"IMAGE\"], [21, 7, 0, 3, 2, \"CONDITIONING\"], [24, 8, 0, 16, 0, \"IMAGE\"], [25, 17, 0, 16, 5, \"UPSCALE_MODEL\"], [27, 6, 0, 16, 2, \"CONDITIONING\"], [30, 16, 0, 18, 0, \"IMAGE\"], [31, 7, 0, 16, 3, \"CONDITIONING\"], [35, 25, 1, 6, 0, \"CLIP\"], [36, 25, 1, 7, 0, \"CLIP\"], [37, 25, 0, 3, 0, \"MODEL\"], [38, 25, 0, 16, 1, \"MODEL\"], [47, 33, 0, 3, 4, \"INT\"], [48, 33, 0, 16, 8, \"INT\"], [49, 34, 0, 3, 3, \"LATENT\"], [50, 34, 1, 16, 6, \"INT\"], [51, 34, 2, 16, 7, \"INT\"], [54, 38, 0, 8, 1, \"VAE\"], [55, 38, 0, 16, 4, \"VAE\"], [61, 30, 0, 38, 0, \"*\"], [62, 28, 0, 25, 1, \"CLIP\"], [64, 29, 0, 25, 0, \"MODEL\"]], \"groups\": [], \"config\": {}, \"extra\": {\"ds\": {\"scale\": 0.8390545288824038, \"offset\": [66.01527400302358, -164.71021253714747]}, \"VHS_latentpreview\": false, \"VHS_latentpreviewrate\": 0, \"ue_links\": []}, \"version\": 0.4, \"widget_idx_map\": {\"3\": {\"sampler_name\": 4, \"scheduler\": 5}, \"16\": {\"sampler_name\": 5, \"scheduler\": 6}, \"33\": {\"seed\": 0}}}}", "steps": 25, "models": [], "prompt": "closeup photo of carisha, a woman with blonde hair, brown eyes, with long hair wearing a greenish-blue hoodie. she is hiking in an autumn forest on a sunny day. she is looking at the viewer and smiling.", "denoise": 1, "sampler": "Euler", "cfgScale": 1, "modelIds": [], "scheduler": "normal", "upscalers": [ "4x_NMKD-Siax_200k.pth" ], "versionIds": [], "controlNets": [], "additionalResources": [] }, "availability": "Public", "hasMeta": true, "hasPositivePrompt": true, "onSite": false, "remixOfId": null } ], "downloadUrl": "https://civitai.com/api/download/models/1394407" }
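
Usage note (not part of the original API response): a minimal Python sketch for working with the payload above. It assumes the JSON has been saved locally as version_1394407.json and that the primary .safetensors file has already been downloaded into the working directory; both file names, and the guard around missing meta, are assumptions for illustration. It verifies the download against the SHA256 published under files[].hashes and re-parses each image's meta.comfy string (which is JSON embedded as a string) to recover the prompt and seed. Note that the Civitai downloadUrl may require an API token, and that if a copy of this payload contains literal newlines inside the embedded prompt text (as in the rendering above), those must be re-escaped before json.loads will accept the comfy string.

    # Sketch, assuming the payload is stored as version_1394407.json (hypothetical path).
    import hashlib
    import json

    with open("version_1394407.json", "r", encoding="utf-8") as fh:
        version = json.load(fh)

    # 1) Verify the primary file against the SHA256 listed in files[].hashes.
    primary = next(f for f in version["files"] if f.get("primary"))
    expected_sha256 = primary["hashes"]["SHA256"].lower()

    digest = hashlib.sha256()
    # e.g. Carisha_rank8_bf16-step03000.safetensors, assumed to be in the working directory
    with open(primary["name"], "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            digest.update(chunk)
    assert digest.hexdigest() == expected_sha256, "hash mismatch - re-download the file"

    # 2) meta.comfy is a JSON string nested inside the outer JSON; parse it to get the
    #    ComfyUI prompt graph used for each preview image (node "6" holds the positive
    #    prompt, node "33" the seed, per the workflows in this payload).
    for image in version["images"]:
        meta = image.get("meta")
        if not meta or "comfy" not in meta:
            continue
        comfy = json.loads(meta["comfy"])
        positive = comfy["prompt"]["6"]["inputs"]["text"]
        seed = comfy["prompt"]["33"]["inputs"]["seed"]
        print(image["url"], seed, positive[:60], sep="\n  ")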