Prompt
score_9, score_8_up, score_7_up, AL1c3, 1girl, solo, looking at viewer, smile, blonde hair, brown eyes, blue sweater, lips, eyelashes, portrait, realistic, outdoor
Negative Prompt
score_6, score_5, score_4, text, english, signature, watermark, artist name,
Meta Information
seed: 595284715897296
vaes:
Model: ponyRealism_v21VAE
comfy: {“prompt”: {“1”: {“inputs”: {“ckpt_name”: “ponyRealism_v21VAE.safetensors”, “+”: null}, “class_type”: “CheckpointLoaderSimple”}, “3”: {“inputs”: {“stop_at_clip_layer”: -2, “clip”: [“1”, 1]}, “class_type”: “CLIPSetLastLayer”}, “31”: {“inputs”: {“dimensions”: “1216 x 832”, “invert”: true, “batch_size”: 1}, “class_type”: “EmptyLatentImagePresets”}, “34”: {“inputs”: {“image”: “2024-06-11 10-30-27.png”, “upload”: “image”}, “class_type”: “LoadImage”, “is_changed”: [“d92a0866c580852c6df67866faae2235556dd52219258d2bfbf2707b2b67b3c7”]}, “54”: {“inputs”: {“model”: [“1”, 0], “clip”: [“3”, 0], “lora_stack”: [“55”, 0]}, “class_type”: “CR Apply LoRA Stack”}, “55”: {“inputs”: {“switch_1”: “On”, “lora_name_1”: “ALice Delish_v2.safetensors”, “model_weight_1”: 0.9500000000000001, “clip_weight_1”: 1.0, “switch_2”: “Off”, “lora_name_2”: “MeganFox_v3.safetensors”, “model_weight_2”: 1.01, “clip_weight_2”: 1.0, “switch_3”: “Off”, “lora_name_3”: “None”, “model_weight_3”: 1.0, “clip_weight_3”: 1.0}, “class_type”: “CR LoRA Stack”}, “124”: {“inputs”: {“seed”: [“228”, 0], “steps”: 25, “cfg”: 4.0, “sampler_name”: “dpmpp_2m_sde”, “scheduler”: “karras”, “denoise”: 1.0, “preview_method”: “auto”, “vae_decode”: “true”, “model”: [“54”, 0], “positive”: [“207”, 0], “negative”: [“125”, 0], “latent_image”: [“31”, 0], “optional_vae”: [“1”, 2]}, “class_type”: “KSampler (Efficient)”}, “125”: {“inputs”: {“text”: “score_6, score_5, score_4, text, english, signature, watermark, artist name,”, “clip”: [“54”, 1]}, “class_type”: “CLIPTextEncode”}, “137”: {“inputs”: {“seed”: [“228”, 0], “steps”: 30, “cfg”: 4.0, “sampler_name”: “dpmpp_2m_sde”, “scheduler”: “karras”, “denoise”: 0.45, “preview_method”: “auto”, “vae_decode”: “true”, “model”: [“124”, 0], “positive”: [“124”, 1], “negative”: [“124”, 2], “latent_image”: [“139”, 0], “optional_vae”: [“1”, 2]}, “class_type”: “KSampler (Efficient)”}, “138”: {“inputs”: {“images”: [“124”, 5]}, “class_type”: “PreviewImage”}, “139”: {“inputs”: {“version”: “SDXL”, “upscale”: 1.3, “latent”: [“124”, 3]}, “class_type”: “NNLatentUpscale”}, “172”: {“inputs”: {“images”: [“137”, 5]}, “class_type”: “PreviewImage”}, “188”: {“inputs”: {“guide_size”: 512.0, “guide_size_for”: true, “max_size”: 1024.0, “seed”: [“228”, 0], “steps”: 20, “cfg”: 2.5, “sampler_name”: “dpmpp_2m_sde”, “scheduler”: “karras”, “denoise”: 0.18, “feather”: 5, “noise_mask”: true, “force_inpaint”: true, “bbox_threshold”: 0.5, “bbox_dilation”: 10, “bbox_crop_factor”: 3.0, “sam_detection_hint”: “center-1”, “sam_dilation”: 0, “sam_threshold”: 0.93, “sam_bbox_expansion”: 0, “sam_mask_hint_threshold”: 0.7, “sam_mask_hint_use_negative”: “False”, “drop_size”: 10, “wildcard”: “”, “cycle”: 1, “inpaint_model”: false, “noise_mask_feather”: 20, “image”: [“137”, 5], “model”: [“124”, 0], “clip”: [“54”, 1], “positive”: [“124”, 1], “negative”: [“124”, 2], “bbox_detector”: [“189”, 0], “vae”: [“1”, 2]}, “class_type”: “FaceDetailer”}, “189”: {“inputs”: {“model_name”: “bbox/face_yolov8m.pt”}, “class_type”: “UltralyticsDetectorProvider”}, “190”: {“inputs”: {“images”: [“188”, 0]}, “class_type”: “PreviewImage”}, “207”: {“inputs”: {“text”: “score_9, score_8_up, score_7_up, AL1c3, 1girl, solo, looking at viewer, smile, blonde hair, brown eyes, blue sweater, lips, eyelashes, portrait, realistic, outdoor”, “clip”: [“54”, 1]}, “class_type”: “CLIPTextEncode”}, “209”: {“inputs”: {“text”: [“233”, 0], “clip”: [“54”, 1]}, “class_type”: “Text to Conditioning”}, “222”: {“inputs”: {“VAE”: [“1”, 2]}, “class_type”: “Anything Everywhere”}, “227”: {“inputs”: {“INT”: [“228”, 0]}, 
“class_type”: “Anything Everywhere”}, “228”: {“inputs”: {“seed”: 595284715897296}, “class_type”: “CR Seed”}, “233”: {“inputs”: {“prompt”: “bare shoulders, cleavage,pink dress, dress”}, “class_type”: “CR Prompt Text”}}, “workflow”: {“last_node_id”: 251, “last_link_id”: 486, “nodes”: [{“id”: 139, “type”: “NNLatentUpscale”, “pos”: [-4577, -1438], “size”: {“0”: 315, “1”: 82}, “flags”: {“collapsed”: false}, “order”: 34, “mode”: 0, “inputs”: [{“name”: “latent”, “type”: “LATENT”, “link”: 286}], “outputs”: [{“name”: “LATENT”, “type”: “LATENT”, “links”: [287], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “NNLatentUpscale”}, “widgets_values”: [“SDXL”, 1.3]}, {“id”: 222, “type”: “Anything Everywhere”, “pos”: [-6438, -692], “size”: {“0”: 210, “1”: 26}, “flags”: {“collapsed”: true}, “order”: 26, “mode”: 0, “inputs”: [{“name”: “VAE”, “type”: “", “link”: 451, “color_on”: “#FF6E6E”}], “properties”: {“group_restricted”: 0, “color_restricted”: 0, “Node name for S&R”: “Anything Everywhere”}, “widgets_values”: []}, {“id”: 54, “type”: “CR Apply LoRA Stack”, “pos”: [-6092, -743], “size”: {“0”: 254.40000915527344, “1”: 66}, “flags”: {“collapsed”: true}, “order”: 28, “mode”: 0, “inputs”: [{“name”: “model”, “type”: “MODEL”, “link”: 128}, {“name”: “clip”, “type”: “CLIP”, “link”: 127}, {“name”: “lora_stack”, “type”: “LORA_STACK”, “link”: 126, “slot_index”: 2}], “outputs”: [{“name”: “MODEL”, “type”: “MODEL”, “links”: [420], “shape”: 3, “slot_index”: 0}, {“name”: “CLIP”, “type”: “CLIP”, “links”: [259, 377, 428, 430], “shape”: 3, “slot_index”: 1}, {“name”: “show_help”, “type”: “STRING”, “links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “CR Apply LoRA Stack”}}, {“id”: 237, “type”: “Note”, “pos”: [-7067, -545], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 0, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Select loras”], “color”: “#222”, “bgcolor”: “#000”}, {“id”: 238, “type”: “Note”, “pos”: [-7066, -725], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 1, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Select Checkpoint”], “color”: “#233”, “bgcolor”: “#355”}, {“id”: 240, “type”: “Note”, “pos”: [-7060, -915], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 2, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Image size Settings”], “color”: “#232”, “bgcolor”: “#353”}, {“id”: 236, “type”: “Note”, “pos”: [-6721, -212], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 3, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Remove the tags you don’t want “], “color”: “#322”, “bgcolor”: “#533”}, {“id”: 209, “type”: “Text to Conditioning”, “pos”: [-6000, -592], “size”: {“0”: 315, “1”: 58}, “flags”: {“collapsed”: true}, “order”: 30, “mode”: 0, “inputs”: [{“name”: “clip”, “type”: “CLIP”, “link”: 428}, {“name”: “text”, “type”: “STRING”, “link”: 425, “widget”: {“name”: “text”}}], “outputs”: [{“name”: “CONDITIONING”, “type”: “CONDITIONING”, “links”: [461], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “Text to Conditioning”}, “widgets_values”: [””]}, {“id”: 245, “type”: “Note”, “pos”: [-5643, -722], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 4, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Merges the prompts”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 244, “type”: “Note”, “pos”: [-5210, -262], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 5, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“First pass”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 246, “type”: “Note”, “pos”: [-4540, 
-1553], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 6, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Upscale for second pass”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 247, “type”: “Note”, “pos”: [-3853, -1318], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 7, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Face similarity ratio between Source image and second pass result”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 249, “type”: “Note”, “pos”: [-3907, -283], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 8, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Second Pass”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 189, “type”: “UltralyticsDetectorProvider”, “pos”: [-2476, -187], “size”: {“0”: 315, “1”: 78}, “flags”: {“collapsed”: false}, “order”: 9, “mode”: 0, “outputs”: [{“name”: “BBOX_DETECTOR”, “type”: “BBOX_DETECTOR”, “links”: [378], “shape”: 3}, {“name”: “SEGM_DETECTOR”, “type”: “SEGM_DETECTOR”, “links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “UltralyticsDetectorProvider”}, “widgets_values”: [“bbox/face_yolov8m.pt”]}, {“id”: 248, “type”: “Note”, “pos”: [-2045, -1381], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 10, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Face similarity ratio between Source image and FaceDetailer pass”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 250, “type”: “Note”, “pos”: [-2362, -297], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 11, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Facedetailer Pass”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 251, “type”: “Note”, “pos”: [-246, -525], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 12, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Final Result”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 239, “type”: “Note”, “pos”: [-7068, -1193], “size”: {“0”: 210, “1”: 80.64366912841797}, “flags”: {}, “order”: 13, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Seed Settings using Anything everywhere custom node, connect manually if you don’t to use Anything Everywhere”], “color”: “#332922”, “bgcolor”: “#593930”}, {“id”: 241, “type”: “Note”, “pos”: [-6482, -514], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 14, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Will go in front of the prompt”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 213, “type”: “ConditioningCombine”, “pos”: [-5650, -612], “size”: {“0”: 342.5999755859375, “1”: 46}, “flags”: {“collapsed”: false}, “order”: 32, “mode”: 4, “inputs”: [{“name”: “conditioning_1”, “type”: “CONDITIONING”, “link”: 460}, {“name”: “conditioning_2”, “type”: “CONDITIONING”, “link”: 461}], “outputs”: [{“name”: “CONDITIONING”, “type”: “CONDITIONING”, “links”: [462], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “ConditioningCombine”}}, {“id”: 204, “type”: “Face Similarity”, “pos”: [-2118, -1077], “size”: {“0”: 315, “1”: 78}, “flags”: {}, “order”: 41, “mode”: 4, “inputs”: [{“name”: “image1”, “type”: “IMAGE”, “link”: 403}, {“name”: “image2”, “type”: “IMAGE”, “link”: 401}], “outputs”: [{“name”: “similarity”, “type”: “FLOAT”, “links”: [402], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “Face Similarity”}, “widgets_values”: [“face_recognition”]}, {“id”: 205, “type”: “Show any [Crystools]”, “pos”: [-2095, -1274], “size”: {“0”: 315, “1”: 148.00006103515625}, “flags”: {}, “order”: 43, “mode”: 4, “inputs”: [{“name”: “any_value”, “type”: "”, “link”: 402}], “properties”: {“Node name for S&R”: “Show any 
[Crystools]”}, “widgets_values”: [false, true, “”]}, {“id”: 233, “type”: “CR Prompt Text”, “pos”: [-6517, -107], “size”: {“0”: 397.1485900878906, “1”: 108.0190658569336}, “flags”: {}, “order”: 15, “mode”: 0, “outputs”: [{“name”: “prompt”, “type”: “STRING”, “links”: [456], “shape”: 3, “slot_index”: 0}, {“name”: “show_help”, “type”: “STRING”, “links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “CR Prompt Text”}, “widgets_values”: [“bare shoulders, cleavage,pink dress, dress”]}, {“id”: 243, “type”: “Note”, “pos”: [-5659, -244], “size”: {“0”: 210, “1”: 58}, “flags”: {}, “order”: 16, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Negative prompt”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 153, “type”: “WD14Tagger|pysssss”, “pos”: [-6037, -507], “size”: {“0”: 326.36883544921875, “1”: 308.4385070800781}, “flags”: {“collapsed”: false}, “order”: 27, “mode”: 4, “inputs”: [{“name”: “image”, “type”: “IMAGE”, “link”: 423}, {“name”: “exclude_tags”, “type”: “STRING”, “link”: 456, “widget”: {“name”: “exclude_tags”}, “slot_index”: 1}], “outputs”: [{“name”: “STRING”, “type”: “STRING”, “links”: [425], “shape”: 6, “slot_index”: 0}], “properties”: {“Node name for S&R”: “WD14Tagger|pysssss”}, “widgets_values”: [“wd-v1-4-moat-tagger-v2”, 0.35, 0.65, true, false, “artist name, v”], “color”: “#323”, “bgcolor”: “#535”}, {“id”: 3, “type”: “CLIPSetLastLayer”, “pos”: [-6425, -864], “size”: {“0”: 315, “1”: 58}, “flags”: {“collapsed”: false}, “order”: 25, “mode”: 0, “inputs”: [{“name”: “clip”, “type”: “CLIP”, “link”: 1}], “outputs”: [{“name”: “CLIP”, “type”: “CLIP”, “links”: [127], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “CLIPSetLastLayer”}, “widgets_values”: [-2]}, {“id”: 227, “type”: “Anything Everywhere”, “pos”: [-6308, -1191], “size”: {“0”: 210, “1”: 26}, “flags”: {“collapsed”: true}, “order”: 24, “mode”: 0, “inputs”: [{“name”: “INT”, “type”: “", “link”: 453, “color_on”: “”}], “properties”: {“group_restricted”: 0, “color_restricted”: 0, “Node name for S&R”: “Anything Everywhere”}, “widgets_values”: []}, {“id”: 125, “type”: “CLIPTextEncode”, “pos”: [-5656, -507], “size”: {“0”: 400, “1”: 200}, “flags”: {}, “order”: 29, “mode”: 0, “inputs”: [{“name”: “clip”, “type”: “CLIP”, “link”: 259}], “outputs”: [{“name”: “CONDITIONING”, “type”: “CONDITIONING”, “links”: [431], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “CLIPTextEncode”}, “widgets_values”: [“score_6, score_5, score_4, text, english, signature, watermark, artist name,”], “color”: “#322”, “bgcolor”: “#533”}, {“id”: 191, “type”: “Face Similarity”, “pos”: [-3905, -1019], “size”: {“0”: 315, “1”: 78}, “flags”: {}, “order”: 39, “mode”: 4, “inputs”: [{“name”: “image1”, “type”: “IMAGE”, “link”: 415}, {“name”: “image2”, “type”: “IMAGE”, “link”: 416}], “outputs”: [{“name”: “similarity”, “type”: “FLOAT”, “links”: [383], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “Face Similarity”}, “widgets_values”: [“face_recognition”]}, {“id”: 192, “type”: “Show any [Crystools]”, “pos”: [-3900, -1208], “size”: {“0”: 315, “1”: 148.00006103515625}, “flags”: {}, “order”: 42, “mode”: 4, “inputs”: [{“name”: “any_value”, “type”: "”, “link”: 383}], “properties”: {“Node name for S&R”: “Show any [Crystools]”}, “widgets_values”: [false, true, “”]}, {“id”: 228, “type”: “CR Seed”, “pos”: [-6827, -1205], “size”: {“0”: 315, “1”: 102}, “flags”: {}, “order”: 17, “mode”: 0, “outputs”: [{“name”: “seed”, “type”: “INT”, “links”: [453], “shape”: 3, “slot_index”: 0}, {“name”: “show_help”, “type”: “STRING”, 
“links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “CR Seed”}, “widgets_values”: [595284715897296, “randomize”], “color”: “#223”, “bgcolor”: “#335”}, {“id”: 1, “type”: “CheckpointLoaderSimple”, “pos”: [-6821, -755], “size”: {“0”: 315, “1”: 122}, “flags”: {}, “order”: 18, “mode”: 0, “outputs”: [{“name”: “MODEL”, “type”: “MODEL”, “links”: [128], “shape”: 3, “slot_index”: 0}, {“name”: “CLIP”, “type”: “CLIP”, “links”: [1], “shape”: 3, “slot_index”: 1}, {“name”: “VAE”, “type”: “VAE”, “links”: [451], “shape”: 3, “slot_index”: 2}], “properties”: {“Node name for S&R”: “CheckpointLoaderSimple”}, “widgets_values”: [“ponyRealism_v21VAE.safetensors”, null], “color”: “#223”, “bgcolor”: “#335”}, {“id”: 138, “type”: “PreviewImage”, “pos”: [-4839, -1314], “size”: {“0”: 886.2273559570312, “1”: 1284.03955078125}, “flags”: {}, “order”: 35, “mode”: 0, “inputs”: [{“name”: “images”, “type”: “IMAGE”, “link”: 371}], “properties”: {“Node name for S&R”: “PreviewImage”}}, {“id”: 190, “type”: “PreviewImage”, “pos”: [-1453, -1581], “size”: {“0”: 1167.648681640625, “1”: 1729.986328125}, “flags”: {}, “order”: 40, “mode”: 0, “inputs”: [{“name”: “images”, “type”: “IMAGE”, “link”: 379}], “properties”: {“Node name for S&R”: “PreviewImage”}}, {“id”: 31, “type”: “EmptyLatentImagePresets”, “pos”: [-6823, -951], “size”: {“0”: 315, “1”: 146}, “flags”: {}, “order”: 19, “mode”: 0, “outputs”: [{“name”: “Latent”, “type”: “LATENT”, “links”: [422], “shape”: 3, “slot_index”: 0}, {“name”: “Width”, “type”: “INT”, “links”: null, “shape”: 3}, {“name”: “Height”, “type”: “INT”, “links”: null, “shape”: 3, “slot_index”: 2}], “properties”: {“Node name for S&R”: “EmptyLatentImagePresets”}, “widgets_values”: [“1216 x 832”, true, 1], “color”: “#223”, “bgcolor”: “#335”}, {“id”: 172, “type”: “PreviewImage”, “pos”: [-3528, -1475], “size”: {“0”: 1123.129638671875, “1”: 1678.2646484375}, “flags”: {}, “order”: 37, “mode”: 0, “inputs”: [{“name”: “images”, “type”: “IMAGE”, “link”: 341}], “properties”: {“Node name for S&R”: “PreviewImage”}}, {“id”: 234, “type”: “ConditioningConcat”, “pos”: [-5643, -16], “size”: {“0”: 380.4000244140625, “1”: 46}, “flags”: {}, “order”: 20, “mode”: 4, “inputs”: [{“name”: “conditioning_to”, “type”: “CONDITIONING”, “link”: null}, {“name”: “conditioning_from”, “type”: “CONDITIONING”, “link”: null}], “outputs”: [{“name”: “CONDITIONING”, “type”: “CONDITIONING”, “links”: , “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “ConditioningConcat”}}, {“id”: 242, “type”: “Note”, “pos”: [-6032, -153], “size”: {“0”: 319.1242370605469, “1”: 58}, “flags”: {}, “order”: 21, “mode”: 0, “properties”: {“text”: “”}, “widgets_values”: [“Pulls the tags of the source image automatically”], “color”: “#432”, “bgcolor”: “#653”}, {“id”: 188, “type”: “FaceDetailer”, “pos”: [-2138, -946], “size”: {“0”: 382.36981201171875, “1”: 852}, “flags”: {}, “order”: 38, “mode”: 0, “inputs”: [{“name”: “image”, “type”: “IMAGE”, “link”: 411}, {“name”: “model”, “type”: “MODEL”, “link”: 373}, {“name”: “clip”, “type”: “CLIP”, “link”: 377}, {“name”: “vae”, “type”: “VAE”, “link”: null}, {“name”: “positive”, “type”: “CONDITIONING”, “link”: 374}, {“name”: “negative”, “type”: “CONDITIONING”, “link”: 375}, {“name”: “bbox_detector”, “type”: “BBOX_DETECTOR”, “link”: 378, “slot_index”: 6}, {“name”: “sam_model_opt”, “type”: “SAM_MODEL”, “link”: null}, {“name”: “segm_detector_opt”, “type”: “SEGM_DETECTOR”, “link”: null}, {“name”: “detailer_hook”, “type”: “DETAILER_HOOK”, “link”: null}, {“name”: “seed”, “type”: “INT”, “link”: null, “widget”: {“name”: 
“seed”}}], “outputs”: [{“name”: “image”, “type”: “IMAGE”, “links”: [379, 403], “shape”: 3, “slot_index”: 0}, {“name”: “cropped_refined”, “type”: “IMAGE”, “links”: null, “shape”: 6}, {“name”: “cropped_enhanced_alpha”, “type”: “IMAGE”, “links”: null, “shape”: 6}, {“name”: “mask”, “type”: “MASK”, “links”: null, “shape”: 3}, {“name”: “detailer_pipe”, “type”: “DETAILER_PIPE”, “links”: null, “shape”: 3}, {“name”: “cnet_images”, “type”: “IMAGE”, “links”: null, “shape”: 6}], “properties”: {“Node name for S&R”: “FaceDetailer”}, “widgets_values”: [512, true, 1024, 1074180825169944, “randomize”, 20, 2.5, “dpmpp_2m_sde”, “karras”, 0.18, 5, true, true, 0.5, 10, 3, “center-1”, 0, 0.93, 0, 0.7, “False”, 10, “”, 1, false, 20]}, {“id”: 137, “type”: “KSampler (Efficient)”, “pos”: [-3915, -893], “size”: {“0”: 325, “1”: 562}, “flags”: {}, “order”: 36, “mode”: 0, “inputs”: [{“name”: “model”, “type”: “MODEL”, “link”: 284}, {“name”: “positive”, “type”: “CONDITIONING”, “link”: 283, “slot_index”: 1}, {“name”: “negative”, “type”: “CONDITIONING”, “link”: 282, “slot_index”: 2}, {“name”: “latent_image”, “type”: “LATENT”, “link”: 287}, {“name”: “optional_vae”, “type”: “VAE”, “link”: null}, {“name”: “script”, “type”: “SCRIPT”, “link”: null}, {“name”: “seed”, “type”: “INT”, “link”: null, “widget”: {“name”: “seed”}, “slot_index”: 6}], “outputs”: [{“name”: “MODEL”, “type”: “MODEL”, “links”: , “shape”: 3, “slot_index”: 0}, {“name”: “CONDITIONING+”, “type”: “CONDITIONING”, “links”: null, “shape”: 3}, {“name”: “CONDITIONING-”, “type”: “CONDITIONING”, “links”: null, “shape”: 3}, {“name”: “LATENT”, “type”: “LATENT”, “links”: , “shape”: 3, “slot_index”: 3}, {“name”: “VAE”, “type”: “VAE”, “links”: null, “shape”: 3}, {“name”: “IMAGE”, “type”: “IMAGE”, “links”: [341, 411, 416], “shape”: 3, “slot_index”: 5}], “properties”: {“Node name for S&R”: “KSampler (Efficient)”}, “widgets_values”: [36400388591581, null, 30, 4, “dpmpp_2m_sde”, “karras”, 0.45, “auto”, “true”], “color”: “#222233”, “bgcolor”: “#333355”, “shape”: 1}, {“id”: 55, “type”: “CR LoRA Stack”, “pos”: [-6832, -575], “size”: {“0”: 315, “1”: 342}, “flags”: {}, “order”: 22, “mode”: 0, “inputs”: [{“name”: “lora_stack”, “type”: “LORA_STACK”, “link”: null}], “outputs”: [{“name”: “LORA_STACK”, “type”: “LORA_STACK”, “links”: [126], “shape”: 3, “slot_index”: 0}, {“name”: “show_help”, “type”: “STRING”, “links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “CR LoRA Stack”}, “widgets_values”: [“On”, “ALice Delish_v2.safetensors”, 0.9500000000000001, 1, “Off”, “MeganFox_v3.safetensors”, 1.01, 1, “Off”, “None”, 1, 1], “color”: “#223”, “bgcolor”: “#335”}, {“id”: 34, “type”: “LoadImage”, “pos”: [-5821, -1654], “size”: {“0”: 521.4766845703125, “1”: 869.163818359375}, “flags”: {}, “order”: 23, “mode”: 0, “outputs”: [{“name”: “IMAGE”, “type”: “IMAGE”, “links”: [401, 415, 423], “shape”: 3, “slot_index”: 0}, {“name”: “MASK”, “type”: “MASK”, “links”: null, “shape”: 3}], “properties”: {“Node name for S&R”: “LoadImage”}, “widgets_values”: [“2024-06-11 10-30-27.png”, “image”]}, {“id”: 124, “type”: “KSampler (Efficient)”, “pos”: [-5210, -874], “size”: {“0”: 325, “1”: 562}, “flags”: {}, “order”: 33, “mode”: 0, “inputs”: [{“name”: “model”, “type”: “MODEL”, “link”: 420}, {“name”: “positive”, “type”: “CONDITIONING”, “link”: 462}, {“name”: “negative”, “type”: “CONDITIONING”, “link”: 431, “slot_index”: 2}, {“name”: “latent_image”, “type”: “LATENT”, “link”: 422}, {“name”: “optional_vae”, “type”: “VAE”, “link”: null}, {“name”: “script”, “type”: “SCRIPT”, “link”: null}, {“name”: “seed”, “type”: 
“INT”, “link”: null, “widget”: {“name”: “seed”}, “slot_index”: 6}], “outputs”: [{“name”: “MODEL”, “type”: “MODEL”, “links”: [284, 373], “shape”: 3, “slot_index”: 0}, {“name”: “CONDITIONING+”, “type”: “CONDITIONING”, “links”: [283, 374], “shape”: 3, “slot_index”: 1}, {“name”: “CONDITIONING-”, “type”: “CONDITIONING”, “links”: [282, 375], “shape”: 3, “slot_index”: 2}, {“name”: “LATENT”, “type”: “LATENT”, “links”: [286], “shape”: 3, “slot_index”: 3}, {“name”: “VAE”, “type”: “VAE”, “links”: , “shape”: 3, “slot_index”: 4}, {“name”: “IMAGE”, “type”: “IMAGE”, “links”: [371], “shape”: 3, “slot_index”: 5}], “properties”: {“Node name for S&R”: “KSampler (Efficient)”}, “widgets_values”: [36400388591581, null, 25, 4, “dpmpp_2m_sde”, “karras”, 1, “auto”, “true”], “color”: “#222233”, “bgcolor”: “#333355”, “shape”: 1}, {“id”: 207, “type”: “CLIPTextEncode”, “pos”: [-6493, -404], “size”: {“0”: 392.71826171875, “1”: 225.68417358398438}, “flags”: {}, “order”: 31, “mode”: 0, “inputs”: [{“name”: “clip”, “type”: “CLIP”, “link”: 430}], “outputs”: [{“name”: “CONDITIONING”, “type”: “CONDITIONING”, “links”: [460], “shape”: 3, “slot_index”: 0}], “properties”: {“Node name for S&R”: “CLIPTextEncode”}, “widgets_values”: [“score_9, score_8_up, score_7_up, AL1c3, 1girl, solo, looking at viewer, smile, blonde hair, brown eyes, blue sweater, lips, eyelashes, portrait, realistic, outdoor”], “color”: “#232”, “bgcolor”: “#353”}], “links”: [[1, 1, 1, 3, 0, “CLIP”], [126, 55, 0, 54, 2, “LORA_STACK”], [127, 3, 0, 54, 1, “CLIP”], [128, 1, 0, 54, 0, “MODEL”], [259, 54, 1, 125, 0, “CLIP”], [282, 124, 2, 137, 2, “CONDITIONING”], [283, 124, 1, 137, 1, “CONDITIONING”], [284, 124, 0, 137, 0, “MODEL”], [286, 124, 3, 139, 0, “LATENT”], [287, 139, 0, 137, 3, “LATENT”], [290, 128, 0, 137, 6, “INT”], [341, 137, 5, 172, 0, “IMAGE”], [371, 124, 5, 138, 0, “IMAGE”], [373, 124, 0, 188, 1, “MODEL”], [374, 124, 1, 188, 4, “CONDITIONING”], [375, 124, 2, 188, 5, “CONDITIONING”], [377, 54, 1, 188, 2, “CLIP”], [378, 189, 0, 188, 6, “BBOX_DETECTOR”], [379, 188, 0, 190, 0, “IMAGE”], [383, 191, 0, 192, 0, “"], [401, 34, 0, 204, 1, “IMAGE”], [402, 204, 0, 205, 0, "”], [403, 188, 0, 204, 0, “IMAGE”], [411, 137, 5, 188, 0, “IMAGE”], [415, 34, 0, 191, 0, “IMAGE”], [416, 137, 5, 191, 1, “IMAGE”], [420, 54, 0, 124, 0, “MODEL”], [422, 31, 0, 124, 3, “LATENT”], [423, 34, 0, 153, 0, “IMAGE”], [425, 153, 0, 209, 1, “STRING”], [428, 54, 1, 209, 0, “CLIP”], [430, 54, 1, 207, 0, “CLIP”], [431, 125, 0, 124, 2, “CONDITIONING”], [451, 1, 2, 222, 0, “VAE”], [453, 228, 0, 227, 0, “INT”], [456, 233, 0, 153, 1, “STRING”], [460, 207, 0, 213, 0, “CONDITIONING”], [461, 209, 0, 213, 1, “CONDITIONING”], [462, 213, 0, 124, 1, “CONDITIONING”], [463, 1, 2, 188, 3, “VAE”], [464, 228, 0, 188, 10, “INT”], [465, 1, 2, 124, 4, “VAE”], [466, 228, 0, 124, 6, “INT”], [467, 1, 2, 137, 4, “VAE”], [468, 228, 0, 137, 6, “INT”], [469, 1, 2, 188, 3, “VAE”], [470, 228, 0, 188, 10, “INT”], [471, 1, 2, 137, 4, “VAE”], [472, 228, 0, 137, 6, “INT”], [473, 1, 2, 124, 4, “VAE”], [474, 228, 0, 124, 6, “INT”], [475, 1, 2, 188, 3, “VAE”], [476, 228, 0, 188, 10, “INT”], [477, 1, 2, 137, 4, “VAE”], [478, 228, 0, 137, 6, “INT”], [479, 1, 2, 124, 4, “VAE”], [480, 228, 0, 124, 6, “INT”], [481, 1, 2, 188, 3, “VAE”], [482, 228, 0, 188, 10, “INT”], [483, 1, 2, 137, 4, “VAE”], [484, 228, 0, 137, 6, “INT”], [485, 1, 2, 124, 4, “VAE”], [486, 228, 0, 124, 6, “INT”]], “groups”: [{“title”: “All”, “bounding”: [-5242, -1498, 5138, 1447], “color”: “#3f789e”, “font_size”: 24, “locked”: false}], “config”: {}, “extra”: 
{“ds”: {“scale”: 0.4950000000000035, “offset”: [5799.055912484415, 1900.4333513254178]}, “groupNodes”: {}}, “version”: 0.4, “widget_idx_map”: {“124”: {“sampler_name”: 4, “scheduler”: 5}, “137”: {“sampler_name”: 4, “scheduler”: 5}, “188”: {“sampler_name”: 7, “scheduler”: 8}, “228”: {“seed”: 0}}}}
steps: 25
models: ['ponyRealism_v21VAE.safetensors']
denoise: 1
sampler: DPM++ 2M SDE Karras
cfgScale: 4
modelIds:
scheduler: karras
upscalers:
versionIds:
controlNets:
additionalResources:
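
The comfy field above is the API-format prompt graph that ComfyUI embeds in its output PNGs as text chunks (the typographic quotes here are copy-paste artifacts; the embedded chunk is plain JSON). Below is a minimal sketch for pulling the key settings back out with Pillow, assuming a generated PNG that still carries its metadata; the filename is a placeholder, and the node IDs (124, 55, 228) come from the workflow shown above.

import json
from PIL import Image

# "output.png" is a placeholder for an image saved by this workflow;
# ComfyUI embeds the prompt graph in PNG text chunks exposed via img.info.
img = Image.open("output.png")
graph = json.loads(img.info["prompt"])

# Node 124 is the first-pass KSampler (Efficient), node 137 the second pass,
# node 188 the FaceDetailer, node 55 the LoRA stack, node 228 the shared seed.
first_pass = graph["124"]["inputs"]
print(first_pass["steps"], first_pass["cfg"],
      first_pass["sampler_name"], first_pass["scheduler"])
print(graph["55"]["inputs"]["lora_name_1"], graph["228"]["inputs"]["seed"])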
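
For reference, the first-pass settings listed above (seed 595284715897296, 25 steps, CFG 4, DPM++ 2M SDE on a Karras schedule, 1216 x 832) can be approximated outside ComfyUI. The sketch below uses Hugging Face diffusers and assumes the checkpoint file is available locally; the CLIP skip of -2, the LoRA stack, the 1.3x latent-upscale second pass, and the FaceDetailer pass are not reproduced, so results will differ from the posted image.

import torch
from diffusers import StableDiffusionXLPipeline, DPMSolverMultistepScheduler

# Load the Pony Realism SDXL checkpoint (local path is an assumption).
pipe = StableDiffusionXLPipeline.from_single_file(
    "ponyRealism_v21VAE.safetensors", torch_dtype=torch.float16
).to("cuda")

# DPM++ 2M SDE with Karras sigmas, matching the workflow's sampler/scheduler.
pipe.scheduler = DPMSolverMultistepScheduler.from_config(
    pipe.scheduler.config, algorithm_type="sde-dpmsolver++", use_karras_sigmas=True
)

generator = torch.Generator(device="cuda").manual_seed(595284715897296)
image = pipe(
    prompt="score_9, score_8_up, score_7_up, AL1c3, 1girl, solo, looking at viewer, "
           "smile, blonde hair, brown eyes, blue sweater, lips, eyelashes, portrait, "
           "realistic, outdoor",
    negative_prompt="score_6, score_5, score_4, text, english, signature, "
                    "watermark, artist name,",
    width=1216, height=832,
    num_inference_steps=25, guidance_scale=4.0,
    generator=generator,
).images[0]
image.save("first_pass.png")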