{
  "3": {
    "inputs": {
      "seed": 224444567475717,
      "steps": 7,
      "cfg": 2,
      "sampler_name": "lcm",
      "scheduler": "sgm_uniform",
      "denoise": 0.6,
      "model": [
        "14",
        0
      ],
      "positive": [
        "24",
        0
      ],
      "negative": [
        "7",
        0
      ],
      "latent_image": [
        "13",
        0
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "4": {
    "inputs": {
      "ckpt_name": "3D角色IP 迪士尼风格_v2.0.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "6": {
    "inputs": {
      "text": [
        "11",
        0
      ],
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "7": {
    "inputs": {
      "text": "",
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "3",
        0
      ],
      "vae": [
        "4",
        2
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "11": {
    "inputs": {
      "text_positive": [
        "22",
        0
      ],
      "text_negative": "",
      "style": "sai-3d-model",
      "log_prompt": false,
      "style_positive": true,
      "style_negative": true
    },
    "class_type": "SDXLPromptStyler",
    "_meta": {
      "title": "SDXL Prompt Styler"
    }
  },
  "12": {
    "inputs": {
      "image": "temp (6).png",
      "upload": "image"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    }
  },
  "13": {
    "inputs": {
      "pixels": [
        "12",
        0
      ],
      "vae": [
        "4",
        2
      ]
    },
    "class_type": "VAEEncode",
    "_meta": {
      "title": "VAE Encode"
    }
  },
  "14": {
    "inputs": {
      "lora_name": "LCM_LoRA_Weights_SD15.safetensors",
      "strength_model": 1,
      "model": [
        "32",
        0
      ]
    },
    "class_type": "LoraLoaderModelOnly",
    "_meta": {
      "title": "LoraLoaderModelOnly"
    }
  },
  "16": {
    "inputs": {
      "ipadapter_file": "ip-adapter-plus_sd15.safetensors"
    },
    "class_type": "IPAdapterModelLoader",
    "_meta": {
      "title": "IPAdapter Model Loader"
    }
  },
  "17": {
    "inputs": {
      "clip_name": "pytorch_model.bin"
    },
    "class_type": "CLIPVisionLoader",
    "_meta": {
      "title": "Load CLIP Vision"
    }
  },
  "19": {
    "inputs": {
      "strength": 0.3,
      "conditioning": [
        "6",
        0
      ],
      "control_net": [
        "23",
        0
      ],
      "image": [
        "20",
        0
      ]
    },
    "class_type": "ControlNetApply",
    "_meta": {
      "title": "Apply ControlNet"
    }
  },
  "20": {
    "inputs": {
      "detect_hand": "enable",
      "detect_body": "enable",
      "detect_face": "enable",
      "resolution": 512,
      "bbox_detector": "yolox_l.onnx",
      "pose_estimator": "dw-ll_ucoco_384.onnx",
      "image": [
        "12",
        0
      ]
    },
    "class_type": "DWPreprocessor",
    "_meta": {
      "title": "DWPose Estimator"
    }
  },
  "21": {
    "inputs": {
      "model": "wd-v1-4-convnextv2-tagger-v2",
      "threshold": 0.35,
      "character_threshold": 0.85,
      "replace_underscore": "blurry,motion_blur",
      "trailing_comma": false,
      "exclude_tags": "",
      "tags": "1girl, solo, long hair, looking at viewer, bangs, brown hair, shirt, brown eyes, closed mouth, white shirt, upper body, outdoors, day, collared shirt, blurry, lips, eyelashes, depth of field, blurry background, freckles, realistic, nose",
      "image": [
        "12",
        0
      ]
    },
    "class_type": "WD14Tagger|pysssss",
    "_meta": {
      "title": "WD14 Tagger 🐍"
    }
  },
  "22": {
    "inputs": {
      "action": "append",
      "tidy_tags": "yes",
      "text_a": "masterpiece, best quality,",
      "text_b": [
        "21",
        0
      ],
      "text_c": " 1 human,(smile:1.2)",
      "result": "masterpiece, best quality, 1girl, solo, long hair, looking at viewer, bangs, brown hair, shirt, brown eyes, closed mouth, white shirt, upper body, outdoors, day, collared shirt, blurry, lips, eyelashes, depth of field, blurry background, freckles, realistic, nose, 1 human,(smile:1.2)"
    },
    "class_type": "StringFunction|pysssss",
    "_meta": {
      "title": "String Function 🐍"
    }
  },
  "23": {
    "inputs": {
      "control_net_name": "control_v11p_sd15_openpose.pth"
    },
    "class_type": "ControlNetLoader",
    "_meta": {
      "title": "Load ControlNet Model"
    }
  },
  "24": {
    "inputs": {
      "strength": 0.15,
      "conditioning": [
        "19",
        0
      ],
      "control_net": [
        "26",
        0
      ],
      "image": [
        "27",
        0
      ]
    },
    "class_type": "ControlNetApply",
    "_meta": {
      "title": "Apply ControlNet"
    }
  },
  "26": {
    "inputs": {
      "control_net_name": "control_v11p_sd15_softedge.pth"
    },
    "class_type": "ControlNetLoader",
    "_meta": {
      "title": "Load ControlNet Model"
    }
  },
  "27": {
    "inputs": {
      "safe": "v1.1",
      "resolution": "disable",
      "image": [
        "12",
        0
      ]
    },
    "class_type": "HEDPreprocessor",
    "_meta": {
      "title": "HED Soft-Edge Lines"
    }
  },
  "32": {
    "inputs": {
      "weight": 0.9,
      "weight_type": "linear",
      "combine_embeds": "concat",
      "start_at": 0,
      "end_at": 1,
      "embeds_scaling": "V only",
      "model": [
        "4",
        0
      ],
      "ipadapter": [
        "16",
        0
      ],
      "image": [
        "12",
        0
      ],
      "clip_vision": [
        "17",
        0
      ]
    },
    "class_type": "IPAdapterAdvanced",
    "_meta": {
      "title": "IPAdapter Advanced"
    }
  },
  "33": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": [
        "8",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  }
}