{
  "6": {
    "inputs": {
      "text": "Create an 8-second animated loop featuring a young man sitting on a stone ledge overlooking a nighttime cityscape. The scene should begin with a slow zoom into the boy’s face as he gazes upwards at the starry sky. Throughout the video, have shooting stars streak across the sky – some fast, some slower, creating a dynamic visual effect. Gentle wind blows his hair and clothing.",
      "clip": [
        "38",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Positive Prompt)"
    }
  },
  "7": {
    "inputs": {
      "text": "色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走",
      "clip": [
        "38",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Negative Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "58",
        0
      ],
      "vae": [
        "39",
        0
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "38": {
    "inputs": {
      "clip_name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
      "type": "wan",
      "device": "cpu"
    },
    "class_type": "CLIPLoader",
    "_meta": {
      "title": "Load CLIP"
    }
  },
  "39": {
    "inputs": {
      "vae_name": "wan_2.1_vae.safetensors"
    },
    "class_type": "VAELoader",
    "_meta": {
      "title": "Load VAE"
    }
  },
  "50": {
    "inputs": {
      "width": [
        "64",
        1
      ],
      "height": [
        "64",
        2
      ],
      "length": 121,
      "batch_size": 1,
      "positive": [
        "6",
        0
      ],
      "negative": [
        "7",
        0
      ],
      "vae": [
        "39",
        0
      ],
      "start_image": [
        "64",
        0
      ]
    },
    "class_type": "WanImageToVideo",
    "_meta": {
      "title": "WanImageToVideo"
    }
  },
  "52": {
    "inputs": {
      "image": "ComfyUI_00036_.png"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    }
  },
  "54": {
    "inputs": {
      "shift": 8.000000000000002,
      "model": [
        "69",
        0
      ]
    },
    "class_type": "ModelSamplingSD3",
    "_meta": {
      "title": "ModelSamplingSD3"
    }
  },
  "55": {
    "inputs": {
      "shift": 8.000000000000002,
      "model": [
        "70",
        0
      ]
    },
    "class_type": "ModelSamplingSD3",
    "_meta": {
      "title": "ModelSamplingSD3"
    }
  },
  "57": {
    "inputs": {
      "add_noise": "enable",
      "noise_seed": 375574453154296,
      "steps": 6,
      "cfg": 1,
      "sampler_name": "euler",
      "scheduler": "simple",
      "start_at_step": 0,
      "end_at_step": 3,
      "return_with_leftover_noise": "enable",
      "model": [
        "54",
        0
      ],
      "positive": [
        "50",
        0
      ],
      "negative": [
        "50",
        1
      ],
      "latent_image": [
        "50",
        2
      ]
    },
    "class_type": "KSamplerAdvanced",
    "_meta": {
      "title": "KSampler (Advanced)"
    }
  },
  "58": {
    "inputs": {
      "add_noise": "disable",
      "noise_seed": 0,
      "steps": 6,
      "cfg": 1,
      "sampler_name": "euler",
      "scheduler": "simple",
      "start_at_step": 3,
      "end_at_step": 10000,
      "return_with_leftover_noise": "disable",
      "model": [
        "55",
        0
      ],
      "positive": [
        "50",
        0
      ],
      "negative": [
        "50",
        1
      ],
      "latent_image": [
        "57",
        0
      ]
    },
    "class_type": "KSamplerAdvanced",
    "_meta": {
      "title": "KSampler (Advanced)"
    }
  },
  "61": {
    "inputs": {
      "unet_name": "wan2.2_i2v_high_noise_14B_Q4_K_S.gguf"
    },
    "class_type": "UnetLoaderGGUF",
    "_meta": {
      "title": "Unet Loader (GGUF)"
    }
  },
  "62": {
    "inputs": {
      "unet_name": "wan2.2_i2v_low_noise_14B_Q4_K_S.gguf"
    },
    "class_type": "UnetLoaderGGUF",
    "_meta": {
      "title": "Unet Loader (GGUF)"
    }
  },
  "63": {
    "inputs": {
      "frame_rate": 32,
      "loop_count": 0,
      "filename_prefix": "wan22_",
      "format": "video/h264-mp4",
      "pix_fmt": "yuv420p",
      "crf": 19,
      "save_metadata": true,
      "trim_to_audio": false,
      "pingpong": false,
      "save_output": true,
      "images": [
        "8",
        0
      ]
    },
    "class_type": "VHS_VideoCombine",
    "_meta": {
      "title": "Video Combine 🎥🅥🅗🅢"
    }
  },
  "64": {
    "inputs": {
      "width": 720,
      "height": 1280,
      "upscale_method": "lanczos",
      "keep_proportion": "crop",
      "pad_color": "0, 0, 0",
      "crop_position": "center",
      "divisible_by": 16,
      "device": "cpu",
      "image": [
        "52",
        0
      ]
    },
    "class_type": "ImageResizeKJv2",
    "_meta": {
      "title": "Resize Image v2"
    }
  },
  "65": {
    "inputs": {
      "sage_attention": "sageattn_qk_int8_pv_fp8_cuda++",
      "model": [
        "61",
        0
      ]
    },
    "class_type": "PathchSageAttentionKJ",
    "_meta": {
      "title": "Patch Sage Attention KJ"
    }
  },
  "66": {
    "inputs": {
      "enable_fp16_accumulation": true,
      "model": [
        "65",
        0
      ]
    },
    "class_type": "ModelPatchTorchSettings",
    "_meta": {
      "title": "Model Patch Torch Settings"
    }
  },
  "67": {
    "inputs": {
      "sage_attention": "sageattn_qk_int8_pv_fp8_cuda++",
      "model": [
        "62",
        0
      ]
    },
    "class_type": "PathchSageAttentionKJ",
    "_meta": {
      "title": "Patch Sage Attention KJ"
    }
  },
  "68": {
    "inputs": {
      "enable_fp16_accumulation": true,
      "model": [
        "67",
        0
      ]
    },
    "class_type": "ModelPatchTorchSettings",
    "_meta": {
      "title": "Model Patch Torch Settings"
    }
  },
  "69": {
    "inputs": {
      "lora_name": "Wan21_I2V_14B_lightx2v_cfg_step_distill_lora_rank64.safetensors",
      "strength_model": 3.0000000000000004,
      "model": [
        "66",
        0
      ]
    },
    "class_type": "LoraLoaderModelOnly",
    "_meta": {
      "title": "LoraLoaderModelOnly"
    }
  },
  "70": {
    "inputs": {
      "lora_name": "Wan21_T2V_14B_lightx2v_cfg_step_distill_lora_rank64.safetensors",
      "strength_model": 1.5000000000000002,
      "model": [
        "68",
        0
      ]
    },
    "class_type": "LoraLoaderModelOnly",
    "_meta": {
      "title": "LoraLoaderModelOnly"
    }
  },
  "75": {
    "inputs": {
      "model": "4xNomos2_otf_esrgan",
      "precision": "fp16"
    },
    "class_type": "LoadUpscalerTensorrtModel",
    "_meta": {
      "title": "Load Upscale Tensorrt Model"
    }
  }
}
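
The graph above is an API-format export of a Wan 2.2 14B image-to-video workflow: node 64 resizes the input image to 720x1280, node 50 encodes it as a 121-frame latent, nodes 57 and 58 split the six euler steps between the high-noise and low-noise GGUF UNets (each patched with Sage attention, fp16 accumulation, and a lightx2v step-distill LoRA), and node 63 writes the decoded frames to an H.264 MP4 at 32 fps. Node 75 loads a TensorRT upscaler but is not wired into any other node. Node 7 keeps the Chinese negative prompt commonly used with Wan models; it translates roughly to "garish tones, overexposed, static, blurry detail, subtitles, style, artwork, painting, still frames, overall gray, worst quality, low quality, JPEG compression artifacts, ugly, mutilated, extra fingers, poorly drawn hands, poorly drawn face, deformed, disfigured, malformed limbs, fused fingers, motionless frames, cluttered background, three legs, crowded background, walking backwards".

Below is a minimal sketch of how an API-format graph like this can be queued against a running ComfyUI instance over its HTTP API. The endpoint 127.0.0.1:8188 and the file name wan22_i2v.json are assumptions, not part of the workflow; adjust both for your setup.

import json
import urllib.request

# Load the node graph shown above (the local file name is an
# assumption for this example).
with open("wan22_i2v.json", "r", encoding="utf-8") as f:
    workflow = json.load(f)

# ComfyUI accepts API-format graphs via POST /prompt with the
# body {"prompt": <graph>}.
payload = json.dumps({"prompt": workflow}).encode("utf-8")
req = urllib.request.Request(
    "http://127.0.0.1:8188/prompt",  # default ComfyUI address (assumed)
    data=payload,
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    # The server replies with a prompt_id; poll /history/<prompt_id>
    # to locate the MP4 written by the VHS_VideoCombine node.
    print(json.load(resp))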