update the custom workflow

pull/369/head
Abdullah Alfaraj 2023-11-26 19:43:25 +03:00
parent f81b8a6679
commit 5f7e94c55f
14 changed files with 2128 additions and 3352 deletions

View File

@ -160,7 +160,7 @@
}, },
"76": { "76": {
"inputs": { "inputs": {
"prompt": "young teen:1.2, (cute black girl:1.2)", "prompt": "young teen:1.2, (cute girl:1.2)",
"model": [ "model": [
"78", "78",
0 0

File diff suppressed because it is too large Load Diff

View File

@ -1,110 +0,0 @@
{
"4": {
"inputs": {
"ckpt_name": "anythingV5Anything_anythingV5PrtRE.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"6": {
"inputs": {
"text": "masterpiece, best quality, 1girl, solo, cherry blossoms, hanami, pink flower, white flower, spring season, wisteria, petals, flower, plum blossoms, outdoors, falling petals, white hair, black eyes",
"clip": [
"4",
1
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "embedding:easynegative, embedding:badhandv4, ",
"clip": [
"4",
1
]
},
"class_type": "CLIPTextEncode"
},
"8": {
"inputs": {
"samples": [
"15",
0
],
"vae": [
"13",
0
]
},
"class_type": "VAEDecode"
},
"12": {
"inputs": {
"frame_rate": 8,
"loop_count": 0,
"save_image": false,
"filename_prefix": "AnimateDiff",
"format": "image/gif",
"pingpong": false,
"ad_video_preview__0": "/view?filename=AnimateDiff_00005_.gif&subfolder=&type=temp&format=image%2Fgif",
"images": [
"8",
0
]
},
"class_type": "AnimateDiffCombine"
},
"13": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"15": {
"inputs": {
"inject_method": "default",
"frame_number": 16,
"seed": 345029849956677,
"steps": 20,
"cfg": 8,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 0.8,
"motion_module": [
"16",
0
],
"model": [
"4",
0
],
"positive": [
"6",
0
],
"negative": [
"7",
0
],
"latent_image": [
"20",
0
]
},
"class_type": "AnimateDiffSampler"
},
"16": {
"inputs": {
"model_name": "mm_sd_v15_v2.ckpt"
},
"class_type": "AnimateDiffModuleLoader"
},
"20": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 1
},
"class_type": "EmptyLatentImage"
}
}

View File

@ -0,0 +1,182 @@
{
"2": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"4": {
"inputs": {
"stop_at_clip_layer": -1,
"clip": [
"32",
1
]
},
"class_type": "CLIPSetLastLayer"
},
"6": {
"inputs": {
"text": "embedding:BadDream, ",
"clip": [
"4",
0
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"seed": 888888891,
"steps": 8,
"cfg": 1.5,
"sampler_name": "lcm",
"scheduler": "sgm_uniform",
"denoise": 1,
"model": [
"36",
0
],
"positive": [
"38",
0
],
"negative": [
"6",
0
],
"latent_image": [
"9",
0
]
},
"class_type": "KSampler"
},
"9": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 110
},
"class_type": "EmptyLatentImage"
},
"10": {
"inputs": {
"samples": [
"7",
0
],
"vae": [
"2",
0
]
},
"class_type": "VAEDecode"
},
"32": {
"inputs": {
"ckpt_name": "dreamshaper_8.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"33": {
"inputs": {
"context_length": 16,
"context_stride": 1,
"context_overlap": 4,
"context_schedule": "uniform",
"closed_loop": false
},
"class_type": "ADE_AnimateDiffUniformContextOptions"
},
"36": {
"inputs": {
"model_name": "mm_sd_v15_v2.ckpt",
"beta_schedule": "sqrt_linear (AnimateDiff)",
"motion_scale": 1,
"apply_v2_models_properly": false,
"model": [
"42",
0
],
"context_options": [
"33",
0
]
},
"class_type": "ADE_AnimateDiffLoaderWithContext"
},
"37": {
"inputs": {
"frame_rate": 8,
"loop_count": 0,
"filename_prefix": "aaa_readme",
"format": "image/gif",
"pingpong": false,
"save_image": true,
"crf": 20,
"save_metadata": true,
"videopreview": {
"hidden": false,
"paused": false,
"params": {
"filename": "aaa_readme_00024.gif",
"subfolder": "",
"type": "output",
"format": "image/gif"
}
},
"images": [
"10",
0
]
},
"class_type": "VHS_VideoCombine"
},
"38": {
"inputs": {
      "text": "\"0\" : \"Spring, flowers, smile\",\n\"20\" : \"Spring, flowers, smile\",\n\"30\" : \"Summer, sun, happy, windy\",\n\"50\" : \"Summer, sun, happy, windy\",\n\"60\" : \"Autumn, yellow leaves, laugh\",\n\"80\" : \"Autumn, yellow leaves, laugh\",\n\"90\" : \"Winter, wind, snow, smile, seductive\",\n\"110\" : \"Winter, wind, snow, smile, seductive\"",
"max_frames": 110,
"print_output": false,
"pre_text": "25 year old woman, t-shirt",
"app_text": "",
"start_frame": 0,
"pw_a": 0,
"pw_b": 0,
"pw_c": 0,
"pw_d": 0,
"clip": [
"4",
0
]
},
"class_type": "BatchPromptSchedule"
},
"41": {
"inputs": {
"lora_name": "lcm_lora_sd15.safetensors",
"strength_model": 1,
"strength_clip": 1,
"model": [
"32",
0
],
"clip": [
"32",
1
]
},
"class_type": "LoraLoader"
},
"42": {
"inputs": {
"sampling": "lcm",
"zsnr": false,
"model": [
"41",
0
]
},
"class_type": "ModelSamplingDiscrete"
}
}

View File

@ -0,0 +1,141 @@
{
"2": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"3": {
"inputs": {
"text": "girl astronaut walking on the moon. ",
"clip": [
"4",
0
]
},
"class_type": "CLIPTextEncode"
},
"4": {
"inputs": {
"stop_at_clip_layer": -2,
"clip": [
"32",
1
]
},
"class_type": "CLIPSetLastLayer"
},
"6": {
"inputs": {
"text": "(worst quality, low quality: 1.4)",
"clip": [
"4",
0
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"seed": 888888889,
"steps": 20,
"cfg": 8,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1,
"model": [
"27",
0
],
"positive": [
"3",
0
],
"negative": [
"6",
0
],
"latent_image": [
"9",
0
]
},
"class_type": "KSampler"
},
"9": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 16
},
"class_type": "EmptyLatentImage"
},
"10": {
"inputs": {
"samples": [
"7",
0
],
"vae": [
"2",
0
]
},
"class_type": "VAEDecode"
},
"27": {
"inputs": {
"model_name": "mm_sd_v14.ckpt",
"beta_schedule": "sqrt_linear (AnimateDiff)",
"motion_scale": 1,
"apply_v2_models_properly": false,
"model": [
"32",
0
]
},
"class_type": "ADE_AnimateDiffLoaderWithContext"
},
"32": {
"inputs": {
"ckpt_name": "cardosAnime_v20.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"35": {
"inputs": {
"frame_rate": 8,
"loop_count": 0,
"filename_prefix": "aaa_readme",
"format": "image/gif",
"pingpong": false,
"save_image": true,
"crf": 20,
"save_metadata": false,
"videopreview": {
"hidden": false,
"paused": false,
"params": {
"filename": "aaa_readme_00022.gif",
"subfolder": "",
"type": "output",
"format": "image/gif"
}
},
"images": [
"10",
0
]
},
"class_type": "VHS_VideoCombine"
},
"37": {
"inputs": {
"images": [
"10",
0
]
},
"class_type": "PreviewImage"
}
}

View File

@ -1,92 +0,0 @@
{
"1": {
"inputs": {
"ckpt_name": "cardosAnime_v20.safetensors",
"beta_schedule": "sqrt_linear (AnimateDiff)"
},
"class_type": "CheckpointLoaderSimpleWithNoiseSelect"
},
"2": {
"inputs": {
"vae_name": "MoistMix.vae.pt"
},
"class_type": "VAELoader"
},
"3": {
"inputs": {
"text": "ship in storm, waves, dark, night, Artstation ",
"clip": ["4", 0]
},
"class_type": "CLIPTextEncode"
},
"4": {
"inputs": {
"stop_at_clip_layer": -2,
"clip": ["1", 1]
},
"class_type": "CLIPSetLastLayer"
},
"6": {
"inputs": {
"text": "(ugly:1.2), (worst quality, low quality: 1.4)",
"clip": ["4", 0]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"seed": 711493021904285,
"steps": 20,
"cfg": 8,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1,
"model": ["8", 0],
"positive": ["3", 0],
"negative": ["6", 0],
"latent_image": ["8", 1]
},
"class_type": "KSampler"
},
"8": {
"inputs": {
"model_name": "mm_sd_v14.ckpt",
"unlimited_area_hack": true,
"model": ["1", 0],
"latents": ["9", 0]
},
"class_type": "AnimateDiffLoaderV1"
},
"9": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 16
},
"class_type": "EmptyLatentImage"
},
"10": {
"inputs": {
"samples": ["7", 0],
"vae": ["2", 0]
},
"class_type": "VAEDecode"
},
"12": {
"inputs": {
"filename_prefix": "AA_readme",
"images": ["10", 0]
},
"class_type": "SaveImage"
},
"26": {
"inputs": {
"frame_rate": 8,
"loop_count": 0,
"save_image": "Enabled",
"filename_prefix": "AA_readme_gif",
"images": ["10", 0]
},
"class_type": "ADE_AnimateDiffCombine"
}
}

View File

@ -1,98 +0,0 @@
{
"3": {
"inputs": {
"seed": 89848141647836,
"steps": 12,
"cfg": 8,
"sampler_name": "dpmpp_sde",
"scheduler": "normal",
"denoise": 1,
"model": ["16", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["5", 0]
},
"class_type": "KSampler"
},
"5": {
"inputs": {
"width": 768,
"height": 768,
"batch_size": 1
},
"class_type": "EmptyLatentImage"
},
"6": {
"inputs": {
"text": "masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n",
"clip": ["16", 1]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "bad hands, text, watermark\n",
"clip": ["16", 1]
},
"class_type": "CLIPTextEncode"
},
"8": {
"inputs": {
"samples": ["3", 0],
"vae": ["16", 2]
},
"class_type": "VAEDecode"
},
"9": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": ["8", 0]
},
"class_type": "SaveImage"
},
"10": {
"inputs": {
"upscale_method": "nearest-exact",
"width": 1152,
"height": 1152,
"crop": "disabled",
"samples": ["3", 0]
},
"class_type": "LatentUpscale"
},
"11": {
"inputs": {
"seed": 463499690269752,
"steps": 14,
"cfg": 8,
"sampler_name": "dpmpp_2m",
"scheduler": "simple",
"denoise": 0.5,
"model": ["16", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["10", 0]
},
"class_type": "KSampler"
},
"12": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": ["13", 0]
},
"class_type": "SaveImage"
},
"13": {
"inputs": {
"samples": ["11", 0],
"vae": ["16", 2]
},
"class_type": "VAEDecode"
},
"16": {
"inputs": {
"ckpt_name": "juggernaut_final.safetensors"
},
"class_type": "CheckpointLoaderSimple"
}
}

View File

@ -1,65 +0,0 @@
{
"3": {
"inputs": {
"seed": 280823642470253,
"steps": 20,
"cfg": 8,
"sampler_name": "dpmpp_2m",
"scheduler": "normal",
"denoise": 0.8700000000000001,
"model": ["14", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["12", 0]
},
"class_type": "KSampler"
},
"6": {
"inputs": {
"text": "photograph of victorian woman with wings, sky clouds, meadow grass\n",
"clip": ["14", 1]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "watermark, text\n",
"clip": ["14", 1]
},
"class_type": "CLIPTextEncode"
},
"8": {
"inputs": {
"samples": ["3", 0],
"vae": ["14", 2]
},
"class_type": "VAEDecode"
},
"9": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": ["8", 0]
},
"class_type": "SaveImage"
},
"10": {
"inputs": {
"image": "example.png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"12": {
"inputs": {
"pixels": ["10", 0],
"vae": ["14", 2]
},
"class_type": "VAEEncode"
},
"14": {
"inputs": {
"ckpt_name": "v1-5-pruned-emaonly.ckpt"
},
"class_type": "CheckpointLoaderSimple"
}
}

View File

@ -1,199 +0,0 @@
{
"3": {
"inputs": {
"seed": 967414160783294,
"steps": 20,
"cfg": 8,
"sampler_name": "uni_pc_bh2",
"scheduler": "normal",
"denoise": 1,
"model": [
"29",
0
],
"positive": [
"6",
0
],
"negative": [
"7",
0
],
"latent_image": [
"26",
0
]
},
"class_type": "KSampler"
},
"6": {
"inputs": {
"text": "closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature",
"clip": [
"29",
1
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "watermark, text\n",
"clip": [
"29",
1
]
},
"class_type": "CLIPTextEncode"
},
"8": {
"inputs": {
"samples": [
"3",
0
],
"vae": [
"29",
2
]
},
"class_type": "VAEDecode"
},
"9": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"8",
0
]
},
"class_type": "SaveImage"
},
"20": {
"inputs": {
"image": "ComfyUI_temp_cqoqp_00001_ (1).png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"26": {
"inputs": {
"grow_mask_by": 6,
"pixels": [
"33",
0
],
"vae": [
"29",
2
],
"mask": [
"34",
0
]
},
"class_type": "VAEEncodeForInpaint"
},
"29": {
"inputs": {
"ckpt_name": "sd-v1-5-inpainting.ckpt"
},
"class_type": "CheckpointLoaderSimple"
},
"30": {
"inputs": {
"mask": [
"34",
0
]
},
"class_type": "MaskToImage"
},
"31": {
"inputs": {
"images": [
"30",
0
]
},
"class_type": "PreviewImage"
},
"32": {
"inputs": {
"images": [
"33",
0
]
},
"class_type": "PreviewImage"
},
"33": {
"inputs": {
"upscale_method": "nearest-exact",
"width": [
"37",
0
],
"height": [
"38",
0
],
"crop": "disabled",
"image": [
"20",
0
]
},
"class_type": "ImageScale"
},
"34": {
"inputs": {
"channel": "red",
"image": [
"36",
0
]
},
"class_type": "ImageToMask"
},
"35": {
"inputs": {
"mask": [
"20",
1
]
},
"class_type": "MaskToImage"
},
"36": {
"inputs": {
"upscale_method": "nearest-exact",
"width": [
"37",
0
],
"height": [
"38",
0
],
"crop": "disabled",
"image": [
"35",
0
]
},
"class_type": "ImageScale"
},
"37": {
"inputs": {
"Value": 512
},
"class_type": "Integer"
},
"38": {
"inputs": {
"Value": 512
},
"class_type": "Integer"
}
}

View File

@ -1,130 +0,0 @@
{
"3": {
"inputs": {
"seed": 288631494757459,
"steps": 20,
"cfg": 8,
"sampler_name": "uni_pc_bh2",
"scheduler": "normal",
"denoise": 1,
"model": [
"29",
0
],
"positive": [
"6",
0
],
"negative": [
"7",
0
],
"latent_image": [
"26",
0
]
},
"class_type": "KSampler"
},
"6": {
"inputs": {
"text": "closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature",
"clip": [
"29",
1
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "watermark, text\n",
"clip": [
"29",
1
]
},
"class_type": "CLIPTextEncode"
},
"8": {
"inputs": {
"samples": [
"3",
0
],
"vae": [
"29",
2
]
},
"class_type": "VAEDecode"
},
"9": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"8",
0
]
},
"class_type": "SaveImage"
},
"20": {
"inputs": {
"image": "ComfyUI_temp_cqoqp_00001_ (1).png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"26": {
"inputs": {
"grow_mask_by": 6,
"pixels": [
"20",
0
],
"vae": [
"29",
2
],
"mask": [
"20",
1
]
},
"class_type": "VAEEncodeForInpaint"
},
"29": {
"inputs": {
"ckpt_name": "sd-v1-5-inpainting.ckpt"
},
"class_type": "CheckpointLoaderSimple"
},
"30": {
"inputs": {
"mask": [
"20",
1
]
},
"class_type": "MaskToImage"
},
"31": {
"inputs": {
"images": [
"30",
0
]
},
"class_type": "PreviewImage"
},
"32": {
"inputs": {
"images": [
"20",
0
]
},
"class_type": "PreviewImage"
}
}

View File

@ -1,187 +0,0 @@
{
"8": {
"inputs": {
"vae_name": "klF8Anime2VAE_klF8Anime2VAE.ckpt"
},
"class_type": "VAELoader"
},
"16": {
"inputs": {
"ckpt_name": "juggernaut_final.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"18": {
"inputs": {
"text": "(front view:1.2)",
"clip": ["16", 1]
},
"class_type": "CLIPTextEncode"
},
"21": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 2
},
"class_type": "EmptyLatentImage"
},
"30": {
"inputs": {
"text": "(a dog sitting:1.3) on a tile floor with a blue eyes and a white nose and tail, looking at the camera, artist, extremely detailed oil painting, a photorealistic painting, photorealism",
"clip": ["16", 1]
},
"class_type": "CLIPTextEncode"
},
"31": {
"inputs": {
"tile_size": 512,
"samples": ["97", 0],
"vae": ["8", 0]
},
"class_type": "VAEDecodeTiled"
},
"48": {
"inputs": {
"weight": ["163", 0],
"model_name": "ip-adapter-plus_sd15.bin",
"dtype": "fp32",
"model": ["162", 0],
"image": ["168", 0],
"clip_vision": ["57", 0]
},
"class_type": "IPAdapter"
},
"50": {
"inputs": {
"strength": ["163", 0],
"noise_augmentation": 0,
"conditioning": ["30", 0],
"clip_vision_output": ["48", 1]
},
"class_type": "unCLIPConditioning"
},
"57": {
"inputs": {
"clip_name": "model.safetensors"
},
"class_type": "CLIPVisionLoader"
},
"97": {
"inputs": {
"seed": 229741993160779,
"steps": 32,
"cfg": 6.5,
"sampler_name": "dpmpp_2s_ancestral",
"scheduler": "karras",
"denoise": 1,
"model": ["48", 0],
"positive": ["50", 0],
"negative": ["18", 0],
"latent_image": ["21", 0]
},
"class_type": "KSampler"
},
"122": {
"inputs": {
"images": ["31", 0]
},
"class_type": "PreviewImage"
},
"155": {
"inputs": {
"seed": 216203953003378,
"steps": 40,
"cfg": 5,
"sampler_name": "ddim",
"scheduler": "normal",
"denoise": 0.45,
"model": ["48", 0],
"positive": ["50", 0],
"negative": ["18", 0],
"latent_image": ["156", 0]
},
"class_type": "KSampler"
},
"156": {
"inputs": {
"tile_size": 640,
"pixels": ["159", 0],
"vae": ["8", 0]
},
"class_type": "VAEEncodeTiled"
},
"157": {
"inputs": {
"upscale_model": ["158", 0],
"image": ["31", 0]
},
"class_type": "ImageUpscaleWithModel"
},
"158": {
"inputs": {
"model_name": "RealESRGAN_x4plus_anime_6B.pth"
},
"class_type": "UpscaleModelLoader"
},
"159": {
"inputs": {
"upscale_method": "nearest-exact",
"scale_by": 0.45,
"image": ["157", 0]
},
"class_type": "ImageScaleBy"
},
"160": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": ["161", 0]
},
"class_type": "SaveImage"
},
"161": {
"inputs": {
"tile_size": 512,
"samples": ["155", 0],
"vae": ["8", 0]
},
"class_type": "VAEDecodeTiled"
},
"162": {
"inputs": {
"b1": 1.1500000000000001,
"b2": 1.35,
"s1": 0.9500000000000001,
"s2": 0.18,
"model": ["16", 0]
},
"class_type": "FreeU"
},
"163": {
"inputs": {
"Value": 0.5
},
"class_type": "Float"
},
"168": {
"inputs": {
"image": "Layer 4 (3).png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"169": {
"inputs": {
"image": "Layer 3 (1).png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"188": {
"inputs": {
"image": "01285-3246154361-a photo of charddim15 person looking happy, beautiful, ((cloth)), ((full body)), ((chest)), ((far away)), ((waist up)).png",
"choose file to upload": "image"
},
"class_type": "LoadImage"
}
}

View File

@ -1,185 +0,0 @@
{
"4": {
"inputs": {
"ckpt_name": "dreamshaper_8.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"6": {
"inputs": {
"text": "(best quality, masterpiece), 1girl, short hair, blue eyes, dancing, city, cloudy",
"clip": [
"4",
1
]
},
"class_type": "CLIPTextEncode"
},
"7": {
"inputs": {
"text": "embedding:easynegative, embedding:badhandv4, nsfw",
"clip": [
"4",
1
]
},
"class_type": "CLIPTextEncode"
},
"13": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"16": {
"inputs": {
"model_name": "mm-Stabilized_mid.pth"
},
"class_type": "AnimateDiffModuleLoader"
},
"20": {
"inputs": {
"width": [
"104",
0
],
"height": [
"104",
1
],
"batch_size": 1
},
"class_type": "EmptyLatentImage"
},
"36": {
"inputs": {
"control_net_name": "control_v11p_sd15_openpose.pth"
},
"class_type": "ControlNetLoaderAdvanced"
},
"39": {
"inputs": {
"strength": 1,
"start_percent": 0,
"end_percent": 1,
"positive": [
"6",
0
],
"negative": [
"7",
0
],
"control_net": [
"36",
0
],
"image": [
"103",
0
]
},
"class_type": "ControlNetApplyAdvanced"
},
"44": {
"inputs": {
"samples": [
"107",
0
],
"vae": [
"13",
0
]
},
"class_type": "VAEDecode"
},
"45": {
"inputs": {
"frame_rate": 8,
"loop_count": 0,
"save_image": true,
"filename_prefix": "AnimateDiff",
"format": "image/gif",
"pingpong": true,
"ad_video_preview__0": "/view?filename=AnimateDiff_00056_.gif&subfolder=&type=output&format=image%2Fgif",
"images": [
"44",
0
]
},
"class_type": "AnimateDiffCombine"
},
"103": {
"inputs": {
"video": "video/268628360-bf926f52-da97-4fb4-b86a-8b26ef5fab04.gif",
"choose file to upload": "image",
"frame_start": 0,
"frame_limit": 16
},
"class_type": "LoadVideo"
},
"104": {
"inputs": {
"image": [
"103",
0
]
},
"class_type": "ImageSizeAndBatchSize"
},
"105": {
"inputs": {
"images": [
"103",
0
]
},
"class_type": "PreviewImage"
},
"106": {
"inputs": {
"images": [
"44",
0
]
},
"class_type": "PreviewImage"
},
"107": {
"inputs": {
"inject_method": "default",
"frame_number": [
"104",
2
],
"seed": 611067767367918,
"steps": 20,
"cfg": 8,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1,
"motion_module": [
"16",
0
],
"model": [
"4",
0
],
"positive": [
"39",
0
],
"negative": [
"39",
1
],
"latent_image": [
"20",
0
]
},
"class_type": "AnimateDiffSampler"
}
}

View File

@ -1,110 +0,0 @@
{
"28": {
"inputs": {
"images": [
"30",
0
]
},
"class_type": "PreviewImage"
},
"29": {
"inputs": {
"frame_rate": 10,
"loop_count": 0,
"filename_prefix": "LCM",
"format": "image/gif",
"pingpong": true,
"save_image": false,
"Sync playback": null,
"vhs_gif_preview__0": "/view?filename=LCM_00003_.gif&subfolder=&type=temp&format=image%2Fgif",
"images": [
"35",
0
]
},
"class_type": "VHS_VideoCombine"
},
"30": {
"inputs": {
"video": "Head rotations_.mp4",
"force_rate": 0,
"force_size": "Disabled",
"frame_load_cap": 20,
"skip_first_frames": 17,
"select_every_nth": 10,
"choose file to upload": "video"
},
"class_type": "VHS_LoadVideo"
},
"35": {
"inputs": {
"seed": 567253074059555,
"prompt_strength": 0.4,
"steps": 4,
"cfg": 8,
"height": 512,
"width": 512,
"num_images": 1,
"use_fp16": true,
"positive_prompt": "cute girl, oil painting",
"images": [
"30",
0
]
},
"class_type": "LCM_img2img_Sampler"
},
"37": {
"inputs": {
"image": "000662ed61a84c86fc8a3fe69d38a6e2 (1).jpg",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"38": {
"inputs": {
"images": [
"35",
0
]
},
"class_type": "PreviewImage"
},
"39": {
"inputs": {
"width": 512,
"height": 512,
"batch_size": 30
},
"class_type": "EmptyLatentImage"
},
"40": {
"inputs": {
"samples": [
"39",
0
],
"vae": [
"41",
0
]
},
"class_type": "VAEDecode"
},
"41": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"42": {
"inputs": {
"images": [
"40",
0
]
},
"class_type": "PreviewImage"
}
}

View File

@ -0,0 +1,333 @@
{
"1": {
"inputs": {
"image": "000662ed61a84c86fc8a3fe69d38a6e2 (1).jpg",
"choose file to upload": "image"
},
"class_type": "LoadImage"
},
"2": {
"inputs": {
"left": 112,
"top": 112,
"right": 104,
"bottom": 208,
"feathering": 20,
"image": [
"50",
0
]
},
"class_type": "ImagePadForOutpaint"
},
"3": {
"inputs": {
"images": [
"2",
0
]
},
"class_type": "PreviewImage"
},
"5": {
"inputs": {
"width": [
"12",
2
],
"height": [
"12",
1
],
"batch_size": 1
},
"class_type": "EmptyLatentImage"
},
"6": {
"inputs": {
"pixels": [
"2",
0
],
"vae": [
"7",
0
]
},
"class_type": "VAEEncode"
},
"7": {
"inputs": {
"vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
},
"class_type": "VAELoader"
},
"12": {
"inputs": {
"value": [
"2",
0
]
},
"class_type": "ImpactImageInfo"
},
"14": {
"inputs": {
"x": 0,
"y": 0,
"resize_source": true,
"destination": [
"6",
0
],
"source": [
"5",
0
],
"mask": [
"2",
1
]
},
"class_type": "LatentCompositeMasked"
},
"15": {
"inputs": {
"samples": [
"14",
0
],
"vae": [
"7",
0
]
},
"class_type": "VAEDecode"
},
"19": {
"inputs": {
"images": [
"15",
0
]
},
"class_type": "PreviewImage"
},
"20": {
"inputs": {
"strength": 1,
"conditioning": [
"27",
4
],
"control_net": [
"21",
0
],
"image": [
"47",
0
]
},
"class_type": "ControlNetApply"
},
"21": {
"inputs": {
"control_net_name": "control_v11p_sd15_inpaint_fp16.safetensors"
},
"class_type": "ControlNetLoader"
},
"22": {
"inputs": {
"ckpt_name": "aniverse_v15Pruned.safetensors"
},
"class_type": "CheckpointLoaderSimple"
},
"23": {
"inputs": {
"model": [
"22",
0
],
"clip": [
"22",
1
],
"vae": [
"22",
2
],
"positive": [
"24",
0
],
"negative": [
"25",
0
]
},
"class_type": "ToBasicPipe"
},
"24": {
"inputs": {
"text": "",
"clip": [
"22",
1
]
},
"class_type": "CLIPTextEncode"
},
"25": {
"inputs": {
"text": "nsfw",
"clip": [
"22",
1
]
},
"class_type": "CLIPTextEncode"
},
"27": {
"inputs": {
"basic_pipe": [
"23",
0
]
},
"class_type": "FromBasicPipe_v2"
},
"29": {
"inputs": {
"basic_pipe": [
"27",
0
],
"positive": [
"20",
0
]
},
"class_type": "EditBasicPipe"
},
"30": {
"inputs": {
"seed": 949895177872699,
"steps": 20,
"cfg": 8,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1,
"basic_pipe": [
"29",
0
],
"latent_image": [
"62",
0
]
},
"class_type": "ImpactKSamplerBasicPipe"
},
"31": {
"inputs": {
"samples": [
"30",
1
],
"vae": [
"30",
2
]
},
"class_type": "VAEDecode"
},
"32": {
"inputs": {
"images": [
"31",
0
]
},
"class_type": "PreviewImage"
},
"33": {
"inputs": {
"samples": [
"14",
0
],
"mask": [
"2",
1
]
},
"class_type": "SetLatentNoiseMask"
},
"47": {
"inputs": {
"image": [
"2",
0
],
"mask": [
"2",
1
]
},
"class_type": "InpaintPreprocessor"
},
"50": {
"inputs": {
"upscale_method": "nearest-exact",
"scale_by": 0.3,
"image": [
"1",
0
]
},
"class_type": "ImageScaleBy"
},
"52": {
"inputs": {
"clip_vision": [
"53",
0
],
"image": [
"1",
0
]
},
"class_type": "CLIPVisionEncode"
},
"53": {
"inputs": {
"clip_name": "SD1.5/pytorch_model.bin"
},
"class_type": "CLIPVisionLoader"
},
"56": {
"inputs": {
"pixels": [
"2",
0
],
"vae": [
"7",
0
]
},
"class_type": "VAEEncode"
},
"62": {
"inputs": {
"amount": 2,
"samples": [
"33",
0
]
},
"class_type": "RepeatLatentBatch"
}
}