{
"last_node_id": 22,
"last_link_id": 40,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
690,
840
],
"size": {
"0": 425.27801513671875,
"1": 180.6060791015625
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"blurry, noisy, messy, lowres, jpeg, artifacts, ill, distorted, malformed"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1570,
700
],
"size": {
"0": 140,
"1": 46
},
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
690,
610
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"in a peaceful spring morning a woman wearing a white shirt is sitting in a park on a bench\n\nhigh quality, detailed, diffuse light"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
1210,
700
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 31
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
0,
"fixed",
30,
6.5,
"ddpm",
"karras",
1
]
},
{
"id": 19,
"type": "IPAdapterBatch",
"pos": [
1173,
251
],
"size": {
"0": 315,
"1": 254
},
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 37
},
{
"name": "ipadapter",
"type": "IPADAPTER",
"link": 29
},
{
"name": "image",
"type": "IMAGE",
"link": 30
},
{
"name": "image_negative",
"type": "IMAGE",
"link": null
},
{
"name": "attn_mask",
"type": "MASK",
"link": null
},
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": null
},
{
"name": "weight",
"type": "FLOAT",
"link": 38,
"widget": {
"name": "weight"
},
"slot_index": 6
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
31
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "IPAdapterBatch"
},
"widgets_values": [
1,
"linear",
0,
1,
"V only"
]
},
{
"id": 18,
"type": "IPAdapterUnifiedLoader",
"pos": [
303,
132
],
"size": {
"0": 315,
"1": 78
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 36
},
{
"name": "ipadapter",
"type": "IPADAPTER",
"link": null
}
],
"outputs": [
{
"name": "model",
"type": "MODEL",
"links": [
37
],
"shape": 3,
"slot_index": 0
},
{
"name": "ipadapter",
"type": "IPADAPTER",
"links": [
29
],
"shape": 3
}
],
"properties": {
"Node name for S&R": "IPAdapterUnifiedLoader"
},
"widgets_values": [
"PLUS (high strength)"
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
-79,
712
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
36
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
3,
5
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"sd15/realisticVisionV51_v51VAE.safetensors"
]
},
{
"id": 17,
"type": "PrepImageForClipVision",
"pos": [
788,
43
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 25
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
30
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "PrepImageForClipVision"
},
"widgets_values": [
"LANCZOS",
"top",
0.15
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1770,
710
],
"size": {
"0": 556.2374267578125,
"1": 892.1895751953125
},
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"IPAdapter"
]
},
{
"id": 12,
"type": "LoadImage",
"pos": [
311,
270
],
"size": {
"0": 315,
"1": 314
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
25
],
"shape": 3,
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null,
"shape": 3
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"warrior_woman.png",
"image"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
801,
1097
],
"size": [
309.1109879864148,
82
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "batch_size",
"type": "INT",
"link": 35,
"widget": {
"name": "batch_size"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
2
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [
512,
512,
6
]
},
{
"id": 21,
"type": "PrimitiveNode",
"pos": [
340,
1093
],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 2,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
35,
40
],
"widget": {
"name": "batch_size"
},
"slot_index": 0
}
],
"title": "frames",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
6,
"fixed"
]
},
{
"id": 22,
"type": "IPAdapterWeights",
"pos": [
761,
208
],
"size": [
299.9049990375719,
324.00000762939453
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": null
},
{
"name": "frames",
"type": "INT",
"link": 40,
"widget": {
"name": "frames"
}
}
],
"outputs": [
{
"name": "weights",
"type": "FLOAT",
"links": [
38
],
"shape": 3,
"slot_index": 0
},
{
"name": "weights_invert",
"type": "FLOAT",
"links": null,
"shape": 3
},
{
"name": "total_frames",
"type": "INT",
"links": null,
"shape": 3
},
{
"name": "image_1",
"type": "IMAGE",
"links": null,
"shape": 3
},
{
"name": "image_2",
"type": "IMAGE",
"links": null,
"shape": 3
}
],
"properties": {
"Node name for S&R": "IPAdapterWeights"
},
"widgets_values": [
"1.0, 0.0",
"linear",
6,
0,
9999,
0,
0,
"full batch"
]
}
],
"links": [
[
2,
5,
0,
3,
3,
"LATENT"
],
[
3,
4,
1,
6,
0,
"CLIP"
],
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
5,
4,
1,
7,
0,
"CLIP"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
8,
4,
2,
8,
1,
"VAE"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
25,
12,
0,
17,
0,
"IMAGE"
],
[
29,
18,
1,
19,
1,
"IPADAPTER"
],
[
30,
17,
0,
19,
2,
"IMAGE"
],
[
31,
19,
0,
3,
0,
"MODEL"
],
[
35,
21,
0,
5,
0,
"INT"
],
[
36,
4,
0,
18,
0,
"MODEL"
],
[
37,
18,
0,
19,
0,
"MODEL"
],
[
38,
22,
0,
19,
6,
"FLOAT"
],
[
40,
21,
0,
22,
1,
"INT"
]
],
"groups": [],
"config": {},
"extra": {},
"version": 0.4
}