
Commit 21621ee

psychedelicious authored and hipsterusername committed
feat(ui): handle control adapter processed images
- Add helper functions to build metadata for control adapters, including the processed images
- Update parsers to parse the new metadata
1 parent c24f204 commit 21621ee
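
For orientation, here is a minimal sketch of the shape the new helpers write into core metadata for a ControlNet entry: alongside the raw control image, the processed image (when a processor ran) is now recorded under processed_image. The field names follow the diffs below; the concrete values and image names are made up for illustration.

// Illustrative only: not produced by the app, values are hypothetical.
const exampleControlNetMetadataEntry = {
  control_model: { key: 'example-controlnet-key' }, // simplified stand-in for the selected model
  control_weight: 0.75,
  control_mode: 'balanced',
  begin_step_percent: 0,
  end_step_percent: 1,
  resize_mode: 'just_resize',
  image: { image_name: 'raw-control-image.png' },
  processed_image: { image_name: 'canny-edge-map.png' }, // null when processorType is 'none'
};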

File tree

4 files changed, +187 -71 lines

invokeai/frontend/web/src/features/metadata/util/parsers.ts
invokeai/frontend/web/src/features/nodes/util/graph/addControlNetToLinearGraph.ts
invokeai/frontend/web/src/features/nodes/util/graph/addIPAdapterToLinearGraph.ts
invokeai/frontend/web/src/features/nodes/util/graph/addT2IAdapterToLinearGraph.ts

invokeai/frontend/web/src/features/metadata/util/parsers.ts

Lines changed: 30 additions & 7 deletions
@@ -225,7 +225,14 @@ const parseControlNet: MetadataParseFunc<ControlNetConfigMetadata> = async (meta
   const control_model = await getProperty(metadataItem, 'control_model');
   const key = await getModelKey(control_model, 'controlnet');
   const controlNetModel = await fetchModelConfigWithTypeGuard(key, isControlNetModelConfig);
-  const image = zControlField.shape.image.nullish().catch(null).parse(await getProperty(metadataItem, 'image'));
+  const image = zControlField.shape.image
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'image'));
+  const processedImage = zControlField.shape.image
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'processed_image'));
   const control_weight = zControlField.shape.control_weight
     .nullish()
     .catch(null)
@@ -259,7 +266,7 @@ const parseControlNet: MetadataParseFunc<ControlNetConfigMetadata> = async (meta
     controlMode: control_mode ?? initialControlNet.controlMode,
     resizeMode: resize_mode ?? initialControlNet.resizeMode,
     controlImage: image?.image_name ?? null,
-    processedControlImage: image?.image_name ?? null,
+    processedControlImage: processedImage?.image_name ?? null,
     processorType,
     processorNode,
     shouldAutoConfig: true,
@@ -283,8 +290,18 @@ const parseT2IAdapter: MetadataParseFunc<T2IAdapterConfigMetadata> = async (meta
   const key = await getModelKey(t2i_adapter_model, 't2i_adapter');
   const t2iAdapterModel = await fetchModelConfigWithTypeGuard(key, isT2IAdapterModelConfig);

-  const image = zT2IAdapterField.shape.image.nullish().catch(null).parse(await getProperty(metadataItem, 'image'));
-  const weight = zT2IAdapterField.shape.weight.nullish().catch(null).parse(await getProperty(metadataItem, 'weight'));
+  const image = zT2IAdapterField.shape.image
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'image'));
+  const processedImage = zT2IAdapterField.shape.image
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'processed_image'));
+  const weight = zT2IAdapterField.shape.weight
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'weight'));
   const begin_step_percent = zT2IAdapterField.shape.begin_step_percent
     .nullish()
     .catch(null)
@@ -309,7 +326,7 @@ const parseT2IAdapter: MetadataParseFunc<T2IAdapterConfigMetadata> = async (meta
     endStepPct: end_step_percent ?? initialT2IAdapter.endStepPct,
     resizeMode: resize_mode ?? initialT2IAdapter.resizeMode,
     controlImage: image?.image_name ?? null,
-    processedControlImage: image?.image_name ?? null,
+    processedControlImage: processedImage?.image_name ?? null,
     processorType,
     processorNode,
     shouldAutoConfig: true,
@@ -333,8 +350,14 @@ const parseIPAdapter: MetadataParseFunc<IPAdapterConfigMetadata> = async (metada
   const key = await getModelKey(ip_adapter_model, 'ip_adapter');
   const ipAdapterModel = await fetchModelConfigWithTypeGuard(key, isIPAdapterModelConfig);

-  const image = zIPAdapterField.shape.image.nullish().catch(null).parse(await getProperty(metadataItem, 'image'));
-  const weight = zIPAdapterField.shape.weight.nullish().catch(null).parse(await getProperty(metadataItem, 'weight'));
+  const image = zIPAdapterField.shape.image
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'image'));
+  const weight = zIPAdapterField.shape.weight
+    .nullish()
+    .catch(null)
+    .parse(await getProperty(metadataItem, 'weight'));
   const begin_step_percent = zIPAdapterField.shape.begin_step_percent
     .nullish()
     .catch(null)

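The new processed_image parsing reuses the same tolerant zod pattern as the existing fields: .nullish() accepts a missing or null value, and .catch(null) swallows any other parse failure, so old metadata without the field simply yields null instead of throwing. A self-contained sketch of that behavior, using a stand-in schema rather than the app's zControlField:

import { z } from 'zod';

// Stand-in for zControlField.shape.image: an object carrying an image_name.
const zImageField = z.object({ image_name: z.string() });

// nullish() permits null/undefined; catch(null) converts any other failure to null.
const tolerantImage = zImageField.nullish().catch(null);

tolerantImage.parse({ image_name: 'foo.png' }); // { image_name: 'foo.png' }
tolerantImage.parse(undefined);                 // undefined (field absent in old metadata)
tolerantImage.parse({ image_name: 42 });        // null (malformed value, caught)

In the parsers, the result is then read as processedImage?.image_name ?? null, so processedControlImage only gets a value when the metadata actually recorded a processed image.
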
invokeai/frontend/web/src/features/nodes/util/graph/addControlNetToLinearGraph.ts

Lines changed: 66 additions & 25 deletions
@@ -1,11 +1,15 @@
 import type { RootState } from 'app/store/store';
 import { selectValidControlNets } from 'features/controlAdapters/store/controlAdaptersSlice';
+import type { ControlAdapterProcessorType, ControlNetConfig } from 'features/controlAdapters/store/types';
+import type { ImageField } from 'features/nodes/types/common';
 import type {
   CollectInvocation,
   ControlNetInvocation,
   CoreMetadataInvocation,
   NonNullableGraph,
+  S,
 } from 'services/api/types';
+import { assert } from 'tsafe';

 import { CONTROL_NET_COLLECT } from './constants';
 import { upsertMetadata } from './metadata';
@@ -70,34 +74,12 @@ export const addControlNetToLinearGraph = async (
       resize_mode: resizeMode,
       control_model: model,
       control_weight: weight,
+      image: buildControlImage(controlImage, processedControlImage, processorType),
     };

-    if (processedControlImage && processorType !== 'none') {
-      // We've already processed the image in the app, so we can just use the processed image
-      controlNetNode.image = {
-        image_name: processedControlImage,
-      };
-    } else if (controlImage) {
-      // The control image is preprocessed
-      controlNetNode.image = {
-        image_name: controlImage,
-      };
-    } else {
-      // Skip CAs without an unprocessed image - should never happen, we already filtered the list of valid CAs
-      return;
-    }
+    graph.nodes[controlNetNode.id] = controlNetNode;

-    graph.nodes[controlNetNode.id] = controlNetNode as ControlNetInvocation;
-
-    controlNetMetadata.push({
-      control_model: model,
-      control_weight: weight,
-      control_mode: controlMode,
-      begin_step_percent: beginStepPct,
-      end_step_percent: endStepPct,
-      resize_mode: resizeMode,
-      image: controlNetNode.image,
-    });
+    controlNetMetadata.push(buildControlNetMetadata(controlNet));

     graph.edges.push({
       source: { node_id: controlNetNode.id, field: 'control' },
@@ -110,3 +92,62 @@ export const addControlNetToLinearGraph = async (
     upsertMetadata(graph, { controlnets: controlNetMetadata });
   }
 };
+
+const buildControlImage = (
+  controlImage: string | null,
+  processedControlImage: string | null,
+  processorType: ControlAdapterProcessorType
+): ImageField => {
+  let image: ImageField | null = null;
+  if (processedControlImage && processorType !== 'none') {
+    // We've already processed the image in the app, so we can just use the processed image
+    image = {
+      image_name: processedControlImage,
+    };
+  } else if (controlImage) {
+    // The control image is preprocessed
+    image = {
+      image_name: controlImage,
+    };
+  }
+  assert(image, 'ControlNet image is required');
+  return image;
+};
+
+const buildControlNetMetadata = (controlNet: ControlNetConfig): S['ControlNetMetadataField'] => {
+  const {
+    controlImage,
+    processedControlImage,
+    beginStepPct,
+    endStepPct,
+    controlMode,
+    resizeMode,
+    model,
+    processorType,
+    weight,
+  } = controlNet;
+
+  assert(model, 'ControlNet model is required');
+
+  const processed_image =
+    processedControlImage && processorType !== 'none'
+      ? {
+          image_name: processedControlImage,
+        }
+      : null;
+
+  assert(controlImage, 'ControlNet image is required');
+
+  return {
+    control_model: model,
+    control_weight: weight,
+    control_mode: controlMode,
+    begin_step_percent: beginStepPct,
+    end_step_percent: endStepPct,
+    resize_mode: resizeMode,
+    image: {
+      image_name: controlImage,
+    },
+    processed_image,
+  };
+};

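The buildControlImage helper above centralizes the precedence rule the deleted inline branches used to express: use the processed image only when a processor other than 'none' produced one, otherwise fall back to the raw control image, and fail loudly if neither exists. A standalone restatement of that rule, for illustration only (the real helper is module-private and typed against the app's ImageField and ControlAdapterProcessorType):

import { assert } from 'tsafe';

type ImageFieldLike = { image_name: string };

// Mirrors the precedence in buildControlImage (names here are hypothetical).
const pickControlImage = (
  controlImage: string | null,
  processedControlImage: string | null,
  processorType: string
): ImageFieldLike => {
  if (processedControlImage && processorType !== 'none') {
    // A processor ran in the app; prefer its output.
    return { image_name: processedControlImage };
  }
  // Otherwise the raw control image must exist (valid adapters are pre-filtered).
  assert(controlImage, 'control image is required');
  return { image_name: controlImage };
};

pickControlImage('raw.png', 'canny.png', 'canny_image_processor'); // { image_name: 'canny.png' }
pickControlImage('raw.png', null, 'none');                         // { image_name: 'raw.png' }
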
invokeai/frontend/web/src/features/nodes/util/graph/addIPAdapterToLinearGraph.ts

Lines changed: 36 additions & 16 deletions
@@ -1,11 +1,15 @@
 import type { RootState } from 'app/store/store';
 import { selectValidIPAdapters } from 'features/controlAdapters/store/controlAdaptersSlice';
+import type { IPAdapterConfig } from 'features/controlAdapters/store/types';
+import type { ImageField } from 'features/nodes/types/common';
 import type {
   CollectInvocation,
   CoreMetadataInvocation,
   IPAdapterInvocation,
   NonNullableGraph,
+  S,
 } from 'services/api/types';
+import { assert } from 'tsafe';

 import { IP_ADAPTER_COLLECT } from './constants';
 import { upsertMetadata } from './metadata';
@@ -44,7 +48,10 @@ export const addIPAdapterToLinearGraph = async (
     if (!ipAdapter.model) {
       return;
     }
-    const { id, weight, model, beginStepPct, endStepPct } = ipAdapter;
+    const { id, weight, model, beginStepPct, endStepPct, controlImage } = ipAdapter;
+
+    assert(controlImage, 'IP Adapter image is required');
+
     const ipAdapterNode: IPAdapterInvocation = {
       id: `ip_adapter_${id}`,
       type: 'ip_adapter',
@@ -53,25 +60,14 @@ export const addIPAdapterToLinearGraph = async (
       ip_adapter_model: model,
       begin_step_percent: beginStepPct,
       end_step_percent: endStepPct,
+      image: {
+        image_name: controlImage,
+      },
     };

-    if (ipAdapter.controlImage) {
-      ipAdapterNode.image = {
-        image_name: ipAdapter.controlImage,
-      };
-    } else {
-      return;
-    }
-
     graph.nodes[ipAdapterNode.id] = ipAdapterNode;

-    ipAdapterMetdata.push({
-      weight: weight,
-      ip_adapter_model: model,
-      begin_step_percent: beginStepPct,
-      end_step_percent: endStepPct,
-      image: ipAdapterNode.image,
-    });
+    ipAdapterMetdata.push(buildIPAdapterMetadata(ipAdapter));

     graph.edges.push({
       source: { node_id: ipAdapterNode.id, field: 'ip_adapter' },
@@ -85,3 +81,27 @@ export const addIPAdapterToLinearGraph = async (
     upsertMetadata(graph, { ipAdapters: ipAdapterMetdata });
   }
 };
+
+const buildIPAdapterMetadata = (ipAdapter: IPAdapterConfig): S['IPAdapterMetadataField'] => {
+  const { controlImage, beginStepPct, endStepPct, model, weight } = ipAdapter;
+
+  assert(model, 'IP Adapter model is required');
+
+  let image: ImageField | null = null;
+
+  if (controlImage) {
+    image = {
+      image_name: controlImage,
+    };
+  }
+
+  assert(image, 'IP Adapter image is required');
+
+  return {
+    ip_adapter_model: model,
+    weight,
+    begin_step_percent: beginStepPct,
+    end_step_percent: endStepPct,
+    image,
+  };
+};

invokeai/frontend/web/src/features/nodes/util/graph/addT2IAdapterToLinearGraph.ts

Lines changed: 55 additions & 23 deletions
@@ -1,11 +1,15 @@
 import type { RootState } from 'app/store/store';
 import { selectValidT2IAdapters } from 'features/controlAdapters/store/controlAdaptersSlice';
+import type { ControlAdapterProcessorType, T2IAdapterConfig } from 'features/controlAdapters/store/types';
+import type { ImageField } from 'features/nodes/types/common';
 import type {
   CollectInvocation,
   CoreMetadataInvocation,
   NonNullableGraph,
+  S,
   T2IAdapterInvocation,
 } from 'services/api/types';
+import { assert } from 'tsafe';

 import { T2I_ADAPTER_COLLECT } from './constants';
 import { upsertMetadata } from './metadata';
@@ -68,33 +72,12 @@ export const addT2IAdaptersToLinearGraph = async (
       resize_mode: resizeMode,
       t2i_adapter_model: model,
       weight: weight,
+      image: buildControlImage(controlImage, processedControlImage, processorType),
     };

-    if (processedControlImage && processorType !== 'none') {
-      // We've already processed the image in the app, so we can just use the processed image
-      t2iAdapterNode.image = {
-        image_name: processedControlImage,
-      };
-    } else if (controlImage) {
-      // The control image is preprocessed
-      t2iAdapterNode.image = {
-        image_name: controlImage,
-      };
-    } else {
-      // Skip CAs without an unprocessed image - should never happen, we already filtered the list of valid CAs
-      return;
-    }
-
     graph.nodes[t2iAdapterNode.id] = t2iAdapterNode;

-    t2iAdapterMetadata.push({
-      begin_step_percent: beginStepPct,
-      end_step_percent: endStepPct,
-      resize_mode: resizeMode,
-      t2i_adapter_model: t2iAdapter.model,
-      weight: weight,
-      image: t2iAdapterNode.image,
-    });
+    t2iAdapterMetadata.push(buildT2IAdapterMetadata(t2iAdapter));

     graph.edges.push({
       source: { node_id: t2iAdapterNode.id, field: 't2i_adapter' },
@@ -108,3 +91,52 @@ export const addT2IAdaptersToLinearGraph = async (
     upsertMetadata(graph, { t2iAdapters: t2iAdapterMetadata });
   }
 };
+
+const buildControlImage = (
+  controlImage: string | null,
+  processedControlImage: string | null,
+  processorType: ControlAdapterProcessorType
+): ImageField => {
+  let image: ImageField | null = null;
+  if (processedControlImage && processorType !== 'none') {
+    // We've already processed the image in the app, so we can just use the processed image
+    image = {
+      image_name: processedControlImage,
+    };
+  } else if (controlImage) {
+    // The control image is preprocessed
+    image = {
+      image_name: controlImage,
+    };
+  }
+  assert(image, 'T2I Adapter image is required');
+  return image;
+};
+
+const buildT2IAdapterMetadata = (t2iAdapter: T2IAdapterConfig): S['T2IAdapterMetadataField'] => {
+  const { controlImage, processedControlImage, beginStepPct, endStepPct, resizeMode, model, processorType, weight } =
+    t2iAdapter;
+
+  assert(model, 'T2I Adapter model is required');
+
+  const processed_image =
+    processedControlImage && processorType !== 'none'
+      ? {
+          image_name: processedControlImage,
+        }
+      : null;
+
+  assert(controlImage, 'T2I Adapter image is required');
+
+  return {
+    t2i_adapter_model: model,
+    weight,
+    begin_step_percent: beginStepPct,
+    end_step_percent: endStepPct,
+    resize_mode: resizeMode,
+    image: {
+      image_name: controlImage,
+    },
+    processed_image,
+  };
+};
