diff --git a/invokeai/app/invocations/controlnet_image_processors.py b/invokeai/app/invocations/controlnet_image_processors.py
index 8d7d771434b..4d958cd09c5 100644
--- a/invokeai/app/invocations/controlnet_image_processors.py
+++ b/invokeai/app/invocations/controlnet_image_processors.py
@@ -176,6 +176,7 @@ def invoke(self, context: InvocationContext) -> ImageOutput:
class CannyImageProcessorInvocation(ImageProcessorInvocation):
"""Canny edge detection for ControlNet"""
+ image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)
low_threshold: int = InputField(
default=100, ge=0, le=255, description="The low threshold of the Canny pixel gradient (0-255)"
)
@@ -189,7 +190,12 @@ def load_image(self, context: InvocationContext) -> Image.Image:
def run_processor(self, image):
canny_processor = CannyDetector()
- processed_image = canny_processor(image, self.low_threshold, self.high_threshold)
+ processed_image = canny_processor(
+ image,
+ self.low_threshold,
+ self.high_threshold,
+ image_resolution=self.image_resolution,
+ )
return processed_image
@@ -279,6 +285,7 @@ class MidasDepthImageProcessorInvocation(ImageProcessorInvocation):
a_mult: float = InputField(default=2.0, ge=0, description="Midas parameter `a_mult` (a = a_mult * PI)")
bg_th: float = InputField(default=0.1, ge=0, description="Midas parameter `bg_th`")
+ image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)
# depth_and_normal not supported in controlnet_aux v0.0.3
# depth_and_normal: bool = InputField(default=False, description="whether to use depth and normal mode")
@@ -288,6 +295,7 @@ def run_processor(self, image):
image,
a=np.pi * self.a_mult,
bg_th=self.bg_th,
+ image_resolution=self.image_resolution,
# dept_and_normal not supported in controlnet_aux v0.0.3
# depth_and_normal=self.depth_and_normal,
)
@@ -419,10 +427,13 @@ class MediapipeFaceProcessorInvocation(ImageProcessorInvocation):
max_faces: int = InputField(default=1, ge=1, description="Maximum number of faces to detect")
min_confidence: float = InputField(default=0.5, ge=0, le=1, description="Minimum confidence for face detection")
+ image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)
def run_processor(self, image):
mediapipe_face_processor = MediapipeFaceDetector()
- processed_image = mediapipe_face_processor(image, max_faces=self.max_faces, min_confidence=self.min_confidence)
+ processed_image = mediapipe_face_processor(
+ image, max_faces=self.max_faces, min_confidence=self.min_confidence, image_resolution=self.image_resolution
+ )
return processed_image
@@ -505,13 +516,15 @@ def run_processor(self, img):
class SegmentAnythingProcessorInvocation(ImageProcessorInvocation):
"""Applies segment anything processing to image"""
+ image_resolution: int = InputField(default=512, ge=0, description=FieldDescriptions.image_res)
+
def run_processor(self, image):
# segment_anything_processor = SamDetector.from_pretrained("ybelkada/segment-anything", subfolder="checkpoints")
segment_anything_processor = SamDetectorReproducibleColors.from_pretrained(
"ybelkada/segment-anything", subfolder="checkpoints"
)
np_img = np.array(image, dtype=np.uint8)
- processed_image = segment_anything_processor(np_img)
+ processed_image = segment_anything_processor(np_img, image_resolution=self.image_resolution)
return processed_image
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/CannyProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/CannyProcessor.tsx
index 1cc458be8ef..427a23963e6 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/CannyProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/CannyProcessor.tsx
@@ -1,14 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredCannyImageProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.canny_image_processor.default as RequiredCannyImageProcessorInvocation;
-
type CannyProcessorProps = {
controlNetId: string;
processorNode: RequiredCannyImageProcessorInvocation;
@@ -17,9 +15,12 @@ type CannyProcessorProps = {
const CannyProcessor = (props: CannyProcessorProps) => {
const { controlNetId, processorNode, isEnabled } = props;
- const { low_threshold, high_threshold } = processorNode;
+ const { low_threshold, high_threshold, image_resolution } = processorNode;
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'canny_image_processor'
+ ) as RequiredCannyImageProcessorInvocation;
const handleLowThresholdChanged = useCallback(
(v: number) => {
@@ -35,6 +36,13 @@ const CannyProcessor = (props: CannyProcessorProps) => {
[controlNetId, processorChanged]
);
+ const handleImageResolutionChanged = useCallback(
+ (v: number) => {
+ processorChanged(controlNetId, { image_resolution: v });
+ },
+ [controlNetId, processorChanged]
+ );
+
return (
@@ -42,14 +50,14 @@ const CannyProcessor = (props: CannyProcessorProps) => {
@@ -59,18 +67,36 @@ const CannyProcessor = (props: CannyProcessorProps) => {
+
+ {t('controlnet.imageResolution')}
+
+
+
);
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/ColorMapProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/ColorMapProcessor.tsx
index b0ddde6f936..3dd6bf0aa98 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/ColorMapProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/ColorMapProcessor.tsx
@@ -1,14 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredColorMapImageProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.color_map_image_processor.default as RequiredColorMapImageProcessorInvocation;
-
type ColorMapProcessorProps = {
controlNetId: string;
processorNode: RequiredColorMapImageProcessorInvocation;
@@ -20,6 +18,9 @@ const ColorMapProcessor = (props: ColorMapProcessorProps) => {
const { color_map_tile_size } = processorNode;
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'color_map_image_processor'
+ ) as RequiredColorMapImageProcessorInvocation;
const handleColorMapTileSizeChanged = useCallback(
(v: number) => {
@@ -34,7 +35,7 @@ const ColorMapProcessor = (props: ColorMapProcessorProps) => {
{t('controlnet.colorMapTileSize')}
{
/>
{
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'content_shuffle_image_processor'
+ ) as RequiredContentShuffleImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -63,7 +64,7 @@ const ContentShuffleProcessor = (props: Props) => {
{t('controlnet.detectResolution')}
{
/>
{
{t('controlnet.imageResolution')}
{
/>
{
{t('controlnet.w')}
-
-
+
+
{t('controlnet.h')}
-
-
+
+
{t('controlnet.f')}
-
-
+
+
);
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/DWOpenposeProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/DWOpenposeProcessor.tsx
index 6d9064af394..6761bfd4e1d 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/DWOpenposeProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/DWOpenposeProcessor.tsx
@@ -1,6 +1,6 @@
import { CompositeNumberInput, CompositeSlider, Flex, FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredDWOpenposeImageProcessorInvocation } from 'features/controlAdapters/store/types';
import type { ChangeEvent } from 'react';
import { memo, useCallback } from 'react';
@@ -8,9 +8,6 @@ import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.dw_openpose_image_processor
- .default as RequiredDWOpenposeImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredDWOpenposeImageProcessorInvocation;
@@ -23,6 +20,10 @@ const DWOpenposeProcessor = (props: Props) => {
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'dw_openpose_image_processor'
+ ) as RequiredDWOpenposeImageProcessorInvocation;
+
const handleDrawBodyChanged = useCallback(
(e: ChangeEvent) => {
processorChanged(controlNetId, { draw_body: e.target.checked });
@@ -56,15 +57,15 @@ const DWOpenposeProcessor = (props: Props) => {
{t('controlnet.body')}
-
+
{t('controlnet.face')}
-
+
{t('controlnet.hands')}
-
+
@@ -72,7 +73,7 @@ const DWOpenposeProcessor = (props: Props) => {
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/DepthAnyThingProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/DepthAnyThingProcessor.tsx
index 272a2cab49e..a5b04624378 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/DepthAnyThingProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/DepthAnyThingProcessor.tsx
@@ -1,7 +1,7 @@
import type { ComboboxOnChange } from '@invoke-ai/ui-library';
import { Combobox, CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type {
DepthAnythingModelSize,
RequiredDepthAnythingImageProcessorInvocation,
@@ -12,9 +12,6 @@ import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.midas_depth_image_processor
- .default as RequiredDepthAnythingImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredDepthAnythingImageProcessorInvocation;
@@ -25,9 +22,12 @@ const DepthAnythingProcessor = (props: Props) => {
const { controlNetId, processorNode, isEnabled } = props;
const { model_size, resolution } = processorNode;
const processorChanged = useProcessorNodeChanged();
-
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'depth_anything_image_processor'
+ ) as RequiredDepthAnythingImageProcessorInvocation;
+
const handleModelSizeChange = useCallback(
(v) => {
if (!isDepthAnythingModelSize(v?.value)) {
@@ -68,7 +68,7 @@ const DepthAnythingProcessor = (props: Props) => {
{t('controlnet.modelSize')}
@@ -78,7 +78,7 @@ const DepthAnythingProcessor = (props: Props) => {
{
{
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor('hed_image_processor') as RequiredHedImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -52,7 +52,7 @@ const HedPreprocessor = (props: HedProcessorProps) => {
{t('controlnet.detectResolution')}
{
/>
{
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartAnimeProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartAnimeProcessor.tsx
index 3d08e4724af..9849bda7c89 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartAnimeProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartAnimeProcessor.tsx
@@ -1,15 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredLineartAnimeImageProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.lineart_anime_image_processor
- .default as RequiredLineartAnimeImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredLineartAnimeImageProcessorInvocation;
@@ -22,6 +19,10 @@ const LineartAnimeProcessor = (props: Props) => {
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'lineart_anime_image_processor'
+ ) as RequiredLineartAnimeImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -42,7 +43,7 @@ const LineartAnimeProcessor = (props: Props) => {
{t('controlnet.detectResolution')}
{
/>
{
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartProcessor.tsx
index 22a96f51f93..51d082eb57f 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/LineartProcessor.tsx
@@ -1,6 +1,6 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredLineartImageProcessorInvocation } from 'features/controlAdapters/store/types';
import type { ChangeEvent } from 'react';
import { memo, useCallback } from 'react';
@@ -8,8 +8,6 @@ import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.lineart_image_processor.default as RequiredLineartImageProcessorInvocation;
-
type LineartProcessorProps = {
controlNetId: string;
processorNode: RequiredLineartImageProcessorInvocation;
@@ -22,6 +20,10 @@ const LineartProcessor = (props: LineartProcessorProps) => {
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'lineart_image_processor'
+ ) as RequiredLineartImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -50,7 +52,7 @@ const LineartProcessor = (props: LineartProcessorProps) => {
{
@@ -68,7 +70,7 @@ const LineartProcessor = (props: LineartProcessorProps) => {
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MediapipeFaceProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MediapipeFaceProcessor.tsx
index a0d5308210e..3cf0758504a 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MediapipeFaceProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MediapipeFaceProcessor.tsx
@@ -1,14 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredMediapipeFaceProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.mediapipe_face_processor.default as RequiredMediapipeFaceProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredMediapipeFaceProcessorInvocation;
@@ -17,10 +15,14 @@ type Props = {
const MediapipeFaceProcessor = (props: Props) => {
const { controlNetId, processorNode, isEnabled } = props;
- const { max_faces, min_confidence } = processorNode;
+ const { max_faces, min_confidence, image_resolution } = processorNode;
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'mediapipe_face_processor'
+ ) as RequiredMediapipeFaceProcessorInvocation;
+
const handleMaxFacesChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { max_faces: v });
@@ -35,6 +37,13 @@ const MediapipeFaceProcessor = (props: Props) => {
[controlNetId, processorChanged]
);
+ const handleImageResolutionChanged = useCallback(
+ (v: number) => {
+ processorChanged(controlNetId, { image_resolution: v });
+ },
+ [controlNetId, processorChanged]
+ );
+
return (
@@ -42,7 +51,7 @@ const MediapipeFaceProcessor = (props: Props) => {
{
@@ -60,7 +69,7 @@ const MediapipeFaceProcessor = (props: Props) => {
{
+
+ {t('controlnet.imageResolution')}
+
+
+
);
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MidasDepthProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MidasDepthProcessor.tsx
index ba245bc2ebd..c60baea0d90 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MidasDepthProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MidasDepthProcessor.tsx
@@ -1,15 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredMidasDepthImageProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.midas_depth_image_processor
- .default as RequiredMidasDepthImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredMidasDepthImageProcessorInvocation;
@@ -18,10 +15,14 @@ type Props = {
const MidasDepthProcessor = (props: Props) => {
const { controlNetId, processorNode, isEnabled } = props;
- const { a_mult, bg_th } = processorNode;
+ const { a_mult, bg_th, image_resolution } = processorNode;
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'midas_depth_image_processor'
+ ) as RequiredMidasDepthImageProcessorInvocation;
+
const handleAMultChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { a_mult: v });
@@ -36,6 +37,13 @@ const MidasDepthProcessor = (props: Props) => {
[controlNetId, processorChanged]
);
+ const handleImageResolutionChanged = useCallback(
+ (v: number) => {
+ processorChanged(controlNetId, { image_resolution: v });
+ },
+ [controlNetId, processorChanged]
+ );
+
return (
@@ -43,7 +51,7 @@ const MidasDepthProcessor = (props: Props) => {
{
{
{
+
+ {t('controlnet.imageResolution')}
+
+
+
);
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MlsdImageProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MlsdImageProcessor.tsx
index 5138b2168bb..69fd4f68074 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/MlsdImageProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/MlsdImageProcessor.tsx
@@ -1,14 +1,12 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredMlsdImageProcessorInvocation } from 'features/controlAdapters/store/types';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.mlsd_image_processor.default as RequiredMlsdImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredMlsdImageProcessorInvocation;
@@ -21,6 +19,8 @@ const MlsdImageProcessor = (props: Props) => {
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor('mlsd_image_processor') as RequiredMlsdImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -56,7 +56,7 @@ const MlsdImageProcessor = (props: Props) => {
{
@@ -74,7 +74,7 @@ const MlsdImageProcessor = (props: Props) => {
{
@@ -92,7 +92,7 @@ const MlsdImageProcessor = (props: Props) => {
{
{
{
{
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor(
+ 'normalbae_image_processor'
+ ) as RequiredNormalbaeImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -42,7 +44,7 @@ const NormalBaeProcessor = (props: Props) => {
{
@@ -60,7 +62,7 @@ const NormalBaeProcessor = (props: Props) => {
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/components/processors/PidiProcessor.tsx b/invokeai/frontend/web/src/features/controlAdapters/components/processors/PidiProcessor.tsx
index 8948e8250d3..763069f769a 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/components/processors/PidiProcessor.tsx
+++ b/invokeai/frontend/web/src/features/controlAdapters/components/processors/PidiProcessor.tsx
@@ -1,6 +1,6 @@
import { CompositeNumberInput, CompositeSlider, FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
import { useProcessorNodeChanged } from 'features/controlAdapters/components/hooks/useProcessorNodeChanged';
-import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import { useGetDefaultForControlnetProcessor } from 'features/controlAdapters/hooks/useGetDefaultForControlnetProcessor';
import type { RequiredPidiImageProcessorInvocation } from 'features/controlAdapters/store/types';
import type { ChangeEvent } from 'react';
import { memo, useCallback } from 'react';
@@ -8,8 +8,6 @@ import { useTranslation } from 'react-i18next';
import ProcessorWrapper from './common/ProcessorWrapper';
-const DEFAULTS = CONTROLNET_PROCESSORS.pidi_image_processor.default as RequiredPidiImageProcessorInvocation;
-
type Props = {
controlNetId: string;
processorNode: RequiredPidiImageProcessorInvocation;
@@ -22,6 +20,8 @@ const PidiProcessor = (props: Props) => {
const processorChanged = useProcessorNodeChanged();
const { t } = useTranslation();
+ const defaults = useGetDefaultForControlnetProcessor('pidi_image_processor') as RequiredPidiImageProcessorInvocation;
+
const handleDetectResolutionChanged = useCallback(
(v: number) => {
processorChanged(controlNetId, { detect_resolution: v });
@@ -57,7 +57,7 @@ const PidiProcessor = (props: Props) => {
{
@@ -75,7 +75,7 @@ const PidiProcessor = (props: Props) => {
{
diff --git a/invokeai/frontend/web/src/features/controlAdapters/hooks/useAddControlAdapter.ts b/invokeai/frontend/web/src/features/controlAdapters/hooks/useAddControlAdapter.ts
index 43c567b319a..1af2fc81b99 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/hooks/useAddControlAdapter.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/hooks/useAddControlAdapter.ts
@@ -36,7 +36,7 @@ export const useAddControlAdapter = (type: ControlAdapterType) => {
) {
const defaultPreprocessor = firstModel.default_settings?.preprocessor;
const processorType = isControlAdapterProcessorType(defaultPreprocessor) ? defaultPreprocessor : 'none';
- const processorNode = CONTROLNET_PROCESSORS[processorType].default;
+ const processorNode = CONTROLNET_PROCESSORS[processorType].buildDefaults(baseModel);
dispatch(
controlAdapterAdded({
type,
@@ -55,7 +55,7 @@ export const useAddControlAdapter = (type: ControlAdapterType) => {
overrides: { model: firstModel },
})
);
- }, [dispatch, firstModel, isDisabled, type]);
+ }, [dispatch, firstModel, isDisabled, type, baseModel]);
return [addControlAdapter, isDisabled] as const;
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/hooks/useGetDefaultForControlnetProcessor.ts b/invokeai/frontend/web/src/features/controlAdapters/hooks/useGetDefaultForControlnetProcessor.ts
new file mode 100644
index 00000000000..99d2e0da8c3
--- /dev/null
+++ b/invokeai/frontend/web/src/features/controlAdapters/hooks/useGetDefaultForControlnetProcessor.ts
@@ -0,0 +1,14 @@
+import { useAppSelector } from 'app/store/storeHooks';
+import { CONTROLNET_PROCESSORS } from 'features/controlAdapters/store/constants';
+import type { ControlAdapterProcessorType } from 'features/controlAdapters/store/types';
+import { useMemo } from 'react';
+
+export const useGetDefaultForControlnetProcessor = (processorType: ControlAdapterProcessorType) => {
+ const baseModel = useAppSelector((s) => s.generation.model?.base);
+
+ const defaults = useMemo(() => {
+ return CONTROLNET_PROCESSORS[processorType].buildDefaults(baseModel);
+ }, [baseModel, processorType]);
+
+ return defaults;
+};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/store/constants.ts b/invokeai/frontend/web/src/features/controlAdapters/store/constants.ts
index 9129c5d2995..a4f4eb74181 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/store/constants.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/store/constants.ts
@@ -1,4 +1,5 @@
import i18n from 'i18next';
+import type { BaseModelType } from 'services/api/types';
import type { ControlAdapterProcessorType, RequiredControlAdapterProcessorNode } from './types';
@@ -8,7 +9,7 @@ type ControlNetProcessorsDict = Record<
type: ControlAdapterProcessorType | 'none';
label: string;
description: string;
- default: RequiredControlAdapterProcessorNode | { type: 'none' };
+ buildDefaults(baseModel?: BaseModelType): RequiredControlAdapterProcessorNode | { type: 'none' };
}
>;
/**
@@ -29,9 +30,9 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.noneDescription');
},
- default: {
+ buildDefaults: () => ({
type: 'none',
- },
+ }),
},
canny_image_processor: {
type: 'canny_image_processor',
@@ -41,12 +42,13 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.cannyDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'canny_image_processor',
type: 'canny_image_processor',
low_threshold: 100,
high_threshold: 200,
- },
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
color_map_image_processor: {
type: 'color_map_image_processor',
@@ -56,11 +58,11 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.colorMapDescription');
},
- default: {
+ buildDefaults: () => ({
id: 'color_map_image_processor',
type: 'color_map_image_processor',
color_map_tile_size: 64,
- },
+ }),
},
content_shuffle_image_processor: {
type: 'content_shuffle_image_processor',
@@ -70,15 +72,15 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.contentShuffleDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'content_shuffle_image_processor',
type: 'content_shuffle_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
- h: 512,
- w: 512,
- f: 256,
- },
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ h: baseModel === 'sdxl' ? 1024 : 512,
+ w: baseModel === 'sdxl' ? 1024 : 512,
+ f: baseModel === 'sdxl' ? 512 : 256,
+ }),
},
depth_anything_image_processor: {
type: 'depth_anything_image_processor',
@@ -88,12 +90,12 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.depthAnythingDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'depth_anything_image_processor',
type: 'depth_anything_image_processor',
model_size: 'small',
- resolution: 512,
- },
+ resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
hed_image_processor: {
type: 'hed_image_processor',
@@ -103,13 +105,13 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.hedDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'hed_image_processor',
type: 'hed_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
scribble: false,
- },
+ }),
},
lineart_anime_image_processor: {
type: 'lineart_anime_image_processor',
@@ -119,12 +121,12 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.lineartAnimeDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'lineart_anime_image_processor',
type: 'lineart_anime_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
- },
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
lineart_image_processor: {
type: 'lineart_image_processor',
@@ -134,13 +136,13 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.lineartDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'lineart_image_processor',
type: 'lineart_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
coarse: false,
- },
+ }),
},
mediapipe_face_processor: {
type: 'mediapipe_face_processor',
@@ -150,12 +152,13 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.mediapipeFaceDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'mediapipe_face_processor',
type: 'mediapipe_face_processor',
max_faces: 1,
min_confidence: 0.5,
- },
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
midas_depth_image_processor: {
type: 'midas_depth_image_processor',
@@ -165,12 +168,13 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.depthMidasDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'midas_depth_image_processor',
type: 'midas_depth_image_processor',
a_mult: 2,
bg_th: 0.1,
- },
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
mlsd_image_processor: {
type: 'mlsd_image_processor',
@@ -180,14 +184,14 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.mlsdDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'mlsd_image_processor',
type: 'mlsd_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
thr_d: 0.1,
thr_v: 0.1,
- },
+ }),
},
normalbae_image_processor: {
type: 'normalbae_image_processor',
@@ -197,12 +201,12 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.normalBaeDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'normalbae_image_processor',
type: 'normalbae_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
- },
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ }),
},
dw_openpose_image_processor: {
type: 'dw_openpose_image_processor',
@@ -212,14 +216,14 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.dwOpenposeDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'dw_openpose_image_processor',
type: 'dw_openpose_image_processor',
- image_resolution: 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
draw_body: true,
draw_face: false,
draw_hands: false,
- },
+ }),
},
pidi_image_processor: {
type: 'pidi_image_processor',
@@ -229,14 +233,14 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.pidiDescription');
},
- default: {
+ buildDefaults: (baseModel?: BaseModelType) => ({
id: 'pidi_image_processor',
type: 'pidi_image_processor',
- detect_resolution: 512,
- image_resolution: 512,
+ detect_resolution: baseModel === 'sdxl' ? 1024 : 512,
+ image_resolution: baseModel === 'sdxl' ? 1024 : 512,
scribble: false,
safe: false,
- },
+ }),
},
zoe_depth_image_processor: {
type: 'zoe_depth_image_processor',
@@ -246,9 +250,9 @@ export const CONTROLNET_PROCESSORS: ControlNetProcessorsDict = {
get description() {
return i18n.t('controlnet.depthZoeDescription');
},
- default: {
+ buildDefaults: () => ({
id: 'zoe_depth_image_processor',
type: 'zoe_depth_image_processor',
- },
+ }),
},
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/store/controlAdaptersSlice.ts b/invokeai/frontend/web/src/features/controlAdapters/store/controlAdaptersSlice.ts
index e29affc91ef..054f88e26bd 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/store/controlAdaptersSlice.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/store/controlAdaptersSlice.ts
@@ -294,7 +294,7 @@ export const controlAdaptersSlice = createSlice({
}
const processorNode = cloneDeep(
- CONTROLNET_PROCESSORS[processorType].default
+ CONTROLNET_PROCESSORS[processorType].buildDefaults(cn.model?.base)
) as RequiredControlAdapterProcessorNode;
caAdapter.updateOne(state, {
diff --git a/invokeai/frontend/web/src/features/controlAdapters/store/types.ts b/invokeai/frontend/web/src/features/controlAdapters/store/types.ts
index 28e375fe493..fb52b2a451b 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/store/types.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/store/types.ts
@@ -72,7 +72,7 @@ export const isControlAdapterProcessorType = (v: unknown): v is ControlAdapterProcessorType =>
*/
export type RequiredCannyImageProcessorInvocation = O.Required<
CannyImageProcessorInvocation,
- 'type' | 'low_threshold' | 'high_threshold'
+ 'type' | 'low_threshold' | 'high_threshold' | 'image_resolution'
>;
/**
@@ -133,7 +133,7 @@ export type RequiredLineartImageProcessorInvocation = O.Required<
*/
export type RequiredMediapipeFaceProcessorInvocation = O.Required<
MediapipeFaceProcessorInvocation,
- 'type' | 'max_faces' | 'min_confidence'
+ 'type' | 'max_faces' | 'min_confidence' | 'image_resolution'
>;
/**
@@ -141,7 +141,7 @@ export type RequiredMediapipeFaceProcessorInvocation = O.Required<
*/
export type RequiredMidasDepthImageProcessorInvocation = O.Required<
MidasDepthImageProcessorInvocation,
- 'type' | 'a_mult' | 'bg_th'
+ 'type' | 'a_mult' | 'bg_th' | 'image_resolution'
>;
/**
diff --git a/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapter.ts b/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapter.ts
index 34803b3de39..94a867cf886 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapter.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapter.ts
@@ -21,7 +21,7 @@ export const initialControlNet: Omit<ControlNetConfig, 'id'> = {
controlImage: null,
processedControlImage: null,
processorType: 'canny_image_processor',
- processorNode: CONTROLNET_PROCESSORS.canny_image_processor.default as RequiredCannyImageProcessorInvocation,
+ processorNode: CONTROLNET_PROCESSORS.canny_image_processor.buildDefaults() as RequiredCannyImageProcessorInvocation,
shouldAutoConfig: true,
};
@@ -36,7 +36,7 @@ export const initialT2IAdapter: Omit<T2IAdapterConfig, 'id'> = {
controlImage: null,
processedControlImage: null,
processorType: 'canny_image_processor',
- processorNode: CONTROLNET_PROCESSORS.canny_image_processor.default as RequiredCannyImageProcessorInvocation,
+ processorNode: CONTROLNET_PROCESSORS.canny_image_processor.buildDefaults() as RequiredCannyImageProcessorInvocation,
shouldAutoConfig: true,
};
diff --git a/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapterProcessor.ts b/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapterProcessor.ts
index 911bacc7874..63766b8e6ea 100644
--- a/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapterProcessor.ts
+++ b/invokeai/frontend/web/src/features/controlAdapters/util/buildControlAdapterProcessor.ts
@@ -5,7 +5,7 @@ import type { ControlNetModelConfig, T2IAdapterModelConfig } from 'services/api/
export const buildControlAdapterProcessor = (modelConfig: ControlNetModelConfig | T2IAdapterModelConfig) => {
const defaultPreprocessor = modelConfig.default_settings?.preprocessor;
const processorType = isControlAdapterProcessorType(defaultPreprocessor) ? defaultPreprocessor : 'none';
- const processorNode = CONTROLNET_PROCESSORS[processorType].default;
+ const processorNode = CONTROLNET_PROCESSORS[processorType].buildDefaults(modelConfig.base);
return { processorType, processorNode };
};
diff --git a/invokeai/frontend/web/src/services/api/schema.ts b/invokeai/frontend/web/src/services/api/schema.ts
index e37129661fb..b301b5785cb 100644
--- a/invokeai/frontend/web/src/services/api/schema.ts
+++ b/invokeai/frontend/web/src/services/api/schema.ts
@@ -1662,6 +1662,12 @@ export type components = {
use_cache?: boolean;
/** @description The image to process */
image?: components["schemas"]["ImageField"];
+ /**
+ * Image Resolution
+ * @description Pixel resolution for output image
+ * @default 512
+ */
+ image_resolution?: number;
/**
* Low Threshold
* @description The low threshold of the Canny pixel gradient (0-255)
@@ -4104,7 +4110,7 @@ export type components = {
* @description The nodes in this graph
*/
nodes: {
- [key: string]: components["schemas"]["CalculateImageTilesMinimumOverlapInvocation"] | components["schemas"]["SDXLRefinerCompelPromptInvocation"] | components["schemas"]["StringJoinInvocation"] | components["schemas"]["FreeUInvocation"] | components["schemas"]["ImageToLatentsInvocation"] | components["schemas"]["FaceIdentifierInvocation"] | components["schemas"]["ImageChannelInvocation"] | components["schemas"]["MultiplyInvocation"] | components["schemas"]["SDXLCompelPromptInvocation"] | components["schemas"]["CompelInvocation"] | components["schemas"]["SDXLRefinerModelLoaderInvocation"] | components["schemas"]["SDXLLoRALoaderInvocation"] | components["schemas"]["IPAdapterInvocation"] | components["schemas"]["FloatMathInvocation"] | components["schemas"]["IntegerInvocation"] | components["schemas"]["ColorMapImageProcessorInvocation"] | components["schemas"]["FaceOffInvocation"] | components["schemas"]["LatentsCollectionInvocation"] | components["schemas"]["MergeTilesToImageInvocation"] | components["schemas"]["MidasDepthImageProcessorInvocation"] | components["schemas"]["LeresImageProcessorInvocation"] | components["schemas"]["MaskCombineInvocation"] | components["schemas"]["UnsharpMaskInvocation"] | components["schemas"]["RandomIntInvocation"] | components["schemas"]["CenterPadCropInvocation"] | components["schemas"]["MetadataItemInvocation"] | components["schemas"]["ImageConvertInvocation"] | components["schemas"]["ColorCorrectInvocation"] | components["schemas"]["SeamlessModeInvocation"] | components["schemas"]["ImageResizeInvocation"] | components["schemas"]["ControlNetInvocation"] | components["schemas"]["InfillPatchMatchInvocation"] | components["schemas"]["SDXLModelLoaderInvocation"] | components["schemas"]["LaMaInfillInvocation"] | components["schemas"]["VAELoaderInvocation"] | components["schemas"]["BooleanCollectionInvocation"] | components["schemas"]["StringSplitInvocation"] | components["schemas"]["ImageNSFWBlurInvocation"] | 
components["schemas"]["FloatToIntegerInvocation"] | components["schemas"]["ZoeDepthImageProcessorInvocation"] | components["schemas"]["NoiseInvocation"] | components["schemas"]["ScaleLatentsInvocation"] | components["schemas"]["ImageCropInvocation"] | components["schemas"]["BlendLatentsInvocation"] | components["schemas"]["CropLatentsCoreInvocation"] | components["schemas"]["SegmentAnythingProcessorInvocation"] | components["schemas"]["ImageChannelMultiplyInvocation"] | components["schemas"]["RoundInvocation"] | components["schemas"]["ImagePasteInvocation"] | components["schemas"]["ImageBlurInvocation"] | components["schemas"]["MergeMetadataInvocation"] | components["schemas"]["SubtractInvocation"] | components["schemas"]["ImageChannelOffsetInvocation"] | components["schemas"]["ImageCollectionInvocation"] | components["schemas"]["CannyImageProcessorInvocation"] | components["schemas"]["ResizeLatentsInvocation"] | components["schemas"]["ImageInverseLerpInvocation"] | components["schemas"]["ESRGANInvocation"] | components["schemas"]["FloatLinearRangeInvocation"] | components["schemas"]["StringSplitNegInvocation"] | components["schemas"]["HedImageProcessorInvocation"] | components["schemas"]["StringInvocation"] | components["schemas"]["DepthAnythingImageProcessorInvocation"] | components["schemas"]["CoreMetadataInvocation"] | components["schemas"]["BooleanInvocation"] | components["schemas"]["IterateInvocation"] | components["schemas"]["TileToPropertiesInvocation"] | components["schemas"]["IntegerCollectionInvocation"] | components["schemas"]["IntegerMathInvocation"] | components["schemas"]["LatentsToImageInvocation"] | components["schemas"]["FaceMaskInvocation"] | components["schemas"]["FloatCollectionInvocation"] | components["schemas"]["SchedulerInvocation"] | components["schemas"]["PidiImageProcessorInvocation"] | components["schemas"]["DivideInvocation"] | components["schemas"]["MainModelLoaderInvocation"] | components["schemas"]["ImageScaleInvocation"] | 
components["schemas"]["ContentShuffleImageProcessorInvocation"] | components["schemas"]["ImageHueAdjustmentInvocation"] | components["schemas"]["InfillColorInvocation"] | components["schemas"]["PromptsFromFileInvocation"] | components["schemas"]["BlankImageInvocation"] | components["schemas"]["CanvasPasteBackInvocation"] | components["schemas"]["RandomFloatInvocation"] | components["schemas"]["CreateDenoiseMaskInvocation"] | components["schemas"]["AddInvocation"] | components["schemas"]["ImageLerpInvocation"] | components["schemas"]["StringReplaceInvocation"] | components["schemas"]["TileResamplerProcessorInvocation"] | components["schemas"]["RangeInvocation"] | components["schemas"]["ConditioningCollectionInvocation"] | components["schemas"]["FloatInvocation"] | components["schemas"]["CalculateImageTilesInvocation"] | components["schemas"]["MlsdImageProcessorInvocation"] | components["schemas"]["DenoiseLatentsInvocation"] | components["schemas"]["CvInpaintInvocation"] | components["schemas"]["SaveImageInvocation"] | components["schemas"]["CLIPSkipInvocation"] | components["schemas"]["CV2InfillInvocation"] | components["schemas"]["LineartImageProcessorInvocation"] | components["schemas"]["ShowImageInvocation"] | components["schemas"]["IdealSizeInvocation"] | components["schemas"]["ImageWatermarkInvocation"] | components["schemas"]["StringJoinThreeInvocation"] | components["schemas"]["LoRALoaderInvocation"] | components["schemas"]["StepParamEasingInvocation"] | components["schemas"]["RandomRangeInvocation"] | components["schemas"]["CalculateImageTilesEvenSplitInvocation"] | components["schemas"]["MaskEdgeInvocation"] | components["schemas"]["CollectInvocation"] | components["schemas"]["RangeOfSizeInvocation"] | components["schemas"]["ImageInvocation"] | components["schemas"]["CreateGradientMaskInvocation"] | components["schemas"]["MetadataInvocation"] | components["schemas"]["NormalbaeImageProcessorInvocation"] | components["schemas"]["DynamicPromptInvocation"] | 
components["schemas"]["LatentsInvocation"] | components["schemas"]["InfillTileInvocation"] | components["schemas"]["ColorInvocation"] | components["schemas"]["ConditioningInvocation"] | components["schemas"]["StringCollectionInvocation"] | components["schemas"]["MediapipeFaceProcessorInvocation"] | components["schemas"]["PairTileImageInvocation"] | components["schemas"]["ImageMultiplyInvocation"] | components["schemas"]["DWOpenposeImageProcessorInvocation"] | components["schemas"]["T2IAdapterInvocation"] | components["schemas"]["MaskFromAlphaInvocation"] | components["schemas"]["LineartAnimeImageProcessorInvocation"];
+ [key: string]: components["schemas"]["IPAdapterInvocation"] | components["schemas"]["ConditioningCollectionInvocation"] | components["schemas"]["VAELoaderInvocation"] | components["schemas"]["FaceMaskInvocation"] | components["schemas"]["IntegerCollectionInvocation"] | components["schemas"]["StepParamEasingInvocation"] | components["schemas"]["StringSplitNegInvocation"] | components["schemas"]["BooleanInvocation"] | components["schemas"]["ImageChannelInvocation"] | components["schemas"]["HedImageProcessorInvocation"] | components["schemas"]["MediapipeFaceProcessorInvocation"] | components["schemas"]["SubtractInvocation"] | components["schemas"]["TileResamplerProcessorInvocation"] | components["schemas"]["CoreMetadataInvocation"] | components["schemas"]["MaskEdgeInvocation"] | components["schemas"]["CollectInvocation"] | components["schemas"]["CvInpaintInvocation"] | components["schemas"]["FloatInvocation"] | components["schemas"]["MergeMetadataInvocation"] | components["schemas"]["MergeTilesToImageInvocation"] | components["schemas"]["ImageCollectionInvocation"] | components["schemas"]["ImageScaleInvocation"] | components["schemas"]["RandomRangeInvocation"] | components["schemas"]["StringJoinThreeInvocation"] | components["schemas"]["InfillColorInvocation"] | components["schemas"]["ESRGANInvocation"] | components["schemas"]["CreateGradientMaskInvocation"] | components["schemas"]["DivideInvocation"] | components["schemas"]["LatentsCollectionInvocation"] | components["schemas"]["StringReplaceInvocation"] | components["schemas"]["LatentsToImageInvocation"] | components["schemas"]["FloatCollectionInvocation"] | components["schemas"]["SeamlessModeInvocation"] | components["schemas"]["ImageMultiplyInvocation"] | components["schemas"]["ScaleLatentsInvocation"] | components["schemas"]["MetadataItemInvocation"] | components["schemas"]["BooleanCollectionInvocation"] | components["schemas"]["ImageLerpInvocation"] | components["schemas"]["LatentsInvocation"] | 
components["schemas"]["LineartImageProcessorInvocation"] | components["schemas"]["MainModelLoaderInvocation"] | components["schemas"]["PidiImageProcessorInvocation"] | components["schemas"]["SaveImageInvocation"] | components["schemas"]["CV2InfillInvocation"] | components["schemas"]["MultiplyInvocation"] | components["schemas"]["ImageToLatentsInvocation"] | components["schemas"]["MetadataInvocation"] | components["schemas"]["StringInvocation"] | components["schemas"]["FaceIdentifierInvocation"] | components["schemas"]["CenterPadCropInvocation"] | components["schemas"]["MlsdImageProcessorInvocation"] | components["schemas"]["ColorCorrectInvocation"] | components["schemas"]["RandomFloatInvocation"] | components["schemas"]["ImageInverseLerpInvocation"] | components["schemas"]["LineartAnimeImageProcessorInvocation"] | components["schemas"]["StringSplitInvocation"] | components["schemas"]["BlendLatentsInvocation"] | components["schemas"]["DynamicPromptInvocation"] | components["schemas"]["CalculateImageTilesInvocation"] | components["schemas"]["IterateInvocation"] | components["schemas"]["LaMaInfillInvocation"] | components["schemas"]["FloatToIntegerInvocation"] | components["schemas"]["NoiseInvocation"] | components["schemas"]["BlankImageInvocation"] | components["schemas"]["RangeInvocation"] | components["schemas"]["SDXLLoRALoaderInvocation"] | components["schemas"]["SDXLCompelPromptInvocation"] | components["schemas"]["ImageChannelOffsetInvocation"] | components["schemas"]["FreeUInvocation"] | components["schemas"]["ImageInvocation"] | components["schemas"]["ImageBlurInvocation"] | components["schemas"]["DWOpenposeImageProcessorInvocation"] | components["schemas"]["ConditioningInvocation"] | components["schemas"]["CalculateImageTilesEvenSplitInvocation"] | components["schemas"]["ImageConvertInvocation"] | components["schemas"]["RangeOfSizeInvocation"] | components["schemas"]["ImageNSFWBlurInvocation"] | components["schemas"]["SchedulerInvocation"] | 
components["schemas"]["ResizeLatentsInvocation"] | components["schemas"]["FloatLinearRangeInvocation"] | components["schemas"]["RoundInvocation"] | components["schemas"]["T2IAdapterInvocation"] | components["schemas"]["CreateDenoiseMaskInvocation"] | components["schemas"]["ImageCropInvocation"] | components["schemas"]["AddInvocation"] | components["schemas"]["ColorMapImageProcessorInvocation"] | components["schemas"]["CompelInvocation"] | components["schemas"]["InfillTileInvocation"] | components["schemas"]["ImagePasteInvocation"] | components["schemas"]["ZoeDepthImageProcessorInvocation"] | components["schemas"]["MidasDepthImageProcessorInvocation"] | components["schemas"]["CalculateImageTilesMinimumOverlapInvocation"] | components["schemas"]["ShowImageInvocation"] | components["schemas"]["ImageResizeInvocation"] | components["schemas"]["CanvasPasteBackInvocation"] | components["schemas"]["LoRALoaderInvocation"] | components["schemas"]["MaskFromAlphaInvocation"] | components["schemas"]["ColorInvocation"] | components["schemas"]["MaskCombineInvocation"] | components["schemas"]["StringJoinInvocation"] | components["schemas"]["FaceOffInvocation"] | components["schemas"]["ControlNetInvocation"] | components["schemas"]["TileToPropertiesInvocation"] | components["schemas"]["RandomIntInvocation"] | components["schemas"]["DenoiseLatentsInvocation"] | components["schemas"]["LeresImageProcessorInvocation"] | components["schemas"]["StringCollectionInvocation"] | components["schemas"]["ImageWatermarkInvocation"] | components["schemas"]["CLIPSkipInvocation"] | components["schemas"]["ImageChannelMultiplyInvocation"] | components["schemas"]["CropLatentsCoreInvocation"] | components["schemas"]["SegmentAnythingProcessorInvocation"] | components["schemas"]["IntegerInvocation"] | components["schemas"]["UnsharpMaskInvocation"] | components["schemas"]["SDXLRefinerCompelPromptInvocation"] | components["schemas"]["SDXLModelLoaderInvocation"] | 
components["schemas"]["IdealSizeInvocation"] | components["schemas"]["FloatMathInvocation"] | components["schemas"]["ContentShuffleImageProcessorInvocation"] | components["schemas"]["ImageHueAdjustmentInvocation"] | components["schemas"]["CannyImageProcessorInvocation"] | components["schemas"]["PairTileImageInvocation"] | components["schemas"]["IntegerMathInvocation"] | components["schemas"]["DepthAnythingImageProcessorInvocation"] | components["schemas"]["PromptsFromFileInvocation"] | components["schemas"]["SDXLRefinerModelLoaderInvocation"] | components["schemas"]["NormalbaeImageProcessorInvocation"] | components["schemas"]["InfillPatchMatchInvocation"];
};
/**
* Edges
@@ -4141,7 +4147,7 @@ export type components = {
* @description The results of node executions
*/
results: {
- [key: string]: components["schemas"]["UNetOutput"] | components["schemas"]["IntegerCollectionOutput"] | components["schemas"]["SDXLModelLoaderOutput"] | components["schemas"]["StringCollectionOutput"] | components["schemas"]["LoRALoaderOutput"] | components["schemas"]["FaceMaskOutput"] | components["schemas"]["FaceOffOutput"] | components["schemas"]["DenoiseMaskOutput"] | components["schemas"]["ImageCollectionOutput"] | components["schemas"]["NoiseOutput"] | components["schemas"]["StringPosNegOutput"] | components["schemas"]["StringOutput"] | components["schemas"]["IntegerOutput"] | components["schemas"]["IterateInvocationOutput"] | components["schemas"]["ImageOutput"] | components["schemas"]["ColorCollectionOutput"] | components["schemas"]["MetadataItemOutput"] | components["schemas"]["LatentsCollectionOutput"] | components["schemas"]["FloatOutput"] | components["schemas"]["TileToPropertiesOutput"] | components["schemas"]["IdealSizeOutput"] | components["schemas"]["T2IAdapterOutput"] | components["schemas"]["GradientMaskOutput"] | components["schemas"]["ConditioningCollectionOutput"] | components["schemas"]["CLIPOutput"] | components["schemas"]["SchedulerOutput"] | components["schemas"]["SeamlessModeOutput"] | components["schemas"]["ControlOutput"] | components["schemas"]["MetadataOutput"] | components["schemas"]["SDXLLoRALoaderOutput"] | components["schemas"]["ConditioningOutput"] | components["schemas"]["CLIPSkipInvocationOutput"] | components["schemas"]["BooleanCollectionOutput"] | components["schemas"]["ModelLoaderOutput"] | components["schemas"]["SDXLRefinerModelLoaderOutput"] | components["schemas"]["CollectInvocationOutput"] | components["schemas"]["BooleanOutput"] | components["schemas"]["String2Output"] | components["schemas"]["VAEOutput"] | components["schemas"]["CalculateImageTilesOutput"] | components["schemas"]["LatentsOutput"] | components["schemas"]["ColorOutput"] | components["schemas"]["FloatCollectionOutput"] | 
components["schemas"]["IPAdapterOutput"] | components["schemas"]["PairTileImageOutput"];
+ [key: string]: components["schemas"]["SchedulerOutput"] | components["schemas"]["CLIPOutput"] | components["schemas"]["GradientMaskOutput"] | components["schemas"]["CollectInvocationOutput"] | components["schemas"]["DenoiseMaskOutput"] | components["schemas"]["FloatCollectionOutput"] | components["schemas"]["NoiseOutput"] | components["schemas"]["ImageOutput"] | components["schemas"]["StringCollectionOutput"] | components["schemas"]["ModelLoaderOutput"] | components["schemas"]["SeamlessModeOutput"] | components["schemas"]["MetadataOutput"] | components["schemas"]["FaceOffOutput"] | components["schemas"]["ColorOutput"] | components["schemas"]["IntegerCollectionOutput"] | components["schemas"]["ConditioningOutput"] | components["schemas"]["VAEOutput"] | components["schemas"]["MetadataItemOutput"] | components["schemas"]["PairTileImageOutput"] | components["schemas"]["UNetOutput"] | components["schemas"]["StringOutput"] | components["schemas"]["ControlOutput"] | components["schemas"]["LatentsCollectionOutput"] | components["schemas"]["SDXLRefinerModelLoaderOutput"] | components["schemas"]["CalculateImageTilesOutput"] | components["schemas"]["CLIPSkipInvocationOutput"] | components["schemas"]["TileToPropertiesOutput"] | components["schemas"]["SDXLLoRALoaderOutput"] | components["schemas"]["ColorCollectionOutput"] | components["schemas"]["LatentsOutput"] | components["schemas"]["StringPosNegOutput"] | components["schemas"]["BooleanOutput"] | components["schemas"]["FaceMaskOutput"] | components["schemas"]["SDXLModelLoaderOutput"] | components["schemas"]["LoRALoaderOutput"] | components["schemas"]["String2Output"] | components["schemas"]["BooleanCollectionOutput"] | components["schemas"]["IPAdapterOutput"] | components["schemas"]["ConditioningCollectionOutput"] | components["schemas"]["T2IAdapterOutput"] | components["schemas"]["ImageCollectionOutput"] | components["schemas"]["FloatOutput"] | components["schemas"]["IdealSizeOutput"] | 
components["schemas"]["IntegerOutput"] | components["schemas"]["IterateInvocationOutput"];
};
/**
* Errors
@@ -7013,6 +7019,12 @@ export type components = {
* @default 0.5
*/
min_confidence?: number;
+ /**
+ * Image Resolution
+ * @description Pixel resolution for output image
+ * @default 512
+ */
+ image_resolution?: number;
/**
* type
* @default mediapipe_face_processor
@@ -7262,6 +7274,12 @@ export type components = {
* @default 0.1
*/
bg_th?: number;
+ /**
+ * Image Resolution
+ * @description Pixel resolution for output image
+ * @default 512
+ */
+ image_resolution?: number;
/**
* type
* @default midas_depth_image_processor
@@ -8949,6 +8967,12 @@ export type components = {
use_cache?: boolean;
/** @description The image to process */
image?: components["schemas"]["ImageField"];
+ /**
+ * Image Resolution
+ * @description Pixel resolution for output image
+ * @default 512
+ */
+ image_resolution?: number;
/**
* type
* @default segment_anything_processor
@@ -10741,6 +10765,12 @@ export type components = {
use_cache?: boolean;
/** @description The image to process */
image?: components["schemas"]["ImageField"];
+ /**
+ * Image Resolution
+ * @description Pixel resolution for output image
+ * @default 512
+ */
+ image_resolution?: number;
/**
* type
* @default zoe_depth_image_processor