Skip to content

Commit

Permalink
Merge pull request #90 from chrisgoringe/masks
Browse files Browse the repository at this point in the history
Mask support #86
  • Loading branch information
chrisgoringe committed Mar 26, 2024
2 parents 00f2b74 + 67dfeac commit 4ffbbbe
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 15 deletions.
5 changes: 5 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ This uses only regular nodes and the Unified Chooser. If you're going to try the

## Recent changes

2.11 (26 March 2024)
- added masks

2.10 (12 March 2024)
- added `Repeat last selection`

Expand Down Expand Up @@ -61,6 +64,8 @@ Once the run finishes you can `Progress... (as restart)` to send one or more ima

You should always have the image input connected; if you have the latent input connected, the same latents are output as images (in the same order - which is actually the order in which you selected them). I *very strongly* suggest that you always pass the latents through the `Preview Chooser` - the order of node execution, and the ability to restart, are much more reliable if you do.

If you wish, you can also pass any masks used through in a similar way. You can also just pass a single mask into the chooser; it will be turned into a batch of masks the same length as the output.

As you can see from the workflow, it is now possible to have multiple choosers, and to mix and match image and latent choosers.

You can control the position of the HUD (in the top left) using the main settings menu.
Expand Down
37 changes: 22 additions & 15 deletions image_chooser_preview.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
import random

class PreviewAndChoose(PreviewImage):
RETURN_TYPES = ("IMAGE","LATENT",)
RETURN_NAMES = ("images","latent",)
RETURN_TYPES = ("IMAGE","LATENT","MASK")
RETURN_NAMES = ("images","latents","masks")
FUNCTION = "func"
CATEGORY = "image_chooser"
INPUT_IS_LIST=True
Expand All @@ -21,7 +21,7 @@ def INPUT_TYPES(s):
"mode" : (["Always pause", "Repeat last selection", "Only pause if batch", "Progress first pick", "Pass through", "Take First n", "Take Last n"],{}),
"count": ("INT", { "default": 1, "min": 1, "max": 999, "step": 1 }),
},
"optional": {"images": ("IMAGE", ), "latents": ("LATENT", ), },
"optional": {"images": ("IMAGE", ), "latents": ("LATENT", ), "masks": ("MASK", ) },
"hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "id":"UNIQUE_ID"},
}

Expand Down Expand Up @@ -50,16 +50,19 @@ def func(self, id, **kwargs):
if 'images' in kwargs:
my_stash['images'] = kwargs['images']
my_stash['latents'] = kwargs.get('latents', None)
my_stash['masks'] = kwargs.get('masks', None)
else:
kwargs['images'] = my_stash.get('images', None)
kwargs['latents'] = my_stash.get('latents', None)
kwargs['masks'] = my_stash.get('masks', None)

if (kwargs['images'] is None):
return (None, None,)
return (None, None, None,)

# convert list to batch
images_in = torch.cat(kwargs.pop('images'))
latents_in = kwargs.pop('latents', None)
masks_in = torch.cat(kwargs.pop('masks')) if kwargs.get('masks', None) is not None else None
latent_samples_in = torch.cat(list(l['samples'] for l in latents_in)) if latents_in is not None else None
self.batch = images_in.shape[0]

Expand All @@ -81,30 +84,34 @@ def func(self, id, **kwargs):
raise InterruptProcessingException()
#return (None, None,)

return self.batch_up_selections(images_in, latent_samples_in, selections, mode)
return self.batch_up_selections(images_in=images_in, latent_samples_in=latent_samples_in, masks_in=masks_in, selections=selections, mode=mode)

def tensor_bundle(self, tensor_in: torch.Tensor, picks):
    """Select rows of *tensor_in* by the indices in *picks* and return them
    as a new batch tensor.

    Indices are taken modulo the input's batch size, so picks past the end
    of the batch wrap around. Returns None when there is no input tensor or
    nothing was picked.
    """
    # Bug fix: a stale unguarded `return` line (pre-change diff residue) ran
    # before this guard, so the None/empty-picks handling was unreachable and
    # it used self.batch instead of the tensor's own batch size. Removed.
    if tensor_in is not None and len(picks):
        batch = tensor_in.shape[0]
        # Index modulo the batch so out-of-range picks wrap; the reshape
        # preserves the trailing (non-batch) dimensions of the input.
        return torch.cat(tuple(tensor_in[x % batch].unsqueeze_(0) for x in picks)).reshape([-1] + list(tensor_in.shape[1:]))
    else:
        return None

def latent_bundle(self, latent_samples_in: torch.Tensor, picks):
    """Pick rows from the latent sample batch and wrap them in the
    standard ``{"samples": tensor}`` latent dict.

    Returns None when there are no latents or nothing was picked.
    """
    if latent_samples_in is None or not len(picks):
        return None
    return {"samples": self.tensor_bundle(latent_samples_in, picks)}

def batch_up_selections(self, images_in: torch.Tensor, latent_samples_in: torch.Tensor, masks_in: torch.Tensor, selections, mode):
    """Reduce the incoming batch to the chosen images, latents and masks.

    *mode* selects the picking rule: the three batch modes below pick by
    position; any other mode uses the explicit *selections* list (with the
    -1 sentinel entries dropped). Returns an (images, latents, masks) tuple;
    each element may be None when the corresponding input is absent.
    """
    # Bug fix: stale pre-change `return (...)` lines (diff residue) inside the
    # first three branches returned 2-tuples without masks and made the
    # `chosen` assignments unreachable. Removed so every mode flows through
    # the single 3-tuple return below.
    if (mode == "Pass through"):
        chosen = range(0, self.batch)  # keep the whole batch
    elif (mode == "Take First n"):
        # Clamp n to the batch size.
        end = self.count if self.batch >= self.count else self.batch
        chosen = range(0, end)
    elif (mode == "Take Last n"):
        # Clamp the start so we never index before the first element.
        start = self.batch - self.count if self.batch - self.count >= 0 else 0
        chosen = range(start, self.batch)
    else:
        chosen = [x for x in selections if x >= 0]  # drop -1 sentinel entries

    return (self.tensor_bundle(images_in, chosen), self.latent_bundle(latent_samples_in, chosen), self.tensor_bundle(masks_in, chosen))

class PreviewAndChooseDouble(PreviewAndChoose):
RETURN_TYPES = ("LATENT","LATENT",)
Expand All @@ -119,7 +126,7 @@ def INPUT_TYPES(s):
"hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "id":"UNIQUE_ID"},
}

def batch_up_selections(self, images_in, latent_samples_in, selections:list, mode):
def batch_up_selections(self, images_in, latent_samples_in, masks_in, selections:list, mode):
divider = selections.index(-1)
latents_out_good = self.latent_bundle(latent_samples_in, selections[:divider])
latents_out_bad = self.latent_bundle(latent_samples_in, selections[divider+1:])
Expand Down

0 comments on commit 4ffbbbe

Please sign in to comment.