From b748b48dbbaf6ddd011f032f5ede47aea094a208 Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Wed, 3 Apr 2024 12:43:08 +0900
Subject: [PATCH] fix attention couple + deep shrink causing an error at some resolutions

---
 networks/lora.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/networks/lora.py b/networks/lora.py
index 948b30b0e..d1208040f 100644
--- a/networks/lora.py
+++ b/networks/lora.py
@@ -247,14 +247,13 @@
     def get_mask_for_x(self, x):
         area = x.size()[1]
 
         mask = self.network.mask_dic.get(area, None)
-        if mask is None:
-            # raise ValueError(f"mask is None for resolution {area}")
+        if mask is None or len(x.size()) == 2:
             # emb_layers in SDXL doesn't have mask
             # if "emb" not in self.lora_name:
             #     print(f"mask is None for resolution {self.lora_name}, {area}, {x.size()}")
             mask_size = (1, x.size()[1]) if len(x.size()) == 2 else (1, *x.size()[1:-1], 1)
             return torch.ones(mask_size, dtype=x.dtype, device=x.device) / self.network.num_sub_prompts
-        if len(x.size()) != 4:
+        if len(x.size()) == 3:
             mask = torch.reshape(mask, (1, -1, 1))
         return mask
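
For context, here is a minimal, self-contained sketch of the patched get_mask_for_x logic. The mask_dic contents, the 64*64 area key, and num_sub_prompts = 2 are hypothetical stand-ins for illustration; in the real code these come from the network's attention-couple setup. It shows the two changed branches: 2-D inputs (such as SDXL's emb_layers output) now always take the uniform-mask fallback, and only 3-D attention inputs reshape the stored mask for broadcasting.

```python
import torch

# Hypothetical stand-ins: the real network builds mask_dic from the attention
# couple region masks and sets num_sub_prompts from the prompt split.
num_sub_prompts = 2
mask_dic = {64 * 64: torch.rand(1, 64 * 64)}  # stored mask keyed by spatial area

def get_mask_for_x(x):
    area = x.size()[1]
    mask = mask_dic.get(area, None)
    if mask is None or len(x.size()) == 2:
        # 2-D inputs (emb_layers in SDXL) have no mask: return a uniform one
        mask_size = (1, x.size()[1]) if len(x.size()) == 2 else (1, *x.size()[1:-1], 1)
        return torch.ones(mask_size, dtype=x.dtype, device=x.device) / num_sub_prompts
    if len(x.size()) == 3:
        # (batch, tokens, dim) attention input: reshape mask to broadcast over dim
        mask = torch.reshape(mask, (1, -1, 1))
    return mask

# A 2-D emb input whose feature dimension collides with a stored area key:
# post-patch it gets a uniform fallback mask of the matching rank.
print(get_mask_for_x(torch.zeros(2, 64 * 64)).shape)       # torch.Size([1, 4096])
print(get_mask_for_x(torch.zeros(2, 64 * 64, 640)).shape)  # torch.Size([1, 4096, 1])
```

Reading the diff, the pre-patch failure mode appears to be exactly that collision: with Deep Shrink active at certain resolutions, a 2-D emb input's feature dimension could equal a stored mask's area key, so the old `if mask is None:` branch was skipped and `if len(x.size()) != 4:` reshaped the spatial mask to 3-D, which could not be applied to the 2-D input. Gating on `len(x.size()) == 2` and narrowing the reshape to `len(x.size()) == 3` closes both paths.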