
Commit

do code formatting
thanhnguyentung95 authored Nov 16, 2024
1 parent d5fd7a8 commit 33ca485
Showing 1 changed file with 9 additions and 3 deletions.
12 changes: 9 additions & 3 deletions aria/lora/layers.py
@@ -151,7 +151,9 @@ def forward(self, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
 
         return result
 
-    def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None) -> None:
+    def merge(
+        self, safe_merge: bool = False, adapter_names: Optional[list[str]] = None
+    ) -> None:
         """
         Merge the active adapter weights into the base weights
@@ -173,13 +175,17 @@ def merge(self, safe_merge: bool = False, adapter_names: Optional[list[str]] = N
             if active_adapter in self.lora_A.keys():
                 base_layer = self.get_base_layer()
                 if safe_merge:
-                    raise NotImplementedError("Safe merge is not supported for GroupedGemmLoraLayer, try not using it instead.")
+                    raise NotImplementedError(
+                        "Safe merge is not supported for GroupedGemmLoraLayer, try not using it instead."
+                    )
                 else:
                     delta_weight = self.get_delta_weight(active_adapter)
                     if not self.use_dora[active_adapter]:
                         base_layer.weight.data += delta_weight
                     else:
-                        raise NotImplementedError("Dora is not supported for GroupedGemmLoraLayer, try not using it instead.")
+                        raise NotImplementedError(
+                            "Dora is not supported for GroupedGemmLoraLayer, try not using it instead."
+                        )
 
                 self.merged_adapters.append(active_adapter)
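For context, a minimal sketch of the merge operation the reformatted method performs when it is supported: the low-rank delta is folded directly into the base layer's weight, mirroring the `base_layer.weight.data += delta_weight` line in the diff. The delta formula `(lora_B @ lora_A) * scaling` follows the standard LoRA convention, and the helper name `merge_lora_into_base` is hypothetical, not code from this repository.

    # Minimal sketch, assuming the standard LoRA delta-weight convention.
    # Not the repository's implementation; names and shapes are illustrative.
    import torch

    def merge_lora_into_base(
        base_weight: torch.Tensor,   # (out_features, in_features)
        lora_A: torch.Tensor,        # (r, in_features)
        lora_B: torch.Tensor,        # (out_features, r)
        scaling: float,
    ) -> torch.Tensor:
        """Return the base weight with the low-rank update folded in."""
        delta_weight = (lora_B @ lora_A) * scaling
        return base_weight + delta_weight

    # Usage example with toy shapes:
    base = torch.randn(16, 32)
    A = torch.randn(4, 32)
    B = torch.randn(16, 4)
    merged = merge_lora_into_base(base, A, B, scaling=0.5)
    assert merged.shape == base.shape

Merging in place like this removes the extra adapter matmuls at inference time, which is why the method tracks what it has folded in via `self.merged_adapters`.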
