author     AUTOMATIC <16777216c@gmail.com>           2023-05-10 11:37:18 +0300
committer  AUTOMATIC <16777216c@gmail.com>           2023-05-10 11:37:18 +0300
commit     a5121e7a0623db328a9462d340d389ed6737374a (patch)
tree       2fa6051d457555ef2d61793af2756a44c0ea221c /extensions-builtin
parent     550256db1ce18778a9d56ff343d844c61b9f9b83 (diff)
fixes for B007
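B007 is the flake8-bugbear rule (also enforced by Ruff) for a loop control variable that is never used inside the loop body. The fixes in this commit follow the standard remedies: rename the unused variable to `_`, or drop it from the loop entirely. A minimal sketch of the rename pattern, reusing the `n_runs` name from the LDSR hunk below:

```python
n_runs = 3

# Flagged by B007: `n` is never referenced inside the loop body.
for n in range(n_runs):
    print("run")

# Fixed: `_` signals that the loop variable is intentionally unused.
for _ in range(n_runs):
    print("run")
```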
Diffstat (limited to 'extensions-builtin')
-rw-r--r--  extensions-builtin/LDSR/ldsr_model_arch.py         | 2 +-
-rw-r--r--  extensions-builtin/Lora/lora.py                     | 2 +-
-rw-r--r--  extensions-builtin/ScuNET/scripts/scunet_model.py   | 2 +-
-rw-r--r--  extensions-builtin/SwinIR/swinir_model_arch.py      | 2 +-
-rw-r--r--  extensions-builtin/SwinIR/swinir_model_arch_v2.py   | 2 +-
5 files changed, 5 insertions, 5 deletions
diff --git a/extensions-builtin/LDSR/ldsr_model_arch.py b/extensions-builtin/LDSR/ldsr_model_arch.py
index a5fb8907..27e38549 100644
--- a/extensions-builtin/LDSR/ldsr_model_arch.py
+++ b/extensions-builtin/LDSR/ldsr_model_arch.py
@@ -88,7 +88,7 @@ class LDSR:
         x_t = None
         logs = None
-        for n in range(n_runs):
+        for _ in range(n_runs):
             if custom_shape is not None:
                 x_t = torch.randn(1, custom_shape[1], custom_shape[2], custom_shape[3]).to(model.device)
                 x_t = repeat(x_t, '1 c h w -> b c h w', b=custom_shape[0])
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 9795540f..7b56136f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -418,7 +418,7 @@ def infotext_pasted(infotext, params):
     added = []
-    for k, v in params.items():
+    for k in params:
         if not k.startswith("AddNet Model "):
             continue
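In the lora.py hunk the unused name is the dict value, so instead of renaming it to `_` the loop iterates the dict directly; iterating a dict yields its keys, which is all the body needs. A small sketch with a hypothetical `params` dict (the real one is parsed from generation infotext):

```python
# Hypothetical example values; the real params come from parsed infotext.
params = {"AddNet Model 1": "some_lora", "Sampler": "Euler a"}

# Before: `for k, v in params.items():` left `v` unused (B007).
# After: iterating the dict directly yields the same keys.
for k in params:
    if not k.startswith("AddNet Model "):
        continue
    print(k)  # only "AddNet Model 1" reaches here
```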
diff --git a/extensions-builtin/ScuNET/scripts/scunet_model.py b/extensions-builtin/ScuNET/scripts/scunet_model.py
index aa2fdb3a..1f5ea0d3 100644
--- a/extensions-builtin/ScuNET/scripts/scunet_model.py
+++ b/extensions-builtin/ScuNET/scripts/scunet_model.py
@@ -132,7 +132,7 @@ class UpscalerScuNET(modules.upscaler.Upscaler):
         model = net(in_nc=3, config=[4, 4, 4, 4, 4, 4, 4], dim=64)
         model.load_state_dict(torch.load(filename), strict=True)
         model.eval()
-        for k, v in model.named_parameters():
+        for _, v in model.named_parameters():
             v.requires_grad = False
         model = model.to(device)
diff --git a/extensions-builtin/SwinIR/swinir_model_arch.py b/extensions-builtin/SwinIR/swinir_model_arch.py
index 75f7bedc..de195d9b 100644
--- a/extensions-builtin/SwinIR/swinir_model_arch.py
+++ b/extensions-builtin/SwinIR/swinir_model_arch.py
@@ -848,7 +848,7 @@ class SwinIR(nn.Module):
         H, W = self.patches_resolution
         flops += H * W * 3 * self.embed_dim * 9
         flops += self.patch_embed.flops()
-        for i, layer in enumerate(self.layers):
+        for layer in self.layers:
             flops += layer.flops()
         flops += H * W * 3 * self.embed_dim * self.embed_dim
         flops += self.upsample.flops()
diff --git a/extensions-builtin/SwinIR/swinir_model_arch_v2.py b/extensions-builtin/SwinIR/swinir_model_arch_v2.py
index d4c0b0da..15777af9 100644
--- a/extensions-builtin/SwinIR/swinir_model_arch_v2.py
+++ b/extensions-builtin/SwinIR/swinir_model_arch_v2.py
@@ -1001,7 +1001,7 @@ class Swin2SR(nn.Module):
         H, W = self.patches_resolution
         flops += H * W * 3 * self.embed_dim * 9
         flops += self.patch_embed.flops()
-        for i, layer in enumerate(self.layers):
+        for layer in self.layers:
             flops += layer.flops()
         flops += H * W * 3 * self.embed_dim * self.embed_dim
         flops += self.upsample.flops()
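The ScuNET hunk keeps the tuple unpacking but renames the unused key to `_`, while both SwinIR hunks drop `enumerate` entirely because the index was never read. A sketch of the `enumerate` case, with a hypothetical `Layer` stand-in for the Swin transformer layers:

```python
class Layer:
    """Hypothetical stand-in for a SwinIR transformer layer."""
    def flops(self):
        return 1_000_000

layers = [Layer() for _ in range(4)]
flops = 0

# Before: `for i, layer in enumerate(layers):` left `i` unused (B007).
# After: iterate the sequence directly when no index is needed.
for layer in layers:
    flops += layer.flops()

print(flops)  # 4000000
```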