diff options
author | AUTOMATIC1111 <16777216c@gmail.com> | 2022-12-03 09:58:08 +0300 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-12-03 09:58:08 +0300 |
commit | a2feaa95fc0c4c94131eb75b5b1bc0eaa1696551 (patch) | |
tree | a7be0e8b6849aae24f7f6c7879f7ddf43d118425 /modules/hypernetworks/hypernetwork.py | |
parent | c7af672186ec09a514f0e78aa21155264e56c130 (diff) | |
parent | 0fddb4a1c06a6e2122add7eee3b001a6d473baee (diff) |
Merge pull request #5194 from brkirch/autocast-and-mps-randn-fixes
Use devices.autocast() and fix MPS randn issues
Diffstat (limited to 'modules/hypernetworks/hypernetwork.py')
-rw-r--r-- | modules/hypernetworks/hypernetwork.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py
index 8466887f..eb5ae372 100644
--- a/modules/hypernetworks/hypernetwork.py
+++ b/modules/hypernetworks/hypernetwork.py
@@ -495,7 +495,7 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, gradient_step,
 if shared.state.interrupted:
break
- with torch.autocast("cuda"):
+ with devices.autocast():
x = batch.latent_sample.to(devices.device, non_blocking=pin_memory)
if tag_drop_out != 0 or shuffle_tags:
shared.sd_model.cond_stage_model.to(devices.device)
|