Merge pull request #57 from w4ffl35/bugfix-for-mega-model

Fixes #16 - mega model running out of memory
Brett Kuprel
2022-07-04 07:16:39 -04:00
committed by GitHub


@@ -142,6 +142,7 @@ class MinDalle:
         params = torch.load(self.detoker_params_path)
         self.detokenizer.load_state_dict(params)
         del params
+        torch.cuda.empty_cache()
         if torch.cuda.is_available(): self.detokenizer = self.detokenizer.cuda()
@@ -175,6 +176,7 @@ class MinDalle:
             encoder_state
         )
         if not self.is_reusable: del self.decoder
+        torch.cuda.empty_cache()
         return image_tokens
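
For context, a minimal sketch of the pattern this change applies, assuming a standard PyTorch install; the helper name load_detokenizer_weights and the params_path argument are illustrative only and not part of the min-dalle codebase:

import torch

def load_detokenizer_weights(model: torch.nn.Module, params_path: str) -> torch.nn.Module:
    # Hypothetical helper showing the commit's pattern: load a checkpoint,
    # drop the reference to the state dict, then ask PyTorch's caching
    # allocator to release the freed blocks before moving the model to the GPU.
    params = torch.load(params_path)
    model.load_state_dict(params)
    del params                    # release the checkpoint dict
    torch.cuda.empty_cache()      # no-op if CUDA is not initialized
    if torch.cuda.is_available():
        model = model.cuda()
    return model

Note that torch.cuda.empty_cache() only returns blocks held by PyTorch's caching allocator to the driver; it does not free tensors that still have live references, which is why the del comes first in both hunks above.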