Merge pull request #57 from w4ffl35/bugfix-for-mega-model
Fixes #16 - mega model running out of memory
This commit is contained in:
commit
9602055eaa
|
@@ -142,6 +142,7 @@ class MinDalle:
|
|||
params = torch.load(self.detoker_params_path)
|
||||
self.detokenizer.load_state_dict(params)
|
||||
del params
|
||||
torch.cuda.empty_cache()
|
||||
if torch.cuda.is_available(): self.detokenizer = self.detokenizer.cuda()
|
||||
|
||||
|
||||
|
@@ -175,6 +176,7 @@ class MinDalle:
|
|||
encoder_state
|
||||
)
|
||||
if not self.is_reusable: del self.decoder
|
||||
torch.cuda.empty_cache()
|
||||
return image_tokens
|
||||
|
||||
|
||||
|
|
Loading…
Reference in New Issue
Block a user