import gc
import unittest

import torch

from diffusers import (
    SanaTransformer2DModel,
)
from diffusers.utils.testing_utils import (
    backend_empty_cache,
    enable_full_determinism,
    require_torch_accelerator,
    torch_device,
)


enable_full_determinism()


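# Compare single-file (.pth) loading of the Sana transformer against the converted
# diffusers-format checkpoint referenced by repo_id.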
@require_torch_accelerator
class SanaTransformer2DModelSingleFileTests(unittest.TestCase):
    model_class = SanaTransformer2DModel
    ckpt_path = (
        "https://huggingface.co/Efficient-Large-Model/Sana_1600M_1024px/blob/main/checkpoints/Sana_1600M_1024px.pth"
    )
    alternate_keys_ckpt_paths = [
        "https://huggingface.co/Efficient-Large-Model/Sana_1600M_1024px/blob/main/checkpoints/Sana_1600M_1024px.pth"
    ]
    repo_id = "Efficient-Large-Model/Sana_1600M_1024px_diffusers"

    def setUp(self):
        super().setUp()
        gc.collect()
        backend_empty_cache(torch_device)

    def tearDown(self):
        super().tearDown()
        gc.collect()
        backend_empty_cache(torch_device)

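    # Loading from the original .pth checkpoint and from the diffusers repo should
    # yield the same transformer config, ignoring loading-only metadata keys.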
    def test_single_file_components(self):
        model = self.model_class.from_pretrained(self.repo_id, subfolder="transformer")
        model_single_file = self.model_class.from_single_file(self.ckpt_path)

        PARAMS_TO_IGNORE = ["torch_dtype", "_name_or_path", "_use_default_values", "_diffusers_version"]
        for param_name, param_value in model_single_file.config.items():
            if param_name in PARAMS_TO_IGNORE:
                continue
            assert model.config[param_name] == param_value, (
                f"{param_name} differs between single file loading and pretrained loading"
            )

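    # Every checkpoint in alternate_keys_ckpt_paths should load cleanly via from_single_file.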
    def test_checkpoint_loading(self):
        for ckpt_path in self.alternate_keys_ckpt_paths:
            torch.cuda.empty_cache()
            model = self.model_class.from_single_file(ckpt_path)

            del model
            gc.collect()
            torch.cuda.empty_cache()
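
# Minimal usage sketch of the same single-file loading path, kept as a comment so it
# does not execute at import time. Assumptions: SanaPipeline accepts a transformer
# override, the diffusers repo above hosts the full pipeline, and fp16 is chosen
# only for illustration.
#
#   import torch
#   from diffusers import SanaPipeline, SanaTransformer2DModel
#
#   transformer = SanaTransformer2DModel.from_single_file(
#       "https://huggingface.co/Efficient-Large-Model/Sana_1600M_1024px/blob/main/checkpoints/Sana_1600M_1024px.pth",
#       torch_dtype=torch.float16,
#   )
#   pipe = SanaPipeline.from_pretrained(
#       "Efficient-Large-Model/Sana_1600M_1024px_diffusers",
#       transformer=transformer,
#       torch_dtype=torch.float16,
#   )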