- Notifications
You must be signed in to change notification settings - Fork 28.8k
/
Copy pathcheck_config_docstrings.py
102 lines (79 loc) · 3.67 KB
/
check_config_docstrings.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import re

from transformers.utils import direct_transformers_import

# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_config_docstrings.py
PATH_TO_TRANSFORMERS = "src/transformers"

# This is to make sure the transformers module imported is the one in the repo.
transformers = direct_transformers_import(PATH_TO_TRANSFORMERS)

# Mapping of model type -> config class, e.g. "bert" -> BertConfig.
CONFIG_MAPPING = transformers.models.auto.configuration_auto.CONFIG_MAPPING

# Regex pattern used to find the checkpoint mentioned in the docstring of `config_class`.
# For example, `[google-bert/bert-base-uncased](https://huggingface.co/google-bert/bert-base-uncased)`
_re_checkpoint = re.compile(r"\[(.+?)\]\((https://huggingface\.co/.+?)\)")

# Config classes exempt from the checkpoint-link requirement (e.g. composite
# configs with no single canonical checkpoint to point to).
CONFIG_CLASSES_TO_IGNORE_FOR_DOCSTRING_CHECKPOINT_CHECK = {
    "DecisionTransformerConfig",
    "EncoderDecoderConfig",
    "MusicgenConfig",
    "RagConfig",
    "SpeechEncoderDecoderConfig",
    "TimmBackboneConfig",
    "TimmWrapperConfig",
    "VisionEncoderDecoderConfig",
    "VisionTextDualEncoderConfig",
    "LlamaConfig",
    "GraniteConfig",
    "GraniteMoeConfig",
    "Qwen3MoeConfig",
    "GraniteSpeechConfig",
}
def get_checkpoint_from_config_class(config_class):
    """Return the first valid checkpoint name mentioned in `config_class`'s source.

    A valid mention is a markdown link of the form
    `[org/model](https://huggingface.co/org/model)` where the link target matches
    the link text (a trailing `/` in the URL is tolerated).

    Args:
        config_class: A class (or any object `inspect.getsource` accepts) whose
            source/docstring is scanned for checkpoint links.

    Returns:
        The checkpoint name (e.g. `"google-bert/bert-base-uncased"`), or `None`
        if no consistent checkpoint link is found.
    """
    # Matches e.g. `[google-bert/bert-base-uncased](https://huggingface.co/google-bert/bert-base-uncased)`.
    # `re.compile` caches compiled patterns, so compiling here is effectively free.
    checkpoint_pattern = re.compile(r"\[(.+?)\]\((https://huggingface\.co/.+?)\)")

    # Full source code of `config_class`, docstring included.
    config_source = inspect.getsource(config_class)

    # Each match is a (name, link) tuple, e.g.
    # ('google-bert/bert-base-uncased', 'https://huggingface.co/google-bert/bert-base-uncased')
    for ckpt_name, ckpt_link in checkpoint_pattern.findall(config_source):
        # Allow the link to end with `/`.
        if ckpt_link.endswith("/"):
            ckpt_link = ckpt_link[:-1]
        # Only accept the mention if the link actually points to the named repo.
        if ckpt_link == f"https://huggingface.co/{ckpt_name}":
            return ckpt_name

    return None
def check_config_docstrings_have_checkpoints():
    """Verify every registered config class documents a valid checkpoint link.

    Iterates over all config classes in `CONFIG_MAPPING`, skipping deprecated
    models and classes explicitly exempted in
    `CONFIG_CLASSES_TO_IGNORE_FOR_DOCSTRING_CHECKPOINT_CHECK`.

    Raises:
        ValueError: If any non-exempt config class has no valid checkpoint link
            in its docstring; the message lists the offending class names.
    """
    configs_without_checkpoint = []

    for config_class in list(CONFIG_MAPPING.values()):
        # Skip deprecated models.
        if "models.deprecated" in config_class.__module__:
            continue
        checkpoint = get_checkpoint_from_config_class(config_class)
        name = config_class.__name__
        if checkpoint is None and name not in CONFIG_CLASSES_TO_IGNORE_FOR_DOCSTRING_CHECKPOINT_CHECK:
            configs_without_checkpoint.append(name)

    if len(configs_without_checkpoint) > 0:
        # Sort for a deterministic, easy-to-scan error message.
        message = "\n".join(sorted(configs_without_checkpoint))
        raise ValueError(
            f"The following configurations don't contain any valid checkpoint:\n{message}\n\n"
            "The requirement is to include a link pointing to one of the models of this architecture in the "
            "docstring of the config classes listed above. The link should be in markdown format like "
            "[myorg/mymodel](https://huggingface.co/myorg/mymodel)."
        )
if __name__ == "__main__":
    # Entry point: run the check when invoked as a script
    # (python utils/check_config_docstrings.py from the repo root).
    check_config_docstrings_have_checkpoints()