"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "1121ce9f98f718d67c3976dd30bb44933730ad56"
Unverified commit 8c40ba73, authored by Yih-Dar, committed by GitHub

Avoid failure in `check_repo.py` due to missing backends (#21930)



* Update utils/check_repo.py

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Update utils/check_repo.py

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

---------

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent d4306dae
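In short: `check_repo.py` previously ran its backend-availability check only inside `check_all_models_are_auto_configured`, while the other auto-mapping checks assumed all three backends were importable and failed outright when one was missing. This commit factors the check into a reusable `check_missing_backends()` helper, calls it from each affected check, and makes the dynamic auto-module lookups tolerate absent backends. A minimal self-contained sketch of the helper's pattern follows; the `backend_available` probe is a hypothetical stand-in for the library's real `is_torch_available`/`is_tf_available`/`is_flax_available` utilities, so treat it as illustrative only:

import importlib.util
import os
import warnings

# Matches the truthy values transformers accepts for env flags.
ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}
BACKENDS = [("PyTorch", "torch"), ("TensorFlow", "tensorflow"), ("Flax", "flax")]


def backend_available(module_name):
    # Rough availability probe: is the package importable at all?
    # (Stand-in for transformers' per-backend is_*_available helpers.)
    return importlib.util.find_spec(module_name) is not None


def check_missing_backends():
    missing = [name for name, module in BACKENDS if not backend_available(module)]
    if not missing:
        return
    message = f"Full repo consistency checks require all backends; missing: {', '.join(missing)}."
    if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
        raise Exception(message)  # on CI, an incomplete environment is a hard error
    warnings.warn(message)  # locally, an incomplete environment only warns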
@@ -323,6 +323,30 @@ MODEL_TYPE_TO_DOC_MAPPING = OrderedDict(

 transformers = direct_transformers_import(PATH_TO_TRANSFORMERS)


+def check_missing_backends():
+    missing_backends = []
+    if not is_torch_available():
+        missing_backends.append("PyTorch")
+    if not is_tf_available():
+        missing_backends.append("TensorFlow")
+    if not is_flax_available():
+        missing_backends.append("Flax")
+    if len(missing_backends) > 0:
+        missing = ", ".join(missing_backends)
+        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
+            raise Exception(
+                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in the "
+                f"Transformers repo, the following are missing: {missing}."
+            )
+        else:
+            warnings.warn(
+                "Full repo consistency checks require all backends to be installed (with `pip install -e .[dev]` in the "
+                f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
+                "didn't make any change in one of those backends modeling files, you should probably execute the "
+                "command above to be on the safe side."
+            )
+
+
 def check_model_list():
     """Check the model list inside the transformers library."""
     # Get the models from the directory structure of `src/transformers/models/`
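Note that the raise-versus-warn split above hinges entirely on the `TRANSFORMERS_IS_CI` environment variable. A quick, hypothetical way to exercise both paths, assuming at least one backend is absent and `check_missing_backends` has been imported from `utils/check_repo.py`:

import os
import warnings

# Local mode: TRANSFORMERS_IS_CI unset, a missing backend only warns.
os.environ.pop("TRANSFORMERS_IS_CI", None)
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    check_missing_backends()
print(f"warnings emitted: {len(caught)}")  # 1 if any backend is missing

# CI mode: any truthy value ("1", "ON", "YES", "TRUE") makes it raise instead.
os.environ["TRANSFORMERS_IS_CI"] = "yes"
try:
    check_missing_backends()
except Exception as err:
    print(f"raised on CI: {err}")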
@@ -578,27 +602,7 @@ def check_models_are_auto_configured(module, all_auto_models):

 def check_all_models_are_auto_configured():
     """Check all models are each in an auto class."""
-    missing_backends = []
-    if not is_torch_available():
-        missing_backends.append("PyTorch")
-    if not is_tf_available():
-        missing_backends.append("TensorFlow")
-    if not is_flax_available():
-        missing_backends.append("Flax")
-    if len(missing_backends) > 0:
-        missing = ", ".join(missing_backends)
-        if os.getenv("TRANSFORMERS_IS_CI", "").upper() in ENV_VARS_TRUE_VALUES:
-            raise Exception(
-                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
-                f"Transformers repo, the following are missing: {missing}."
-            )
-        else:
-            warnings.warn(
-                "Full quality checks require all backends to be installed (with `pip install -e .[dev]` in the "
-                f"Transformers repo, the following are missing: {missing}. While it's probably fine as long as you "
-                "didn't make any change in one of those backends modeling files, you should probably execute the "
-                "command above to be on the safe side."
-            )
+    check_missing_backends()
     modules = get_model_modules()
     all_auto_models = get_all_auto_configured_models()
     failures = []
@@ -612,8 +616,9 @@ def check_all_models_are_auto_configured():

 def check_all_auto_object_names_being_defined():
     """Check all names defined in auto (name) mappings exist in the library."""
-    failures = []
+    check_missing_backends()
+    failures = []
     mappings_to_check = {
         "TOKENIZER_MAPPING_NAMES": TOKENIZER_MAPPING_NAMES,
         "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
@@ -623,7 +628,9 @@ def check_all_auto_object_names_being_defined():

     # Each auto modeling files contains multiple mappings. Let's get them in a dynamic way.
     for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
-        module = getattr(transformers.models.auto, module_name)
+        module = getattr(transformers.models.auto, module_name, None)
+        if module is None:
+            continue
         # all mappings in a single auto modeling file
         mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
         mappings_to_check.update({name: getattr(module, name) for name in mapping_names})
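The three-argument `getattr` is the second half of the fix: roughly speaking, when a backend is not installed, transformers' lazy import machinery does not expose the corresponding `modeling_tf_auto`/`modeling_flax_auto` submodule, so the old two-argument lookup raised `AttributeError`. Passing a `None` default and `continue`-ing simply skips the absent backend. A toy illustration of the pattern, using `SimpleNamespace` as a hypothetical stand-in for `transformers.models.auto` in a PyTorch-only environment:

from types import SimpleNamespace

# Stand-in package: the TF/Flax submodules are simply not present as attributes.
auto_package = SimpleNamespace(modeling_auto="<pt mappings>")

for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
    module = getattr(auto_package, module_name, None)  # None instead of AttributeError
    if module is None:
        continue  # backend not installed: nothing to check for it
    print(f"checking mappings in {module_name}")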
@@ -650,8 +657,9 @@ def check_all_auto_object_names_being_defined():

 def check_all_auto_mapping_names_in_config_mapping_names():
     """Check all keys defined in auto mappings (mappings of names) appear in `CONFIG_MAPPING_NAMES`."""
-    failures = []
+    check_missing_backends()
+    failures = []
     # `TOKENIZER_PROCESSOR_MAPPING_NAMES` and `AutoTokenizer` is special, and don't need to follow the rule.
     mappings_to_check = {
         "IMAGE_PROCESSOR_MAPPING_NAMES": IMAGE_PROCESSOR_MAPPING_NAMES,
@@ -661,7 +669,9 @@ def check_all_auto_mapping_names_in_config_mapping_names():

     # Each auto modeling files contains multiple mappings. Let's get them in a dynamic way.
     for module_name in ["modeling_auto", "modeling_tf_auto", "modeling_flax_auto"]:
-        module = getattr(transformers.models.auto, module_name)
+        module = getattr(transformers.models.auto, module_name, None)
+        if module is None:
+            continue
         # all mappings in a single auto modeling file
         mapping_names = [x for x in dir(module) if x.endswith("_MAPPING_NAMES")]
         mappings_to_check.update({name: getattr(module, name) for name in mapping_names})
...