Unverified Commit 67d07487 authored by Sylvain Gugger, committed by GitHub

Cleanup quality (#21493)

* Remove mentions of flake8/isort

* Clean up inits

* Deal with all other inits

* Last special rule for dummy files
parent 571fa585
 #!/usr/bin/env python
 # coding=utf-8
-# flake8: noqa
-# There's no way to ignore "F401 '...' imported but unused" warnings in this
-# module, but to preserve other warnings. So, don't check this module at all.
 # Copyright 2021 The HuggingFace Inc. team. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
 # This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
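For context, each of the autogenerated dummy modules above only provides placeholder objects that raise a clear error when an optional backend is missing, which is why they are regenerated by `make fix-copies` rather than edited by hand. A rough sketch of one generated entry is shown below; the class name and backend are illustrative, and the exact body is approximate rather than copied from this diff (only the file header, the import line, and the `metaclass=DummyObject` pattern are confirmed by the hunks above and the test further down).

# Sketch of a generated dummy object (illustrative name and backend, approximate body).
from transformers.utils import DummyObject, requires_backends


class SomeModel(metaclass=DummyObject):
    _backends = ["torch"]

    def __init__(self, *args, **kwargs):
        # Raises an error telling the user to install the missing backend (here, torch).
        requires_backends(self, ["torch"])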
-# flake8: noqa
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: sentencepiece_model.proto
...
@@ -43,7 +43,7 @@ open-source contribution to Transformers. Along the way, you will:
 - understand the design principles of one of the most popular NLP
   libraries
 - learn how to do efficiently test large NLP models
-- learn how to integrate Python utilities like `black`, `isort`,
+- learn how to integrate Python utilities like `black`, `ruff`,
   `make fix-copies` into a library to always ensure clean and readable
   code
...
-# flake8: noqa
-# There's no way to ignore "F401 '...' imported but unused" warnings in this
-# module, but to preserve other warnings. So, don't check this module at all.
 # Copyright 2020 The HuggingFace Team. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +13,6 @@
 # limitations under the License.
 from typing import TYPE_CHECKING
-# rely on isort to merge the imports
 from ...utils import _LazyModule, OptionalDependencyNotAvailable, is_tokenizers_available
...
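The `__init__.py` touched above follows the library's lazy-import setup: public names are declared in an `_import_structure` dict, the module is swapped for a `_LazyModule` proxy at runtime, and the `TYPE_CHECKING` branch imports names only for static analysis and re-export, which flake8 reports as F401 ("imported but unused") unless silenced. Below is a minimal sketch of that pattern; the module and class names are placeholders, and the exact `_LazyModule` call reflects the usual wiring from memory, not this diff.

# Sketch of a lazily-initialized subpackage __init__.py (placeholder names).
import sys
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tokenizers_available

_import_structure = {"configuration_mymodel": ["MyModelConfig"]}

# Optional pieces are only registered when their backend can be imported.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_mymodel_fast"] = ["MyModelTokenizerFast"]

if TYPE_CHECKING:
    # Imports for type checkers and IDEs only; nothing here runs at import time,
    # which is what flake8 used to flag as F401.
    from .configuration_mymodel import MyModelConfig
else:
    # Replace this module with a proxy that imports submodules on first attribute access.
    sys.modules[__name__] = _LazyModule(
        __name__, globals()["__file__"], _import_structure, module_spec=__spec__
    )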
@@ -25,7 +25,7 @@ open-source contribution to Transformers. Along the way, you will:
 - understand the design principles of one of the most popular NLP
   libraries
 - learn how to do efficiently test large NLP models
-- learn how to integrate Python utilities like `black`, `isort`,
+- learn how to integrate Python utilities like `black`, `ruff`,
   `make fix-copies` into a library to always ensure clean and readable
   code
...
@@ -106,7 +106,6 @@ class FakeClass(metaclass=DummyObject):
     def test_create_dummy_files(self):
         expected_dummy_pytorch_file = """# This file is autogenerated by the command `make fix-copies`, do not edit.
-# flake8: noqa
 from ..utils import DummyObject, requires_backends
...
@@ -2469,7 +2469,7 @@ class TokenizerTesterMixin:
         batch_encoded_sequence = tokenizer.batch_encode_plus([sequence, sequence], return_tensors="np")
         # TODO: add forward through JAX/Flax when PR is merged
-        # This is currently here to make flake8 happy !
+        # This is currently here to make ruff happy !
         if encoded_sequence is None:
             raise ValueError("Cannot convert list to numpy tensor on encode_plus()")
@@ -2484,7 +2484,7 @@ class TokenizerTesterMixin:
         )
         # TODO: add forward through JAX/Flax when PR is merged
-        # This is currently here to make flake8 happy !
+        # This is currently here to make ruff happy !
         if encoded_sequence_fast is None:
             raise ValueError("Cannot convert list to numpy tensor on encode_plus() (fast)")
...
@@ -115,7 +115,6 @@ def create_dummy_files(backend_specific_objects=None):
     for backend, objects in backend_specific_objects.items():
         backend_name = "[" + ", ".join(f'"{b}"' for b in backend.split("_and_")) + "]"
         dummy_file = "# This file is autogenerated by the command `make fix-copies`, do not edit.\n"
-        dummy_file += "# flake8: noqa\n"
         dummy_file += "from ..utils import DummyObject, requires_backends\n\n"
         dummy_file += "\n".join([create_dummy_object(o, backend_name) for o in objects])
         dummy_files[backend] = dummy_file
...
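The two lines that matter in this hunk are easy to exercise in isolation. The snippet below is a small, runnable illustration using an invented composite backend key; it shows how `backend_name` is built and that the generated header now goes straight from the autogeneration notice to the import, without the removed noqa line.

# Worked example of the backend_name expression and the new file header
# ("sentencepiece_and_tokenizers" is an illustrative composite key).
backend = "sentencepiece_and_tokenizers"
backend_name = "[" + ", ".join(f'"{b}"' for b in backend.split("_and_")) + "]"
print(backend_name)  # ["sentencepiece", "tokenizers"]

# The generated file now starts with these two lines; the '# flake8: noqa'
# line is no longer emitted between them.
dummy_file = "# This file is autogenerated by the command `make fix-copies`, do not edit.\n"
dummy_file += "from ..utils import DummyObject, requires_backends\n\n"
print(dummy_file)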