Unverified commit b97cab7e authored by Arthur, committed by GitHub

Remove-auth-token (#27060)

* don't use `use_auth_token` internally

* let's use token everywhere

* fixup
parent 8f577dca
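For downstream code this is a pure keyword rename: wherever `use_auth_token` was passed to `from_pretrained`, `push_to_hub`, or `save_pretrained`, the same value is now passed as `token`. A minimal before/after sketch, assuming `transformers` is installed and a Hub token has been stored with `huggingface-cli login`; the checkpoint and repo ids below are illustrative, not taken from this commit:

```python
from transformers import AutoConfig, AutoTokenizer

# Before this commit, scripts and tests authenticated like this:
#   config = AutoConfig.from_pretrained("bert-base-uncased", use_auth_token=True)
#   config.push_to_hub("my-org/my-config", use_auth_token="hf_...")

# After this commit, the same values go through `token` instead.
# `token=True` reuses the token stored by `huggingface-cli login`.
config = AutoConfig.from_pretrained("bert-base-uncased", token=True)
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased", token=True)
config.push_to_hub("my-org/my-config", token="hf_...")
```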
@@ -314,14 +314,14 @@ def main():
finetuning_task=data_args.task_name,
cache_dir=model_args.cache_dir,
revision=model_args.model_revision,
use_auth_token=True if model_args.use_auth_token else None,
token=True if model_args.use_auth_token else None,
)
tokenizer = AutoTokenizer.from_pretrained(
model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
cache_dir=model_args.cache_dir,
use_fast=model_args.use_fast_tokenizer,
revision=model_args.model_revision,
use_auth_token=True if model_args.use_auth_token else None,
token=True if model_args.use_auth_token else None,
)
model = AutoModelForSequenceClassification.from_pretrained(
model_args.model_name_or_path,
@@ -329,7 +329,7 @@ def main():
config=config,
cache_dir=model_args.cache_dir,
revision=model_args.model_revision,
use_auth_token=True if model_args.use_auth_token else None,
token=True if model_args.use_auth_token else None,
)
# Preprocessing the datasets
......
@@ -142,7 +142,7 @@ class ConfigPushToHubTester(unittest.TestCase):
config = BertConfig(
vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
)
config.push_to_hub("valid_org/test-config-org", use_auth_token=self._token)
config.push_to_hub("valid_org/test-config-org", token=self._token)
new_config = BertConfig.from_pretrained("valid_org/test-config-org")
for k, v in config.to_dict().items():
@@ -154,9 +154,7 @@ class ConfigPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
config.save_pretrained(
tmp_dir, repo_id="valid_org/test-config-org", push_to_hub=True, use_auth_token=self._token
)
config.save_pretrained(tmp_dir, repo_id="valid_org/test-config-org", push_to_hub=True, token=self._token)
new_config = BertConfig.from_pretrained("valid_org/test-config-org")
for k, v in config.to_dict().items():
@@ -167,7 +165,7 @@ class ConfigPushToHubTester(unittest.TestCase):
CustomConfig.register_for_auto_class()
config = CustomConfig(attribute=42)
config.push_to_hub("test-dynamic-config", use_auth_token=self._token)
config.push_to_hub("test-dynamic-config", token=self._token)
# This has added the proper auto_map field to the config
self.assertDictEqual(config.auto_map, {"AutoConfig": "custom_configuration.CustomConfig"})
......
@@ -85,7 +85,7 @@ class FeatureExtractorPushToHubTester(unittest.TestCase):
def test_push_to_hub(self):
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR)
feature_extractor.push_to_hub("test-feature-extractor", use_auth_token=self._token)
feature_extractor.push_to_hub("test-feature-extractor", token=self._token)
new_feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(f"{USER}/test-feature-extractor")
for k, v in feature_extractor.__dict__.items():
@@ -97,7 +97,7 @@ class FeatureExtractorPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
feature_extractor.save_pretrained(
tmp_dir, repo_id="test-feature-extractor", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="test-feature-extractor", push_to_hub=True, token=self._token
)
new_feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(f"{USER}/test-feature-extractor")
@@ -106,7 +106,7 @@ class FeatureExtractorPushToHubTester(unittest.TestCase):
def test_push_to_hub_in_organization(self):
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR)
feature_extractor.push_to_hub("valid_org/test-feature-extractor", use_auth_token=self._token)
feature_extractor.push_to_hub("valid_org/test-feature-extractor", token=self._token)
new_feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained("valid_org/test-feature-extractor")
for k, v in feature_extractor.__dict__.items():
@@ -118,7 +118,7 @@ class FeatureExtractorPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
feature_extractor.save_pretrained(
tmp_dir, repo_id="valid_org/test-feature-extractor-org", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="valid_org/test-feature-extractor-org", push_to_hub=True, token=self._token
)
new_feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained("valid_org/test-feature-extractor-org")
@@ -129,7 +129,7 @@ class FeatureExtractorPushToHubTester(unittest.TestCase):
CustomFeatureExtractor.register_for_auto_class()
feature_extractor = CustomFeatureExtractor.from_pretrained(SAMPLE_FEATURE_EXTRACTION_CONFIG_DIR)
feature_extractor.push_to_hub("test-dynamic-feature-extractor", use_auth_token=self._token)
feature_extractor.push_to_hub("test-dynamic-feature-extractor", token=self._token)
# This has added the proper auto_map field to the config
self.assertDictEqual(
......
@@ -95,7 +95,7 @@ class ImageProcessorPushToHubTester(unittest.TestCase):
def test_push_to_hub(self):
image_processor = ViTImageProcessor.from_pretrained(SAMPLE_IMAGE_PROCESSING_CONFIG_DIR)
image_processor.push_to_hub("test-image-processor", use_auth_token=self._token)
image_processor.push_to_hub("test-image-processor", token=self._token)
new_image_processor = ViTImageProcessor.from_pretrained(f"{USER}/test-image-processor")
for k, v in image_processor.__dict__.items():
@@ -107,7 +107,7 @@ class ImageProcessorPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
image_processor.save_pretrained(
tmp_dir, repo_id="test-image-processor", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="test-image-processor", push_to_hub=True, token=self._token
)
new_image_processor = ViTImageProcessor.from_pretrained(f"{USER}/test-image-processor")
@@ -116,7 +116,7 @@ class ImageProcessorPushToHubTester(unittest.TestCase):
def test_push_to_hub_in_organization(self):
image_processor = ViTImageProcessor.from_pretrained(SAMPLE_IMAGE_PROCESSING_CONFIG_DIR)
image_processor.push_to_hub("valid_org/test-image-processor", use_auth_token=self._token)
image_processor.push_to_hub("valid_org/test-image-processor", token=self._token)
new_image_processor = ViTImageProcessor.from_pretrained("valid_org/test-image-processor")
for k, v in image_processor.__dict__.items():
@@ -128,7 +128,7 @@ class ImageProcessorPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
image_processor.save_pretrained(
tmp_dir, repo_id="valid_org/test-image-processor-org", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="valid_org/test-image-processor-org", push_to_hub=True, token=self._token
)
new_image_processor = ViTImageProcessor.from_pretrained("valid_org/test-image-processor-org")
@@ -139,7 +139,7 @@ class ImageProcessorPushToHubTester(unittest.TestCase):
CustomImageProcessor.register_for_auto_class()
image_processor = CustomImageProcessor.from_pretrained(SAMPLE_IMAGE_PROCESSING_CONFIG_DIR)
image_processor.push_to_hub("test-dynamic-image-processor", use_auth_token=self._token)
image_processor.push_to_hub("test-dynamic-image-processor", token=self._token)
# This has added the proper auto_map field to the config
self.assertDictEqual(
......
@@ -60,7 +60,7 @@ class FlaxModelPushToHubTester(unittest.TestCase):
vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
)
model = FlaxBertModel(config)
model.push_to_hub("test-model-flax", use_auth_token=self._token)
model.push_to_hub("test-model-flax", token=self._token)
new_model = FlaxBertModel.from_pretrained(f"{USER}/test-model-flax")
@@ -76,7 +76,7 @@ class FlaxModelPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(tmp_dir, repo_id="test-model-flax", push_to_hub=True, use_auth_token=self._token)
model.save_pretrained(tmp_dir, repo_id="test-model-flax", push_to_hub=True, token=self._token)
new_model = FlaxBertModel.from_pretrained(f"{USER}/test-model-flax")
@@ -92,7 +92,7 @@ class FlaxModelPushToHubTester(unittest.TestCase):
vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
)
model = FlaxBertModel(config)
model.push_to_hub("valid_org/test-model-flax-org", use_auth_token=self._token)
model.push_to_hub("valid_org/test-model-flax-org", token=self._token)
new_model = FlaxBertModel.from_pretrained("valid_org/test-model-flax-org")
@@ -109,7 +109,7 @@ class FlaxModelPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(
tmp_dir, repo_id="valid_org/test-model-flax-org", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="valid_org/test-model-flax-org", push_to_hub=True, token=self._token
)
new_model = FlaxBertModel.from_pretrained("valid_org/test-model-flax-org")
......
@@ -572,7 +572,7 @@ class TFModelPushToHubTester(unittest.TestCase):
logging.set_verbosity_info()
logger = logging.get_logger("transformers.utils.hub")
with CaptureLogger(logger) as cl:
model.push_to_hub("test-model-tf", use_auth_token=self._token)
model.push_to_hub("test-model-tf", token=self._token)
logging.set_verbosity_warning()
# Check the model card was created and uploaded.
self.assertIn("Uploading the following files to __DUMMY_TRANSFORMERS_USER__/test-model-tf", cl.out)
@@ -590,7 +590,7 @@ class TFModelPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(tmp_dir, repo_id="test-model-tf", push_to_hub=True, use_auth_token=self._token)
model.save_pretrained(tmp_dir, repo_id="test-model-tf", push_to_hub=True, token=self._token)
new_model = TFBertModel.from_pretrained(f"{USER}/test-model-tf")
models_equal = True
@@ -638,7 +638,7 @@ class TFModelPushToHubTester(unittest.TestCase):
# Make sure model is properly initialized
model.build()
model.push_to_hub("valid_org/test-model-tf-org", use_auth_token=self._token)
model.push_to_hub("valid_org/test-model-tf-org", token=self._token)
new_model = TFBertModel.from_pretrained("valid_org/test-model-tf-org")
models_equal = True
@@ -653,9 +653,7 @@ class TFModelPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(
tmp_dir, push_to_hub=True, use_auth_token=self._token, repo_id="valid_org/test-model-tf-org"
)
model.save_pretrained(tmp_dir, push_to_hub=True, token=self._token, repo_id="valid_org/test-model-tf-org")
new_model = TFBertModel.from_pretrained("valid_org/test-model-tf-org")
models_equal = True
......
@@ -1162,7 +1162,7 @@ class ModelPushToHubTester(unittest.TestCase):
vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
)
model = BertModel(config)
model.push_to_hub("test-model", use_auth_token=self._token)
model.push_to_hub("test-model", token=self._token)
new_model = BertModel.from_pretrained(f"{USER}/test-model")
for p1, p2 in zip(model.parameters(), new_model.parameters()):
@@ -1173,7 +1173,7 @@ class ModelPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(tmp_dir, repo_id="test-model", push_to_hub=True, use_auth_token=self._token)
model.save_pretrained(tmp_dir, repo_id="test-model", push_to_hub=True, token=self._token)
new_model = BertModel.from_pretrained(f"{USER}/test-model")
for p1, p2 in zip(model.parameters(), new_model.parameters()):
@@ -1202,7 +1202,7 @@ The commit description supports markdown synthax see:
vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37
)
model = BertModel(config)
model.push_to_hub("valid_org/test-model-org", use_auth_token=self._token)
model.push_to_hub("valid_org/test-model-org", token=self._token)
new_model = BertModel.from_pretrained("valid_org/test-model-org")
for p1, p2 in zip(model.parameters(), new_model.parameters()):
@@ -1213,9 +1213,7 @@ The commit description supports markdown synthax see:
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
model.save_pretrained(
tmp_dir, push_to_hub=True, use_auth_token=self._token, repo_id="valid_org/test-model-org"
)
model.save_pretrained(tmp_dir, push_to_hub=True, token=self._token, repo_id="valid_org/test-model-org")
new_model = BertModel.from_pretrained("valid_org/test-model-org")
for p1, p2 in zip(model.parameters(), new_model.parameters()):
@@ -1228,7 +1226,7 @@ The commit description supports markdown synthax see:
config = CustomConfig(hidden_size=32)
model = CustomModel(config)
model.push_to_hub("test-dynamic-model", use_auth_token=self._token)
model.push_to_hub("test-dynamic-model", token=self._token)
# checks
self.assertDictEqual(
config.auto_map,
......
@@ -146,7 +146,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
vocab_writer.write("".join([x + "\n" for x in self.vocab_tokens]))
tokenizer = BertTokenizer(vocab_file)
tokenizer.push_to_hub("test-tokenizer", use_auth_token=self._token)
tokenizer.push_to_hub("test-tokenizer", token=self._token)
new_tokenizer = BertTokenizer.from_pretrained(f"{USER}/test-tokenizer")
self.assertDictEqual(new_tokenizer.vocab, tokenizer.vocab)
@@ -155,7 +155,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
tokenizer.save_pretrained(tmp_dir, repo_id="test-tokenizer", push_to_hub=True, use_auth_token=self._token)
tokenizer.save_pretrained(tmp_dir, repo_id="test-tokenizer", push_to_hub=True, token=self._token)
new_tokenizer = BertTokenizer.from_pretrained(f"{USER}/test-tokenizer")
self.assertDictEqual(new_tokenizer.vocab, tokenizer.vocab)
@@ -167,7 +167,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
vocab_writer.write("".join([x + "\n" for x in self.vocab_tokens]))
tokenizer = BertTokenizer(vocab_file)
tokenizer.push_to_hub("valid_org/test-tokenizer-org", use_auth_token=self._token)
tokenizer.push_to_hub("valid_org/test-tokenizer-org", token=self._token)
new_tokenizer = BertTokenizer.from_pretrained("valid_org/test-tokenizer-org")
self.assertDictEqual(new_tokenizer.vocab, tokenizer.vocab)
@@ -177,7 +177,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
# Push to hub via save_pretrained
with tempfile.TemporaryDirectory() as tmp_dir:
tokenizer.save_pretrained(
tmp_dir, repo_id="valid_org/test-tokenizer-org", push_to_hub=True, use_auth_token=self._token
tmp_dir, repo_id="valid_org/test-tokenizer-org", push_to_hub=True, token=self._token
)
new_tokenizer = BertTokenizer.from_pretrained("valid_org/test-tokenizer-org")
@@ -193,7 +193,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
tokenizer = CustomTokenizer(vocab_file)
# No fast custom tokenizer
tokenizer.push_to_hub("test-dynamic-tokenizer", use_auth_token=self._token)
tokenizer.push_to_hub("test-dynamic-tokenizer", token=self._token)
tokenizer = AutoTokenizer.from_pretrained(f"{USER}/test-dynamic-tokenizer", trust_remote_code=True)
# Can't make an isinstance check because the new_model.config is from the CustomTokenizer class of a dynamic module
@@ -210,7 +210,7 @@ class TokenizerPushToHubTester(unittest.TestCase):
bert_tokenizer.save_pretrained(tmp_dir)
tokenizer = CustomTokenizerFast.from_pretrained(tmp_dir)
tokenizer.push_to_hub("test-dynamic-tokenizer", use_auth_token=self._token)
tokenizer.push_to_hub("test-dynamic-tokenizer", token=self._token)
tokenizer = AutoTokenizer.from_pretrained(f"{USER}/test-dynamic-tokenizer", trust_remote_code=True)
# Can't make an isinstance check because the new_model.config is from the FakeConfig class of a dynamic module
......
@@ -132,10 +132,10 @@ class GetFromCacheTests(unittest.TestCase):
"""Test download file from a gated repo fails with correct message when not authenticated."""
with self.assertRaisesRegex(EnvironmentError, "You are trying to access a gated repo."):
# All files except README.md are protected on a gated repo.
cached_file(GATED_REPO, "gated_file.txt", use_auth_token=False)
cached_file(GATED_REPO, "gated_file.txt", token=False)
def test_has_file_gated_repo(self):
"""Test check file existence from a gated repo fails with correct message when not authenticated."""
with self.assertRaisesRegex(EnvironmentError, "is a gated repository"):
# All files except README.md are protected on a gated repo.
has_file(GATED_REPO, "gated_file.txt", use_auth_token=False)
has_file(GATED_REPO, "gated_file.txt", token=False)
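Callers that still pass the old keyword typically keep working for a while: renames like this are usually handled by remapping `use_auth_token` onto `token` and emitting a deprecation warning. A hedged sketch of that pattern follows; it is not the exact shim used inside `transformers`, only the general shape, and `resolve_token` is a hypothetical helper name:

```python
import warnings


def resolve_token(token=None, use_auth_token=None):
    """Map the deprecated `use_auth_token` keyword onto `token`.

    Illustrative helper only; the real deprecation handling in
    `transformers` may differ in wording and behaviour.
    """
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated; please use `token` instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError("`token` and `use_auth_token` are both specified; pass only `token`.")
        token = use_auth_token
    return token
```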