"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "92f2fbad508f0f4640e91d5de67601e64e8bd2f3"
Unverified Commit faacdf00 authored by Sylvain Gugger, committed by GitHub

Move cache folder to huggingface/hub for consistency with hf_hub (#18492)

* Move cache folder to just huggingface

* Thank you VsCode for this needless import

* Move to hub

* Forgot one
parent 280db2e3
@@ -139,11 +139,11 @@ conda install -c huggingface transformers
 ## Cache setup
-Pretrained models are downloaded and locally cached at: `~/.cache/huggingface/transformers/`. This is the default directory given by the shell environment variable `TRANSFORMERS_CACHE`. On Windows, the default directory is given by `C:\Users\username\.cache\huggingface\transformers`. You can change the shell environment variables shown below - in order of priority - to specify a different cache directory:
+Pretrained models are downloaded and locally cached at: `~/.cache/huggingface/hub`. This is the default directory given by the shell environment variable `TRANSFORMERS_CACHE`. On Windows, the default directory is given by `C:\Users\username\.cache\huggingface\hub`. You can change the shell environment variables shown below - in order of priority - to specify a different cache directory:
-1. Shell environment variable (default): `TRANSFORMERS_CACHE`.
+1. Shell environment variable (default): `HUGGINGFACE_HUB_CACHE` or `TRANSFORMERS_CACHE`.
-2. Shell environment variable: `HF_HOME` + `transformers/`.
+2. Shell environment variable: `HF_HOME`.
-3. Shell environment variable: `XDG_CACHE_HOME` + `/huggingface/transformers`.
+3. Shell environment variable: `XDG_CACHE_HOME` + `/huggingface`.
 <Tip>
...
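Read as a priority list, the three settings above resolve top-down: an explicit hub/transformers cache variable wins, then `HF_HOME`, then the XDG cache location. The sketch below only illustrates that order; `resolve_hub_cache` is a hypothetical helper written for this page, not a function in `transformers` or `huggingface_hub`.

```python
import os

def resolve_hub_cache() -> str:
    """Hypothetical helper showing the documented priority order."""
    # 1. An explicit cache override wins.
    explicit = os.getenv("HUGGINGFACE_HUB_CACHE") or os.getenv("TRANSFORMERS_CACHE")
    if explicit:
        return explicit
    # 2. HF_HOME points at the whole huggingface folder; the hub cache lives under "hub".
    hf_home = os.getenv("HF_HOME")
    if hf_home:
        return os.path.join(hf_home, "hub")
    # 3. XDG_CACHE_HOME + /huggingface/hub, defaulting to ~/.cache.
    xdg_cache = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
    return os.path.join(xdg_cache, "huggingface", "hub")
```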
@@ -81,7 +81,7 @@ old_default_cache_path = os.path.join(torch_cache_home, "transformers")
 hf_cache_home = os.path.expanduser(
     os.getenv("HF_HOME", os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "huggingface"))
 )
-default_cache_path = os.path.join(hf_cache_home, "transformers")
+default_cache_path = os.path.join(hf_cache_home, "hub")
 # Onetime move from the old location to the new one if no ENV variable has been set.
 if (
@@ -102,7 +102,8 @@ if (
 PYTORCH_PRETRAINED_BERT_CACHE = os.getenv("PYTORCH_PRETRAINED_BERT_CACHE", default_cache_path)
 PYTORCH_TRANSFORMERS_CACHE = os.getenv("PYTORCH_TRANSFORMERS_CACHE", PYTORCH_PRETRAINED_BERT_CACHE)
-TRANSFORMERS_CACHE = os.getenv("TRANSFORMERS_CACHE", PYTORCH_TRANSFORMERS_CACHE)
+HUGGINGFACE_HUB_CACHE = os.getenv("HUGGINGFACE_HUB_CACHE", PYTORCH_TRANSFORMERS_CACHE)
+TRANSFORMERS_CACHE = os.getenv("TRANSFORMERS_CACHE", HUGGINGFACE_HUB_CACHE)
 HF_MODULES_CACHE = os.getenv("HF_MODULES_CACHE", os.path.join(hf_cache_home, "modules"))
 TRANSFORMERS_DYNAMIC_MODULE_NAME = "transformers_modules"
 SESSION_ID = uuid4().hex
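The added `HUGGINGFACE_HUB_CACHE` level slots into the existing fallback chain, so a single exported variable propagates all the way down to `TRANSFORMERS_CACHE`. A minimal, self-contained rerun of that chain with an example value (the real constants live in `transformers/utils/hub.py`):

```python
import os

os.environ["HUGGINGFACE_HUB_CACHE"] = "/mnt/models"  # example value, not a real default

# Same fallback chain as in the diff above, with default_cache_path spelled out.
default_cache_path = os.path.join(os.path.expanduser("~/.cache/huggingface"), "hub")
PYTORCH_PRETRAINED_BERT_CACHE = os.getenv("PYTORCH_PRETRAINED_BERT_CACHE", default_cache_path)
PYTORCH_TRANSFORMERS_CACHE = os.getenv("PYTORCH_TRANSFORMERS_CACHE", PYTORCH_PRETRAINED_BERT_CACHE)
HUGGINGFACE_HUB_CACHE = os.getenv("HUGGINGFACE_HUB_CACHE", PYTORCH_TRANSFORMERS_CACHE)
TRANSFORMERS_CACHE = os.getenv("TRANSFORMERS_CACHE", HUGGINGFACE_HUB_CACHE)

print(TRANSFORMERS_CACHE)  # -> /mnt/models, inherited through HUGGINGFACE_HUB_CACHE
```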
@@ -1475,9 +1476,16 @@ def move_to_new_cache(file, repo, filename, revision, etag, commit_hash):
     clean_files_for(file)
-def move_cache(cache_dir=None, token=None):
+def move_cache(cache_dir=None, new_cache_dir=None, token=None):
+    if new_cache_dir is None:
+        new_cache_dir = TRANSFORMERS_CACHE
     if cache_dir is None:
-        cache_dir = TRANSFORMERS_CACHE
+        # Migrate from old cache in .cache/huggingface/hub
+        old_cache = Path(TRANSFORMERS_CACHE).parent / "transformers"
+        if os.path.isdir(str(old_cache)):
+            cache_dir = str(old_cache)
+        else:
+            cache_dir = new_cache_dir
     if token is None:
         token = HfFolder.get_token()
     cached_files = get_all_cached_files(cache_dir=cache_dir)
...
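The new signature separates the migration source (`cache_dir`) from its destination (`new_cache_dir`); when neither is given, the function prefers a leftover `~/.cache/huggingface/transformers` folder sitting next to the new hub cache. A standalone sketch of just that selection step, assuming an example `TRANSFORMERS_CACHE` value (`pick_migration_source` is a hypothetical name, not part of the library):

```python
import os
from pathlib import Path

TRANSFORMERS_CACHE = os.path.expanduser("~/.cache/huggingface/hub")  # example value

def pick_migration_source(cache_dir=None, new_cache_dir=None):
    """Hypothetical helper mirroring how move_cache chooses its source directory."""
    if new_cache_dir is None:
        new_cache_dir = TRANSFORMERS_CACHE
    if cache_dir is None:
        # Prefer the legacy ~/.cache/huggingface/transformers folder if it still exists.
        old_cache = Path(TRANSFORMERS_CACHE).parent / "transformers"
        cache_dir = str(old_cache) if os.path.isdir(str(old_cache)) else new_cache_dir
    return cache_dir, new_cache_dir

src, dst = pick_migration_source()
print(f"would migrate cached files from {src} to {dst}")
```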