"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "93dccf527b81b807c3ec8c658a7bd7e9fca8d3f4"
Unverified commit 5cd8d7ad, authored by Thomas Wolf, committed by GitHub

Merge pull request #16 from donatasrep/master

Excluding AdamWeightDecayOptimizer internal variables from restoring
Parents: 278fd28a, 20d07b3a
@@ -68,11 +68,17 @@ def convert():
         arrays.append(array)
 
     for name, array in zip(names, arrays):
-        name = name[5:] # skip "bert/"
+        if not name.startswith("bert"):
+            print("Skipping {}".format(name))
+            continue
+        else:
+            name = name.replace("bert/", "") # skip "bert/"
         print("Loading {}".format(name))
         name = name.split('/')
-        if name[0] in ['redictions', 'eq_relationship']:
-            print("Skipping")
+        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
+        # which are not required for using pretrained model
+        if name[0] in ['redictions', 'eq_relationship'] or name[-1] == "adam_v" or name[-1] == "adam_m":
+            print("Skipping {}".format("/".join(name)))
             continue
         pointer = model
         for m_name in name:
......
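
For context on what the new check filters out: the converter first lists every variable stored in the TensorFlow checkpoint, then copies the surviving tensors into the PyTorch model. adam_m and adam_v are the per-weight first- and second-moment slots that AdamWeightDecayOptimizer saves alongside each parameter; they are only needed to resume training, not to use the pretrained model. Below is a minimal sketch of the filtering step, assuming a hypothetical checkpoint path and using the standard tf.train.list_variables API that the conversion script already relies on:

import tensorflow as tf

tf_path = "/tmp/bert_model.ckpt"  # hypothetical checkpoint path, for illustration only

# tf.train.list_variables returns (name, shape) pairs for every tensor in the checkpoint.
names = [name for name, shape in tf.train.list_variables(tf_path)]

for name in names:
    parts = name.split('/')
    # Optimizer slot variables end in "adam_m"/"adam_v"; skip them, since only
    # the model weights are needed to run or fine-tune the pretrained model.
    if parts[-1] in ("adam_m", "adam_v"):
        print("Skipping {}".format(name))
        continue
    print("Loading {}".format(name))

This mirrors the merged diff, where the same condition is written as name[-1] == "adam_v" or name[-1] == "adam_m" after the name has been split on '/'.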