dcuai / dlexamples

Commit 78e30386, authored Jul 29, 2022 by panning
Standardize the code and remove unnecessary comments
Parent: 3cb38de2
Showing 1 changed file with 0 additions and 26 deletions.

PyTorch/Compute-Vision/Accuracy_Validation/ResNet50/main_acc.py  (+0, -26)
@@ -209,22 +209,6 @@ def main_worker(gpu, ngpus_per_node, args):
         model = torch.nn.DataParallel(model).cuda()
     # optionally resume from a checkpoint
-    # if args.resume:
-    #     if os.path.isfile(args.resume):
-    #         print("=> loading checkpoint '{}'".format(args.resume))
-    #         checkpoint = torch.load(args.resume)
-    #         args.start_epoch = checkpoint['epoch']
-    #         best_acc1 = checkpoint['best_acc1']
-    #         if args.gpu is not None:
-    #             # best_acc1 may be from a checkpoint from a different GPU
-    #             best_acc1 = best_acc1.to(args.gpu)
-    #         model.load_state_dict(checkpoint['state_dict'])
-    #         optimizer.load_state_dict(checkpoint['optimizer'])
-    #         print("=> loaded checkpoint '{}' (epoch {})"
-    #               .format(args.resume, checkpoint['epoch']))
-    #     else:
-    #         print("=> no checkpoint found at '{}'".format(args.resume))
     if args.resume:
         if os.path.isfile(args.resume):
             print("=> loading checkpoint '{}'".format(args.resume))
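For reference, the commented-out block deleted above mirrors the checkpoint-resume pattern that the surviving `if args.resume:` branch implements. The sketch below reassembles that logic from the deleted comments into a self-contained helper; the wrapper name `maybe_resume` is hypothetical, and `args.resume`, `args.start_epoch`, `args.gpu`, `model`, and `optimizer` are assumed to exist as they do in main_worker().

# Illustrative reconstruction of the resume logic described by the deleted comments;
# not the exact code remaining in main_acc.py.
import os
import torch


def maybe_resume(args, model, optimizer):
    best_acc1 = 0
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            best_acc1 = checkpoint['best_acc1']
            if args.gpu is not None:
                # best_acc1 may be a tensor saved from a different GPU
                best_acc1 = best_acc1.to(args.gpu)
            model.load_state_dict(checkpoint['state_dict'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))
    return best_acc1

Called right after the model and optimizer are built, a helper like this would return the restored best_acc1 so training can continue from args.start_epoch.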
@@ -421,15 +405,6 @@ def validate(val_loader, model, criterion, args):
     return top1.avg
-#def save_checkpoint(state, epoch, is_best, rank, filename='/public/home/aiss/419_rocm2.x_DL/examples_dl/aiss_test/base_acc/HK/HK3/test_resume/checkpoint.pth.tar'):
-#    filename='/public/home/aiss/419_rocm2.x_DL/examples_dl/aiss_test/base_acc/HK/HK3/test_resume'+str(rank)+'/checkpoint_'+str(epoch)+'.pth.tar'
-#    torch.save(state, filename)
-#    if is_best:
-#        #shutil.copyfile(filename, '/public/home/aiss/419_rocm2.x_DL/examples_dl/aiss_test/base_acc/HK/HK3/test_resume/model_best.pth.tar')
-#        best_dir='/public/home/aiss/419_rocm2.x_DL/examples_dl/aiss_test/base_acc/HK/HK3/test_resume'+str(rank)+'/model_best.pth.tar'
-#        shutil.copyfile(filename, best_dir)
 def save_checkpoint(state, epoch, is_best, rank, filename):
     rank_path = filename + '/' + str(rank)
     if not os.path.isdir(rank_path):
@@ -437,7 +412,6 @@ def save_checkpoint(state, epoch, is_best, rank, filename):
     filename = rank_path + '/checkpoint_' + str(epoch) + '.pth.tar'
     torch.save(state, filename)
     if is_best:
-        #shutil.copyfile(filename, '/public/home/aiss/419_rocm2.x_DL/examples_dl/aiss_test/base_acc/HK/HK3/test_resume/model_best.pth.tar')
         best_dir = rank_path + '/model_best.pth.tar'
         shutil.copyfile(filename, best_dir)
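The lines deleted in the last two hunks were an older, commented-out save_checkpoint that hard-coded an absolute cluster path; the surviving function instead takes the base path as a parameter and keeps one directory per rank. Below is a hedged sketch assembling the visible context lines into a self-contained function: the diff hides the body of the `if not os.path.isdir(rank_path):` branch, so the os.makedirs call is an assumption, and the commented call site at the end is purely hypothetical.

# Sketch of save_checkpoint as shown in the unchanged context lines of this diff.
import os
import shutil
import torch


def save_checkpoint(state, epoch, is_best, rank, filename):
    # One sub-directory per distributed rank: <filename>/<rank>/
    rank_path = filename + '/' + str(rank)
    if not os.path.isdir(rank_path):
        os.makedirs(rank_path)  # assumed: the diff does not show this line
    # One checkpoint file per epoch inside the rank directory
    filename = rank_path + '/checkpoint_' + str(epoch) + '.pth.tar'
    torch.save(state, filename)
    if is_best:
        # Keep a standing copy of the best checkpoint seen so far
        best_dir = rank_path + '/model_best.pth.tar'
        shutil.copyfile(filename, best_dir)


# Hypothetical call site inside the training loop (names are illustrative):
# save_checkpoint({'epoch': epoch + 1,
#                  'state_dict': model.state_dict(),
#                  'best_acc1': best_acc1,
#                  'optimizer': optimizer.state_dict()},
#                 epoch, is_best, rank, args.checkpoint_dir)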