OpenDAS / nni · Commit 268215ad
"app/git@developer.sourcefind.cn:OpenDAS/ollama.git" did not exist on "2b4ca6cf36c0dc31fdae7046433f4341f171026f"
Unverified commit 268215ad, authored by J-shang on Feb 28, 2022; committed by GitHub on Feb 28, 2022.
Fix seed when test apoz pruner (#4580)
Parent: 0502e2d5
Showing 3 changed files with 58 additions and 22 deletions.
pipelines/fast-test.yml (+1, -1)
test/ut/compression/v2/test_iterative_pruner_torch.py (+27, -9)
test/ut/compression/v2/test_pruner_torch.py (+30, -12)
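Judging from the hunks below, the commit raises the Windows fast-test job timeout from 70 to 75 minutes and introduces a FixSeedPrunerTestCase in both compression test modules: its setUp pins the random, numpy, and torch seeds to 1024 so the sparsity-range assertions stop failing intermittently, and its tearDown re-randomizes the seeds so other tests are unaffected. The snippet that follows is a minimal, self-contained sketch of that seeding pattern only; the SeededTestCase class and its test_deterministic_sample check are illustrative names, not part of the commit.

import random
import unittest

import numpy
import torch


class SeededTestCase(unittest.TestCase):
    """Minimal illustration of the seed-pinning pattern used in the diff below."""

    def setUp(self) -> None:
        # Pin every RNG the tests draw from, so each test starts from the same state.
        random.seed(1024)
        numpy.random.seed(1024)
        torch.manual_seed(1024)

    def tearDown(self) -> None:
        # Re-randomize afterwards so tests outside this case do not inherit the fixed seed.
        import time
        now = int(time.time() * 100)
        random.seed(now)
        seed = random.randint(0, 2 ** 32 - 1)
        random.seed(seed)
        numpy.random.seed(seed)
        torch.manual_seed(seed)

    def test_deterministic_sample(self):
        # With the seed pinned in setUp, the same draw is reproduced exactly; this is
        # what keeps range checks like `0.78 < sparsity < 0.82` from flaking.
        first = torch.rand(10, 1, 28, 28)
        torch.manual_seed(1024)
        second = torch.rand(10, 1, 28, 28)
        self.assertTrue(torch.equal(first, second))


if __name__ == '__main__':
    unittest.main()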
pipelines/fast-test.yml

@@ -177,7 +177,7 @@ stages:
 - job: windows
   pool:
     vmImage: windows-latest
-  timeoutInMinutes: 70
+  timeoutInMinutes: 75
   steps:
   - template: templates/install-dependencies.yml
test/ut/compression/v2/test_iterative_pruner_torch.py

@@ -4,6 +4,7 @@
 import random
 import unittest
+import numpy
 import torch
 import torch.nn.functional as F

@@ -105,6 +106,17 @@ class IterativePrunerTestCase(unittest.TestCase):
         sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
         assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82

+    def test_amc_pruner(self):
+        model = TorchModel()
+        config_list = [{'op_types': ['Conv2d'], 'total_sparsity': 0.5, 'max_sparsity_per_layer': 0.8}]
+        dummy_input = torch.rand(10, 1, 28, 28)
+        ddpg_params = {'hidden1': 300, 'hidden2': 300, 'lr_c': 1e-3, 'lr_a': 1e-4, 'warmup': 5, 'discount': 1., 'bsize': 64, 'rmsize': 100,
+                       'window_length': 1, 'tau': 0.01, 'init_delta': 0.5, 'delta_decay': 0.99, 'max_episode_length': 1e9, 'epsilon': 50000}
+        pruner = AMCPruner(10, model, config_list, dummy_input, evaluator, finetuner=finetuner, ddpg_params=ddpg_params, target='flops', log_dir='../../../logs')
+        pruner.compress()
+
+
+class FixSeedPrunerTestCase(unittest.TestCase):
     def test_auto_compress_pruner(self):
         model = TorchModel()
         config_list = [{'op_types': ['Conv2d'], 'total_sparsity': 0.8}]

@@ -126,15 +138,21 @@ class IterativePrunerTestCase(unittest.TestCase):
         print(sparsity_list)
         assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82

-    def test_amc_pruner(self):
-        model = TorchModel()
-        config_list = [{'op_types': ['Conv2d'], 'total_sparsity': 0.5, 'max_sparsity_per_layer': 0.8}]
-        dummy_input = torch.rand(10, 1, 28, 28)
-        ddpg_params = {'hidden1': 300, 'hidden2': 300, 'lr_c': 1e-3, 'lr_a': 1e-4, 'warmup': 5, 'discount': 1., 'bsize': 64, 'rmsize': 100,
-                       'window_length': 1, 'tau': 0.01, 'init_delta': 0.5, 'delta_decay': 0.99, 'max_episode_length': 1e9, 'epsilon': 50000}
-        pruner = AMCPruner(10, model, config_list, dummy_input, evaluator, finetuner=finetuner, ddpg_params=ddpg_params, target='flops', log_dir='../../../logs')
-        pruner.compress()
-
+    def setUp(self) -> None:
+        # fix seed in order to solve the random failure of ut
+        random.seed(1024)
+        numpy.random.seed(1024)
+        torch.manual_seed(1024)
+
+    def tearDown(self) -> None:
+        # reset seed
+        import time
+        now = int(time.time() * 100)
+        random.seed(now)
+        seed = random.randint(0, 2 ** 32 - 1)
+        random.seed(seed)
+        numpy.random.seed(seed)
+        torch.manual_seed(seed)

 if __name__ == '__main__':
     unittest.main()
test/ut/compression/v2/test_pruner_torch.py

 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
+import random
 import unittest
+import numpy
 import torch
 import torch.nn.functional as F

@@ -122,18 +124,6 @@ class PrunerTestCase(unittest.TestCase):
         sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
         assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82

-    def test_activation_apoz_rank_pruner(self):
-        model = TorchModel()
-        config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
-        pruner = ActivationAPoZRankPruner(model=model, config_list=config_list, trainer=trainer, traced_optimizer=get_optimizer(model), criterion=criterion, training_batches=5,
-                                          activation='relu', mode='dependency_aware', dummy_input=torch.rand(10, 1, 28, 28))
-        pruned_model, masks = pruner.compress()
-        pruner._unwrap_model()
-        sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
-        assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
-
     def test_activation_mean_rank_pruner(self):
         model = TorchModel()
         config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]

@@ -177,6 +167,34 @@ class PrunerTestCase(unittest.TestCase):
         sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
         assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82

+
+class FixSeedPrunerTestCase(unittest.TestCase):
+    def test_activation_apoz_rank_pruner(self):
+        model = TorchModel()
+        config_list = [{'op_types': ['Conv2d'], 'sparsity': 0.8}]
+        pruner = ActivationAPoZRankPruner(model=model, config_list=config_list, trainer=trainer, traced_optimizer=get_optimizer(model), criterion=criterion, training_batches=5,
+                                          activation='relu', mode='dependency_aware', dummy_input=torch.rand(10, 1, 28, 28))
+        pruned_model, masks = pruner.compress()
+        pruner._unwrap_model()
+        sparsity_list = compute_sparsity_mask2compact(pruned_model, masks, config_list)
+        assert 0.78 < sparsity_list[0]['total_sparsity'] < 0.82
+
+    def setUp(self) -> None:
+        # fix seed in order to solve the random failure of ut
+        random.seed(1024)
+        numpy.random.seed(1024)
+        torch.manual_seed(1024)
+
+    def tearDown(self) -> None:
+        # reset seed
+        import time
+        now = int(time.time() * 100)
+        random.seed(now)
+        seed = random.randint(0, 2 ** 32 - 1)
+        random.seed(seed)
+        numpy.random.seed(seed)
+        torch.manual_seed(seed)

 if __name__ == '__main__':
     unittest.main()
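To exercise only the new seed-fixed cases after applying this diff, one possible approach with the standard unittest loader is sketched below; the dotted module paths are an assumption about running from the repository's test directory with it on the import path, not something the commit specifies.

# Sketch: run only the FixSeedPrunerTestCase suites touched by this commit.
# The dotted paths assume the working directory is the repo's `test` folder;
# adjust them to match your local layout.
import unittest

loader = unittest.defaultTestLoader
suite = unittest.TestSuite([
    loader.loadTestsFromName('ut.compression.v2.test_pruner_torch.FixSeedPrunerTestCase'),
    loader.loadTestsFromName('ut.compression.v2.test_iterative_pruner_torch.FixSeedPrunerTestCase'),
])
unittest.TextTestRunner(verbosity=2).run(suite)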