OpenDAS / nni — Commit 4773c918 (Unverified)

Authored Mar 18, 2020 by SparkSnail; committed by GitHub on Mar 18, 2020.

Merge pull request #236 from microsoft/master

merge master

Parents: 75028bd7, 3c0ef842

Showing 18 changed files with 285 additions and 37 deletions (+285 -37).
src/sdk/pynni/nni/bohb_advisor/bohb_advisor.py                       +1   -0
src/sdk/pynni/nni/compression/torch/weight_rank_filter_pruners.py    +4   -4
src/sdk/pynni/nni/smac_tuner/smac_tuner.py                           +1   -0
src/sdk/pynni/tests/test_builtin_tuners.py                           +88  -10
src/sdk/pynni/tests/test_pruners.py                                  +162 -0
src/webui/src/components/trial-detail/Intermediate.tsx               +1   -1
src/webui/src/components/trial-detail/TableList.tsx                  +14  -11
src/webui/src/static/function.ts                                     +1   -1
src/webui/src/static/style/trialsDetail.scss                         +1   -1
test/pipelines-it-frameworkcontroller.yml                            +1   -1
test/pipelines-it-kubeflow.yml                                       +1   -1
test/pipelines-it-local-windows.yml                                  +1   -1
test/pipelines-it-local.yml                                          +1   -1
test/pipelines-it-pai-windows.yml                                    +1   -1
test/pipelines-it-pai.yml                                            +1   -1
test/pipelines-it-remote-windows.yml                                 +2   -1
test/pipelines-it-remote.yml                                         +3   -1
tools/nni_trial_tool/log_utils.py                                    +1   -1
src/sdk/pynni/nni/bohb_advisor/bohb_advisor.py

@@ -641,6 +641,7 @@ class BOHB(MsgDispatcherBase):
             if not _value:
                 logger.info("Useless trial data, value is %s, skip this trial data.", _value)
                 continue
+            _value = extract_scalar_reward(_value)
             budget_exist_flag = False
             barely_params = dict()
             for keys in _params:
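For readers unfamiliar with the helper being called in the added line: extract_scalar_reward comes from nni.utils and reduces a reported trial result to the single scalar the advisor optimizes. A minimal sketch of that behaviour (an illustration with a hypothetical name, not the library code):

    # Hypothetical re-implementation for illustration only; the real helper lives in nni.utils.
    def extract_scalar_reward_sketch(value, scalar_key='default'):
        # A trial may report either a bare number or a dict of metrics;
        # in the dict case the advisor optimizes the 'default' entry.
        if isinstance(value, dict):
            return value[scalar_key]
        return value

    extract_scalar_reward_sketch({'default': 1.2, 'tmp': 2})  # -> 1.2
    extract_scalar_reward_sketch(0.93)                        # -> 0.93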
src/sdk/pynni/nni/compression/torch/weight_rank_filter_pruners.py

@@ -124,9 +124,9 @@ class L1FilterPruner(WeightRankFilterPruner):
         w_abs_structured = w_abs.view(filters, -1).sum(dim=1)
         threshold = torch.topk(w_abs_structured.view(-1), num_prune, largest=False)[0].max()
         mask_weight = torch.gt(w_abs_structured, threshold)[:, None, None, None].expand_as(weight).type_as(weight)
-        mask_bias = torch.gt(w_abs_structured, threshold).type_as(weight)
-        return {'weight_mask': mask_weight.detach(), 'bias_mask': mask_bias.detach()}
+        mask_bias = torch.gt(w_abs_structured, threshold).type_as(weight).detach() if base_mask['bias_mask'] is not None else None
+        return {'weight_mask': mask_weight.detach(), 'bias_mask': mask_bias}

 class L2FilterPruner(WeightRankFilterPruner):

@@ -172,9 +172,9 @@ class L2FilterPruner(WeightRankFilterPruner):
         w_l2_norm = torch.sqrt((w ** 2).sum(dim=1))
         threshold = torch.topk(w_l2_norm.view(-1), num_prune, largest=False)[0].max()
         mask_weight = torch.gt(w_l2_norm, threshold)[:, None, None, None].expand_as(weight).type_as(weight)
-        mask_bias = torch.gt(w_l2_norm, threshold).type_as(weight)
-        return {'weight_mask': mask_weight.detach(), 'bias_mask': mask_bias.detach()}
+        mask_bias = torch.gt(w_l2_norm, threshold).type_as(weight).detach() if base_mask['bias_mask'] is not None else None
+        return {'weight_mask': mask_weight.detach(), 'bias_mask': mask_bias}

 class FPGMPruner(WeightRankFilterPruner):
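The change in both pruners is the same: the bias mask is only built and detached when the wrapped layer actually has a bias mask, so layers created with bias=False no longer hit a call on None. A minimal sketch of the guarded logic, using hypothetical names and shapes (not the pruner class itself):

    import torch

    def build_masks_sketch(filter_scores, threshold, weight, has_bias_mask):
        # filter_scores: one importance score per output filter, shape (num_filters,)
        # weight: conv weight, shape (num_filters, in_channels, k, k)
        keep = torch.gt(filter_scores, threshold)
        mask_weight = keep[:, None, None, None].expand_as(weight).type_as(weight)
        # Only materialize a bias mask when the layer actually has one.
        mask_bias = keep.type_as(weight).detach() if has_bias_mask else None
        return {'weight_mask': mask_weight.detach(), 'bias_mask': mask_bias}

    masks = build_masks_sketch(torch.rand(8), 0.5, torch.randn(8, 1, 3, 3), has_bias_mask=False)
    assert masks['bias_mask'] is None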
src/sdk/pynni/nni/smac_tuner/smac_tuner.py

@@ -309,6 +309,7 @@ class SMACTuner(Tuner):
             if not _value:
                 self.logger.info("Useless trial data, value is %s, skip this trial data.", _value)
                 continue
+            _value = extract_scalar_reward(_value)
             # convert the keys in loguniform and categorical types
             valid_entry = True
             for key, value in _params.items():
src/sdk/pynni/tests/test_builtin_tuners.py

@@ -34,7 +34,7 @@ class BuiltinTunersTestCase(TestCase):
         - [ ] save_checkpoint
         - [X] update_search_space
         - [X] generate_multiple_parameters
-        - [ ] import_data
+        - [X] import_data
         - [ ] trial_end
         - [x] receive_trial_result
         """

@@ -141,50 +141,128 @@ class BuiltinTunersTestCase(TestCase):
         logger.info("Full supported search space: %s", full_supported_search_space)
         self.search_space_test_one(tuner_factory, full_supported_search_space)

+    def import_data_test(self, tuner_factory, stype="choice_str"):
+        """
+        import data at the beginning with number value and dict value
+        import data in the middle also with number value and dict value, and duplicate data record
+        generate parameters after data import
+
+        Parameters
+        ----------
+        tuner_factory : lambda
+            a lambda for instantiate a tuner
+        stype : str
+            the value type of hp choice, support "choice_str" and "choice_num"
+        """
+        if stype == "choice_str":
+            search_space = {
+                "choice_str": {
+                    "_type": "choice",
+                    "_value": ["cat", "dog", "elephant", "cow", "sheep", "panda"]
+                }
+            }
+        elif stype == "choice_num":
+            search_space = {
+                "choice_num": {
+                    "_type": "choice",
+                    "_value": [10, 20, 30, 40, 50, 60]
+                }
+            }
+        else:
+            raise RuntimeError("Unexpected stype")
+        tuner = tuner_factory()
+        self.assertIsInstance(tuner, Tuner)
+        tuner.update_search_space(search_space)
+        # import data at the beginning
+        if stype == "choice_str":
+            data = [{"parameter": {"choice_str": "cat"}, "value": 1.1},
+                    {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}}]
+        else:
+            data = [{"parameter": {"choice_num": 20}, "value": 1.1},
+                    {"parameter": {"choice_num": 60}, "value": {"default": 1.2, "tmp": 2}}]
+        tuner.import_data(data)
+        logger.info("Imported data successfully at the beginning")
+        # generate parameters
+        parameters = tuner.generate_multiple_parameters(list(range(3)))
+        for i in range(3):
+            tuner.receive_trial_result(i, parameters[i], random.uniform(-100, 100))
+        # import data in the middle
+        if stype == "choice_str":
+            data = [{"parameter": {"choice_str": "cat"}, "value": 1.1},
+                    {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}},
+                    {"parameter": {"choice_str": "cow"}, "value": 1.3}]
+        else:
+            data = [{"parameter": {"choice_num": 20}, "value": 1.1},
+                    {"parameter": {"choice_num": 60}, "value": {"default": 1.2, "tmp": 2}},
+                    {"parameter": {"choice_num": 50}, "value": 1.3}]
+        tuner.import_data(data)
+        logger.info("Imported data successfully in the middle")
+        # generate parameters again
+        parameters = tuner.generate_multiple_parameters([3])
+        tuner.receive_trial_result(3, parameters[0], random.uniform(-100, 100))

     def test_grid_search(self):
         self.exhaustive = True
-        self.search_space_test_all(lambda: GridSearchTuner(),
+        tuner_fn = lambda: GridSearchTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "quniform"])
+        self.import_data_test(tuner_fn)

     def test_tpe(self):
-        self.search_space_test_all(lambda: HyperoptTuner("tpe"),
+        tuner_fn = lambda: HyperoptTuner("tpe")
+        self.search_space_test_all(tuner_fn,
                                    ignore_types=["uniform_equal", "qloguniform_equal", "loguniform_equal", "quniform_clip_2"])
         # NOTE: types are ignored because `tpe.py line 465, in adaptive_parzen_normal assert prior_sigma > 0`
+        self.import_data_test(tuner_fn)

     def test_random_search(self):
-        self.search_space_test_all(lambda: HyperoptTuner("random_search"))
+        tuner_fn = lambda: HyperoptTuner("random_search")
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_anneal(self):
-        self.search_space_test_all(lambda: HyperoptTuner("anneal"))
+        tuner_fn = lambda: HyperoptTuner("anneal")
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_smac(self):
         if sys.platform == "win32":
             return  # smac doesn't work on windows
-        self.search_space_test_all(lambda: SMACTuner(),
+        tuner_fn = lambda: SMACTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform", "loguniform"])
+        self.import_data_test(tuner_fn)

     def test_batch(self):
         self.exhaustive = True
-        self.search_space_test_all(lambda: BatchTuner(),
+        tuner_fn = lambda: BatchTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice"])
+        self.import_data_test(tuner_fn)

     def test_evolution(self):
         # Needs enough population size, otherwise it will throw a runtime error
-        self.search_space_test_all(lambda: EvolutionTuner(population_size=100))
+        tuner_fn = lambda: EvolutionTuner(population_size=100)
+        self.search_space_test_all(tuner_fn)
+        self.import_data_test(tuner_fn)

     def test_gp(self):
         self.test_round = 1  # NOTE: GP tuner got hanged for multiple testing round
-        self.search_space_test_all(lambda: GPTuner(),
+        tuner_fn = lambda: GPTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform", "loguniform", "qloguniform"],
                                    ignore_types=["normal", "lognormal", "qnormal", "qlognormal"],
                                    fail_types=["choice_str", "choice_mixed"])
+        self.import_data_test(tuner_fn, "choice_num")

     def test_metis(self):
         self.test_round = 1  # NOTE: Metis tuner got hanged for multiple testing round
-        self.search_space_test_all(lambda: MetisTuner(),
+        tuner_fn = lambda: MetisTuner()
+        self.search_space_test_all(tuner_fn,
                                    supported_types=["choice", "randint", "uniform", "quniform"],
                                    fail_types=["choice_str", "choice_mixed"])
+        self.import_data_test(tuner_fn, "choice_num")

     def test_networkmorphism(self):
         pass
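As a quick orientation to what the new import_data_test exercises, here is a minimal stand-alone sketch of the same flow. It assumes nni is installed and that HyperoptTuner is importable under the path used elsewhere in this test module (an assumption worth checking against the imports at the top of test_builtin_tuners.py):

    import random
    # Assumed import path; verify against the imports in test_builtin_tuners.py.
    from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner

    tuner = HyperoptTuner("tpe")
    tuner.update_search_space({"choice_str": {"_type": "choice",
                                              "_value": ["cat", "dog", "elephant", "cow", "sheep", "panda"]}})
    # Each imported record carries the trial's hyper-parameters and its final result;
    # the result may be a bare number or a dict with a "default" key.
    tuner.import_data([
        {"parameter": {"choice_str": "cat"}, "value": 1.1},
        {"parameter": {"choice_str": "dog"}, "value": {"default": 1.2, "tmp": 2}},
    ])
    # New parameters can then be requested and results reported as usual.
    parameters = tuner.generate_multiple_parameters(list(range(3)))
    for i in range(3):
        tuner.receive_trial_result(i, parameters[i], random.uniform(-100, 100))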
src/sdk/pynni/tests/test_pruners.py (new file, 0 → 100644)

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from unittest import TestCase, main
from nni.compression.torch import LevelPruner, SlimPruner, FPGMPruner, L1FilterPruner, \
    L2FilterPruner, AGP_Pruner, ActivationMeanRankFilterPruner, ActivationAPoZRankFilterPruner

def validate_sparsity(wrapper, sparsity, bias=False):
    masks = [wrapper.weight_mask]
    if bias and wrapper.bias_mask is not None:
        masks.append(wrapper.bias_mask)
    for m in masks:
        actual_sparsity = (m == 0).sum().item() / m.numel()
        msg = 'actual sparsity: {:.2f}, target sparsity: {:.2f}'.format(actual_sparsity, sparsity)
        assert math.isclose(actual_sparsity, sparsity, abs_tol=0.1), msg

prune_config = {
    'level': {
        'pruner_class': LevelPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['default'],
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, False),
            lambda model: validate_sparsity(model.fc, 0.5, False)
        ]
    },
    'agp': {
        'pruner_class': AGP_Pruner,
        'config_list': [{
            'initial_sparsity': 0,
            'final_sparsity': 0.8,
            'start_epoch': 0,
            'end_epoch': 10,
            'frequency': 1,
            'op_types': ['default']
        }],
        'validators': []
    },
    'slim': {
        'pruner_class': SlimPruner,
        'config_list': [{
            'sparsity': 0.7,
            'op_types': ['BatchNorm2d']
        }],
        'validators': [
            lambda model: validate_sparsity(model.bn1, 0.7, model.bias)
        ]
    },
    'fpgm': {
        'pruner_class': FPGMPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['Conv2d']
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, model.bias)
        ]
    },
    'l1': {
        'pruner_class': L1FilterPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['Conv2d'],
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, model.bias)
        ]
    },
    'l2': {
        'pruner_class': L2FilterPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['Conv2d'],
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, model.bias)
        ]
    },
    'mean_activation': {
        'pruner_class': ActivationMeanRankFilterPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['Conv2d'],
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, model.bias)
        ]
    },
    'apoz': {
        'pruner_class': ActivationAPoZRankFilterPruner,
        'config_list': [{
            'sparsity': 0.5,
            'op_types': ['Conv2d'],
        }],
        'validators': [
            lambda model: validate_sparsity(model.conv1, 0.5, model.bias)
        ]
    }
}

class Model(nn.Module):
    def __init__(self, bias=True):
        super(Model, self).__init__()
        self.conv1 = nn.Conv2d(1, 8, kernel_size=3, padding=1, bias=bias)
        self.bn1 = nn.BatchNorm2d(8)
        self.pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(8, 2, bias=bias)
        self.bias = bias

    def forward(self, x):
        return self.fc(self.pool(self.bn1(self.conv1(x))).view(x.size(0), -1))

def pruners_test(pruner_names=['level', 'agp', 'slim', 'fpgm', 'l1', 'l2', 'mean_activation', 'apoz'], bias=True):
    for pruner_name in pruner_names:
        print('testing {}...'.format(pruner_name))
        model = Model(bias=bias)
        optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
        config_list = prune_config[pruner_name]['config_list']

        x = torch.randn(2, 1, 28, 28)
        y = torch.tensor([0, 1]).long()
        out = model(x)
        loss = F.cross_entropy(out, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        pruner = prune_config[pruner_name]['pruner_class'](model, config_list, optimizer)
        pruner.compress()

        x = torch.randn(2, 1, 28, 28)
        y = torch.tensor([0, 1]).long()
        out = model(x)
        loss = F.cross_entropy(out, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        pruner.export_model('./model_tmp.pth', './mask_tmp.pth', './onnx_tmp.pth', input_shape=(2, 1, 28, 28))
        for v in prune_config[pruner_name]['validators']:
            v(model)

        os.remove('./model_tmp.pth')
        os.remove('./mask_tmp.pth')
        os.remove('./onnx_tmp.pth')

class PrunerTestCase(TestCase):
    def test_pruners(self):
        pruners_test(bias=True)

    def test_pruners_no_bias(self):
        pruners_test(bias=False)

if __name__ == '__main__':
    main()
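The check at the heart of this new file is validate_sparsity: the fraction of zero entries in a produced mask should land near the configured sparsity. A tiny self-contained sketch of that arithmetic:

    import math
    import torch

    mask = torch.tensor([0., 1., 0., 1.])                       # toy weight mask
    actual_sparsity = (mask == 0).sum().item() / mask.numel()   # 2 zeros out of 4 entries
    assert math.isclose(actual_sparsity, 0.5, abs_tol=0.1)

Because the file ends with unittest's main(), it can also be run directly, e.g. python src/sdk/pynni/tests/test_pruners.py from the repository root (assuming torch and the nni SDK are importable in that environment).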
src/webui/src/components/trial-detail/Intermediate.tsx

@@ -282,7 +282,7 @@ class Intermediate extends React.Component<IntermediateProps, IntermediateState>
                         notMerge={true} // update now
                         onEvents={IntermediateEvents}
                     />
-                    <div className="yAxis"># Intermediate result</div>
+                    <div className="xAxis"># Intermediate result</div>
                 </div>
             </div>
         );
src/webui/src/components/trial-detail/TableList.tsx

@@ -245,8 +245,8 @@ class TableList extends React.Component<TableListProps, TableListState> {
             if (res.data.length !== 0) {
                 // just add type=number keys
                 const intermediateMetrics = parseMetrics(res.data[0].data);
-                for (const key in intermediateMetrics){
-                    if (typeof intermediateMetrics[key] === 'number') {
+                for (const key in intermediateMetrics) {
+                    if (typeof intermediateMetrics[key] === 'number') {
                         otherkeys.push(key);
                     }
                 }

@@ -622,6 +622,7 @@ class TableList extends React.Component<TableListProps, TableListState> {
                         : null
                     }
+                    <div className="intermediate-graph">
                     <ReactEcharts
                         option={intermediateOption}
                         style={{

@@ -631,6 +632,8 @@ class TableList extends React.Component<TableListProps, TableListState> {
                         }}
                         theme="my_theme"
                     />
+                    <div className="xAxis">#Intermediate result</div>
+                    </div>
                 </Modal>
                 {/* Add Column Modal */}
                 {
src/webui/src/static/function.ts

@@ -110,7 +110,7 @@ const intermediateGraphOption = (intermediateArr: number[], id: string): any =>
         trigger: 'item'
     },
     xAxis: {
-        name: '#Intermediate result',
+        name: 'Trial', // name: '#Intermediate result',
         data: sequence
     },
     yAxis: {
src/webui/src/static/style/trialsDetail.scss

@@ -108,7 +108,7 @@ $bg: #b3b3b3;
 /* for yAxis # intermediate position in intermediate graph*/
 .intermediate-graph {
     position: relative;
-    .yAxis {
+    .xAxis {
         color: #333;
         position: absolute;
         left: 50%;
test/pipelines-it-frameworkcontroller.yml

@@ -3,7 +3,7 @@
 jobs:
 - job: 'integration_test_frameworkController'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python3 -m pip install --upgrade pip setuptools --user
test/pipelines-it-kubeflow.yml

@@ -3,7 +3,7 @@
 jobs:
 - job: 'integration_test_kubeflow'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python3 -m pip install --upgrade pip setuptools --user
test/pipelines-it-local-windows.yml

 jobs:
 - job: 'integration_test_local_windows'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: |
test/pipelines-it-local.yml

 jobs:
 - job: 'integration_test_local_ubuntu'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python3 -m pip install --upgrade pip setuptools --user
test/pipelines-it-pai-windows.yml

 jobs:
 - job: 'build_docker_image'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   pool:
     vmImage: 'Ubuntu 16.04'
   steps:
test/pipelines-it-pai.yml

@@ -3,7 +3,7 @@
 jobs:
 - job: 'integration_test_pai'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python3 -m pip install --upgrade pip setuptools --user
test/pipelines-it-remote-windows.yml

 jobs:
 - job: 'integration_test_remote_windows'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python -m pip install --upgrade pip setuptools

@@ -11,6 +11,7 @@ jobs:
       targetFolder: /tmp/nnitest/$(Build.BuildId)/nni-remote
       overwrite: true
     displayName: 'Copy all files to remote machine'
+    timeoutInMinutes: 10
   - script: |
       powershell.exe -file install.ps1
     displayName: 'Install nni toolkit via source code'
test/pipelines-it-remote.yml

 jobs:
 - job: 'integration_test_remote'
-  timeoutInMinutes: 0
+  timeoutInMinutes: 120
   steps:
   - script: python3 -m pip install --upgrade pip setuptools --user

@@ -26,6 +26,7 @@ jobs:
       targetFolder: /tmp/nnitest/$(Build.BuildId)/dist
       overwrite: true
     displayName: 'Copy dist files to remote machine'
+    timeoutInMinutes: 10
   - task: CopyFilesOverSSH@0
     inputs:
       sshEndpoint: $(end_point)

@@ -33,6 +34,7 @@ jobs:
       targetFolder: /tmp/nnitest/$(Build.BuildId)/test
       overwrite: true
     displayName: 'Copy test files to remote machine'
+    timeoutInMinutes: 10
   - task: SSH@0
     inputs:
       sshEndpoint: $(end_point)
tools/nni_trial_tool/log_utils.py

@@ -36,7 +36,7 @@ class StdOutputType(Enum):
 def nni_log(log_type, log_message):
     '''Log message into stdout'''
     dt = datetime.now()
-    print('[{0}] {1} {2}'.format(dt, log_type.value, log_message))
+    print('[{0}] {1} {2}'.format(dt, log_type.value, log_message), flush=True)

 class NNIRestLogHanlder(StreamHandler):

     def __init__(self, host, port, tag, std_output_type=StdOutputType.Stdout):
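The flush=True added here matters because a trial's stdout is usually a pipe, which Python block-buffers; without an explicit flush a log line may not reach the collector until the buffer fills or the process exits. A minimal sketch of the flushed form (illustration only, with a hypothetical function name):

    from datetime import datetime

    def nni_log_sketch(log_type_value, log_message):
        # flush=True pushes the line out immediately even when stdout is block-buffered.
        print('[{0}] {1} {2}'.format(datetime.now(), log_type_value, log_message), flush=True)

    nni_log_sketch('INFO', 'trial started')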