OpenDAS / nni: Commit 67287997 (unverified)

Authored Apr 16, 2020 by SparkSnail; committed by GitHub on Apr 16, 2020
Parents: b4773e1e, f8d42a33

Merge pull request #241 from microsoft/master

merge master
Changes: 74 files in total. Showing 20 changed files with 852 additions and 1974 deletions (+852 / -1974).
examples/trials/nas_cifar10/config_paiYarn_ppo.yml            +0   -31
examples/trials/nas_cifar10/config_pai_ppo.yml                +0   -34
examples/trials/nas_cifar10/config_ppo.yml                    +0   -24
examples/trials/nas_cifar10/data/download.sh                  +0   -2
examples/trials/nas_cifar10/macro_cifar10.sh                  +0   -31
examples/trials/nas_cifar10/macro_cifar10_pai.sh              +0   -31
examples/trials/nas_cifar10/src/__init__.py                   +0   -0
examples/trials/nas_cifar10/src/cifar10/__init__.py           +0   -0
examples/trials/nas_cifar10/src/cifar10/data_utils.py         +0   -74
examples/trials/nas_cifar10/src/cifar10/general_child.py      +0   -423
examples/trials/nas_cifar10/src/cifar10/models.py             +0   -196
examples/trials/nas_cifar10/src/cifar10/nni_child_cifar10.py  +0   -162
examples/trials/nas_cifar10/src/cifar10_flags.py              +0   -45
examples/trials/nas_cifar10/src/common_ops.py                 +0   -255
examples/trials/nas_cifar10/src/utils.py                      +0   -262
examples/tuners/enas_nni/README.md                            +0   -6
examples/tuners/enas_nni/README_zh_CN.md                      +0   -5
src/nni_manager/package.json                                  +2   -3
src/nni_manager/training_service/common/util.ts               +1   -1
src/nni_manager/yarn.lock                                     +849 -389
examples/trials/nas_cifar10/config_paiYarn_ppo.yml (deleted, 100644 → 0)

authorName: Unknown
experimentName: enas_macro
trialConcurrency: 20
maxExecDuration: 2400h
maxTrialNum: 20000
#choice: local, remote
trainingServicePlatform: paiYarn
#choice: true, false
useAnnotation: true
multiPhase: false
versionCheck: false
nniManagerIp: 0.0.0.0
tuner:
  builtinTunerName: PPOTuner
  classArgs:
    optimize_mode: maximize
    trials_per_update: 60
    epochs_per_update: 20
    minibatch_size: 6
trial:
  command: sh ./macro_cifar10_pai.sh
  codeDir: ./
  gpuNum: 1
  cpuNum: 1
  memoryMB: 8196
  image: msranni/nni:latest
  virtualCluster: nni
paiYarnConfig:
  userName: your_account
  passWord: your_passwd
  host: 0.0.0.0
examples/trials/nas_cifar10/config_pai_ppo.yml (deleted, 100644 → 0)

authorName: Unknown
experimentName: enas_macro
trialConcurrency: 20
maxExecDuration: 2400h
maxTrialNum: 20000
#choice: local, remote
trainingServicePlatform: pai
#choice: true, false
useAnnotation: true
multiPhase: false
versionCheck: false
nniManagerIp: 0.0.0.0
tuner:
  builtinTunerName: PPOTuner
  classArgs:
    optimize_mode: maximize
    trials_per_update: 60
    epochs_per_update: 20
    minibatch_size: 6
trial:
  command: sh ./macro_cifar10_pai.sh
  codeDir: ./
  gpuNum: 1
  cpuNum: 1
  memoryMB: 8196
  image: msranni/nni:latest
  virtualCluster: nni
  nniManagerNFSMountPath: /home/user/mnt
  containerNFSMountPath: /mnt/data/user
  paiStoragePlugin: team_wise
paiConfig:
  userName: your_account
  token: your_token
  host: 0.0.0.0
examples/trials/nas_cifar10/config_ppo.yml (deleted, 100644 → 0)

authorName: Unknown
experimentName: enas_macro
trialConcurrency: 4
maxExecDuration: 2400h
maxTrialNum: 20000
#choice: local, remote
trainingServicePlatform: local
#choice: true, false
useAnnotation: true
multiPhase: false
tuner:
  builtinTunerName: PPOTuner
  classArgs:
    optimize_mode: maximize
    trials_per_update: 60
    epochs_per_update: 12
    minibatch_size: 10
  #could use the No. 0 gpu for this tuner
  #if want to specify multiple gpus, here is an example of specifying three gpus: 0,1,2
  gpuIndices: 0
trial:
  command: sh ./macro_cifar10.sh
  codeDir: ./
  gpuNum: 1
examples/trials/nas_cifar10/data/download.sh (deleted, 100755 → 0)

wget https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
tar xzf cifar-10-python.tar.gz && mv cifar-10-batches-py cifar10
\ No newline at end of file
examples/trials/nas_cifar10/macro_cifar10.sh (deleted, 100644 → 0)

#!/bin/bash
set -e
export PYTHONPATH="$(pwd)"

python3 src/cifar10/nni_child_cifar10.py \
  --data_format="NCHW" \
  --search_for="macro" \
  --reset_output_dir \
  --data_path="data/cifar10" \
  --output_dir="outputs" \
  --train_data_size=45000 \
  --batch_size=100 \
  --num_epochs=8 \
  --log_every=50 \
  --eval_every_epochs=1 \
  --child_use_aux_heads \
  --child_num_layers=12 \
  --child_out_filters=36 \
  --child_l2_reg=0.0002 \
  --child_num_branches=6 \
  --child_num_cell_layers=5 \
  --child_keep_prob=0.50 \
  --child_drop_path_keep_prob=0.60 \
  --child_lr_cosine \
  --child_lr_max=0.05 \
  --child_lr_min=0.001 \
  --child_lr_T_0=10 \
  --child_lr_T_mul=2 \
  --child_mode="subgraph" \
  "$@"
examples/trials/nas_cifar10/macro_cifar10_pai.sh (deleted, 100644 → 0)

#!/bin/bash
set -e
export PYTHONPATH="$(pwd)"

python3 src/cifar10/nni_child_cifar10.py \
  --data_format="NCHW" \
  --search_for="macro" \
  --reset_output_dir \
  --data_path="data/cifar10" \
  --output_dir="outputs" \
  --train_data_size=45000 \
  --batch_size=100 \
  --num_epochs=30 \
  --log_every=50 \
  --eval_every_epochs=1 \
  --child_use_aux_heads \
  --child_num_layers=12 \
  --child_out_filters=36 \
  --child_l2_reg=0.0002 \
  --child_num_branches=6 \
  --child_num_cell_layers=5 \
  --child_keep_prob=0.50 \
  --child_drop_path_keep_prob=0.60 \
  --child_lr_cosine \
  --child_lr_max=0.05 \
  --child_lr_min=0.001 \
  --child_lr_T_0=10 \
  --child_lr_T_mul=2 \
  --child_mode="subgraph" \
  "$@"
examples/trials/nas_cifar10/src/__init__.py (deleted, 100644 → 0; empty file)
examples/trials/nas_cifar10/src/cifar10/__init__.py (deleted, 100644 → 0; empty file)
examples/trials/nas_cifar10/src/cifar10/data_utils.py (deleted, 100644 → 0)

import os
import sys
import pickle
import numpy as np
import tensorflow as tf


def _read_data(data_path, train_files):
    """Reads CIFAR-10 format data. Always returns NHWC format.

    Returns:
      images: np tensor of size [N, H, W, C]
      labels: np tensor of size [N]
    """
    images, labels = [], []
    for file_name in train_files:
        print(file_name)
        full_name = os.path.join(data_path, file_name)
        with open(full_name, "rb") as finp:
            data = pickle.load(finp, encoding='latin1')
            batch_images = data["data"].astype(np.float32) / 255.0
            batch_labels = np.array(data["labels"], dtype=np.int32)
            images.append(batch_images)
            labels.append(batch_labels)
    images = np.concatenate(images, axis=0)
    labels = np.concatenate(labels, axis=0)
    images = np.reshape(images, [-1, 3, 32, 32])
    images = np.transpose(images, [0, 2, 3, 1])

    return images, labels


def read_data(data_path, num_valids=5000):
    print("-" * 80)
    print("Reading data")

    images, labels = {}, {}

    train_files = [
        "data_batch_1",
        "data_batch_2",
        "data_batch_3",
        "data_batch_4",
        "data_batch_5",
    ]
    test_file = [
        "test_batch",
    ]
    images["train"], labels["train"] = _read_data(data_path, train_files)

    if num_valids:
        images["valid"] = images["train"][-num_valids:]
        labels["valid"] = labels["train"][-num_valids:]

        images["train"] = images["train"][:-num_valids]
        labels["train"] = labels["train"][:-num_valids]
    else:
        images["valid"], labels["valid"] = None, None

    images["test"], labels["test"] = _read_data(data_path, test_file)

    print("Prepropcess: [subtract mean], [divide std]")
    mean = np.mean(images["train"], axis=(0, 1, 2), keepdims=True)
    std = np.std(images["train"], axis=(0, 1, 2), keepdims=True)

    print("mean: {}".format(np.reshape(mean * 255.0, [-1])))
    print("std: {}".format(np.reshape(std * 255.0, [-1])))

    images["train"] = (images["train"] - mean) / std
    if num_valids:
        images["valid"] = (images["valid"] - mean) / std
    images["test"] = (images["test"] - mean) / std

    return images, labels
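A minimal sketch of how this loader is driven, assuming the CIFAR-10 batches were unpacked into data/cifar10 by data/download.sh; the call mirrors the one in nni_child_cifar10.py and the --data_path value in the shell scripts, and is illustrative only, not part of the diff:

    from src.cifar10.data_utils import read_data

    # images/labels are dicts keyed by "train", "valid" and "test";
    # with num_valids=0 the "valid" entries are None, as in NASTrial.__init__.
    images, labels = read_data("data/cifar10", num_valids=0)
    print(images["train"].shape, labels["test"].shape)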
examples/trials/nas_cifar10/src/cifar10/general_child.py (deleted, 100644 → 0)

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf

from src.common_ops import create_weight, batch_norm, batch_norm_with_mask, global_avg_pool, conv_op, pool_op
from src.utils import count_model_params, get_train_ops, get_C, get_strides
from src.cifar10.models import Model


class GeneralChild(Model):
    def __init__(self,
                 images,
                 labels,
                 cutout_size=None,
                 fixed_arc=None,
                 out_filters_scale=1,
                 num_layers=2,
                 num_branches=6,
                 out_filters=24,
                 keep_prob=1.0,
                 batch_size=32,
                 clip_mode=None,
                 grad_bound=None,
                 l2_reg=1e-4,
                 lr_init=0.1,
                 lr_dec_start=0,
                 lr_dec_every=10000,
                 lr_dec_rate=0.1,
                 lr_cosine=False,
                 lr_max=None,
                 lr_min=None,
                 lr_T_0=None,
                 lr_T_mul=None,
                 optim_algo=None,
                 sync_replicas=False,
                 num_aggregate=None,
                 num_replicas=None,
                 data_format="NHWC",
                 name="child",
                 mode="subgraph",
                 *args,
                 **kwargs):
        super(self.__class__, self).__init__(
            images,
            labels,
            cutout_size=cutout_size,
            batch_size=batch_size,
            clip_mode=clip_mode,
            grad_bound=grad_bound,
            l2_reg=l2_reg,
            lr_init=lr_init,
            lr_dec_start=lr_dec_start,
            lr_dec_every=lr_dec_every,
            lr_dec_rate=lr_dec_rate,
            keep_prob=keep_prob,
            optim_algo=optim_algo,
            sync_replicas=sync_replicas,
            num_aggregate=num_aggregate,
            num_replicas=num_replicas,
            data_format=data_format,
            name=name)

        self.lr_cosine = lr_cosine
        self.lr_max = lr_max
        self.lr_min = lr_min
        self.lr_T_0 = lr_T_0
        self.lr_T_mul = lr_T_mul
        self.out_filters = out_filters * out_filters_scale
        self.num_layers = num_layers
        self.mode = mode

        self.num_branches = num_branches
        self.fixed_arc = fixed_arc
        self.out_filters_scale = out_filters_scale

        pool_distance = self.num_layers // 3
        self.pool_layers = [pool_distance - 1, 2 * pool_distance - 1]

    def _factorized_reduction(self, x, out_filters, stride, is_training):
        """Reduces the shape of x without information loss due to striding."""
        assert out_filters % 2 == 0, (
            "Need even number of filters when using this factorized reduction.")
        if stride == 1:
            with tf.variable_scope("path_conv"):
                inp_c = get_C(x, self.data_format)
                w = create_weight("w", [1, 1, inp_c, out_filters])
                x = tf.nn.conv2d(x, w, [1, 1, 1, 1], "SAME",
                                 data_format=self.data_format)
                x = batch_norm(x, is_training, data_format=self.data_format)
                return x

        stride_spec = get_strides(stride, self.data_format)
        # Skip path 1
        path1 = tf.nn.avg_pool(x, [1, 1, 1, 1], stride_spec, "VALID",
                               data_format=self.data_format)
        with tf.variable_scope("path1_conv"):
            inp_c = get_C(path1, self.data_format)
            w = create_weight("w", [1, 1, inp_c, out_filters // 2])
            path1 = tf.nn.conv2d(path1, w, [1, 1, 1, 1], "SAME",
                                 data_format=self.data_format)

        # Skip path 2
        # First pad with 0"s on the right and bottom, then shift the filter to
        # include those 0"s that were added.
        if self.data_format == "NHWC":
            pad_arr = [[0, 0], [0, 1], [0, 1], [0, 0]]
            path2 = tf.pad(x, pad_arr)[:, 1:, 1:, :]
            concat_axis = 3
        else:
            pad_arr = [[0, 0], [0, 0], [0, 1], [0, 1]]
            path2 = tf.pad(x, pad_arr)[:, :, 1:, 1:]
            concat_axis = 1

        path2 = tf.nn.avg_pool(path2, [1, 1, 1, 1], stride_spec, "VALID",
                               data_format=self.data_format)
        with tf.variable_scope("path2_conv"):
            inp_c = get_C(path2, self.data_format)
            w = create_weight("w", [1, 1, inp_c, out_filters // 2])
            path2 = tf.nn.conv2d(path2, w, [1, 1, 1, 1], "SAME",
                                 data_format=self.data_format)

        # Concat and apply BN
        final_path = tf.concat(values=[path1, path2], axis=concat_axis)
        final_path = batch_norm(final_path, is_training,
                                data_format=self.data_format)

        return final_path

    def _model(self, images, is_training, reuse=False):
        '''Build model'''
        with tf.variable_scope(self.name, reuse=reuse):
            layers = []
            out_filters = self.out_filters
            with tf.variable_scope("stem_conv"):
                w = create_weight("w", [3, 3, 3, out_filters])
                x = tf.nn.conv2d(images, w, [1, 1, 1, 1], "SAME",
                                 data_format=self.data_format)
                x = batch_norm(x, is_training, data_format=self.data_format)
                layers.append(x)

            def add_fixed_pooling_layer(layer_id, layers, out_filters, is_training):
                '''Add a fixed pooling layer every four layers'''
                out_filters *= 2
                with tf.variable_scope("pool_at_{0}".format(layer_id)):
                    pooled_layers = []
                    for i, layer in enumerate(layers):
                        with tf.variable_scope("from_{0}".format(i)):
                            x = self._factorized_reduction(
                                layer, out_filters, 2, is_training)
                        pooled_layers.append(x)
                    return pooled_layers, out_filters

            def post_process_out(out, optional_inputs):
                '''Form skip connection and perform batch norm'''
                with tf.variable_scope("skip"):
                    inputs = layers[-1]
                    if self.data_format == "NHWC":
                        inp_h = inputs.get_shape()[1].value
                        inp_w = inputs.get_shape()[2].value
                        inp_c = inputs.get_shape()[3].value
                        out.set_shape([None, inp_h, inp_w, out_filters])
                    elif self.data_format == "NCHW":
                        inp_c = inputs.get_shape()[1].value
                        inp_h = inputs.get_shape()[2].value
                        inp_w = inputs.get_shape()[3].value
                        out.set_shape([None, out_filters, inp_h, inp_w])
                    optional_inputs.append(out)
                    pout = tf.add_n(optional_inputs)
                    out = batch_norm(pout, is_training,
                                     data_format=self.data_format)
                layers.append(out)
                return out

            global layer_id
            layer_id = -1

            def get_layer_id():
                global layer_id
                layer_id += 1
                return 'layer_' + str(layer_id)

            def conv3(inputs):
                # res_layers is pre_layers that are chosen to form skip connection
                # layers[-1] is always the latest input
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_0'):
                        out = conv_op(inputs[0][0], 3, is_training, out_filters,
                                      out_filters, self.data_format, start_idx=None)
                        out = post_process_out(out, inputs[1])
                return out

            def conv3_sep(inputs):
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_1'):
                        out = conv_op(inputs[0][0], 3, is_training, out_filters,
                                      out_filters, self.data_format, start_idx=None,
                                      separable=True)
                        out = post_process_out(out, inputs[1])
                return out

            def conv5(inputs):
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_2'):
                        out = conv_op(inputs[0][0], 5, is_training, out_filters,
                                      out_filters, self.data_format, start_idx=None)
                        out = post_process_out(out, inputs[1])
                return out

            def conv5_sep(inputs):
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_3'):
                        out = conv_op(inputs[0][0], 5, is_training, out_filters,
                                      out_filters, self.data_format, start_idx=None,
                                      separable=True)
                        out = post_process_out(out, inputs[1])
                return out

            def avg_pool(inputs):
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_4'):
                        out = pool_op(inputs[0][0], is_training, out_filters,
                                      out_filters, "avg", self.data_format,
                                      start_idx=None)
                        out = post_process_out(out, inputs[1])
                return out

            def max_pool(inputs):
                with tf.variable_scope(get_layer_id()):
                    with tf.variable_scope('branch_5'):
                        out = pool_op(inputs[0][0], is_training, out_filters,
                                      out_filters, "max", self.data_format,
                                      start_idx=None)
                        out = post_process_out(out, inputs[1])
                return out

            """@nni.mutable_layers(
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs:[x],
layer_output: layer_0_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs:[layer_0_out],
optional_inputs: [layer_0_out],
optional_input_size: [0, 1],
layer_output: layer_1_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs:[layer_1_out],
optional_inputs: [layer_0_out, layer_1_out],
optional_input_size: [0, 1],
layer_output: layer_2_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs:[layer_2_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out],
optional_input_size: [0, 1],
layer_output: layer_3_out
}
)"""
            layers, out_filters = add_fixed_pooling_layer(
                3, layers, out_filters, is_training)
            layer_0_out, layer_1_out, layer_2_out, layer_3_out = layers[-4:]
            """@nni.mutable_layers(
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_3_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out],
optional_input_size: [0, 1],
layer_output: layer_4_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_4_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out],
optional_input_size: [0, 1],
layer_output: layer_5_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_5_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out],
optional_input_size: [0, 1],
layer_output: layer_6_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_6_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out],
optional_input_size: [0, 1],
layer_output: layer_7_out
}
)"""
            layers, out_filters = add_fixed_pooling_layer(
                7, layers, out_filters, is_training)
            layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out, layer_7_out = layers[-8:]
            """@nni.mutable_layers(
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_7_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out, layer_7_out],
optional_input_size: [0, 1],
layer_output: layer_8_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_8_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out, layer_7_out, layer_8_out],
optional_input_size: [0, 1],
layer_output: layer_9_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs: [layer_9_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out, layer_7_out, layer_8_out, layer_9_out],
optional_input_size: [0, 1],
layer_output: layer_10_out
},
{
layer_choice: [conv3(), conv3_sep(), conv5(), conv5_sep(), avg_pool(), max_pool()],
fixed_inputs:[layer_10_out],
optional_inputs: [layer_0_out, layer_1_out, layer_2_out, layer_3_out, layer_4_out, layer_5_out, layer_6_out, layer_7_out, layer_8_out, layer_9_out, layer_10_out],
optional_input_size: [0, 1],
layer_output: layer_11_out
}
)"""
            x = global_avg_pool(layer_11_out, data_format=self.data_format)

            if is_training:
                x = tf.nn.dropout(x, self.keep_prob)
            with tf.variable_scope("fc"):
                if self.data_format == "NHWC":
                    inp_c = x.get_shape()[3].value
                elif self.data_format == "NCHW":
                    inp_c = x.get_shape()[1].value
                else:
                    raise ValueError(
                        "Unknown data_format {0}".format(self.data_format))
                w = create_weight("w", [inp_c, 10])
                x = tf.matmul(x, w)
        return x

    # override
    def _build_train(self):
        print("-" * 80)
        print("Build train graph")
        logits = self._model(self.x_train, is_training=True)
        log_probs = tf.nn.sparse_softmax_cross_entropy_with_logits(
            logits=logits, labels=self.y_train)
        self.loss = tf.reduce_mean(log_probs)

        self.train_preds = tf.argmax(logits, axis=1)
        self.train_preds = tf.to_int32(self.train_preds)
        self.train_acc = tf.equal(self.train_preds, self.y_train)
        self.train_acc = tf.to_int32(self.train_acc)
        self.train_acc = tf.reduce_sum(self.train_acc)

        tf_variables = [
            var for var in tf.trainable_variables()
            if var.name.startswith(self.name)]
        self.num_vars = count_model_params(tf_variables)
        print("Model has {} params".format(self.num_vars))

        self.global_step = tf.Variable(
            0, dtype=tf.int32, trainable=False, name="global_step")

        self.train_op, self.lr, self.grad_norm, self.optimizer = get_train_ops(
            self.loss,
            tf_variables,
            self.global_step,
            clip_mode=self.clip_mode,
            grad_bound=self.grad_bound,
            l2_reg=self.l2_reg,
            lr_init=self.lr_init,
            lr_dec_start=self.lr_dec_start,
            lr_dec_every=self.lr_dec_every,
            lr_dec_rate=self.lr_dec_rate,
            lr_cosine=self.lr_cosine,
            lr_max=self.lr_max,
            lr_min=self.lr_min,
            lr_T_0=self.lr_T_0,
            lr_T_mul=self.lr_T_mul,
            num_train_batches=self.num_train_batches,
            optim_algo=self.optim_algo,
            sync_replicas=False,
            num_aggregate=self.num_aggregate,
            num_replicas=self.num_replicas)

    # override
    def _build_valid(self):
        if self.x_valid is not None:
            print("-" * 80)
            print("Build valid graph")
            logits = self._model(self.x_valid, False, reuse=True)
            self.valid_preds = tf.argmax(logits, axis=1)
            self.valid_preds = tf.to_int32(self.valid_preds)
            self.valid_acc = tf.equal(self.valid_preds, self.y_valid)
            self.valid_acc = tf.to_int32(self.valid_acc)
            self.valid_acc = tf.reduce_sum(self.valid_acc)

    # override
    def _build_test(self):
        print("-" * 80)
        print("Build test graph")
        logits = self._model(self.x_test, False, reuse=True)
        self.test_preds = tf.argmax(logits, axis=1)
        self.test_preds = tf.to_int32(self.test_preds)
        self.test_acc = tf.equal(self.test_preds, self.y_test)
        self.test_acc = tf.to_int32(self.test_acc)
        self.test_acc = tf.reduce_sum(self.test_acc)

    def build_model(self):
        self._build_train()
        self._build_valid()
        self._build_test()
examples/trials/nas_cifar10/src/cifar10/models.py (deleted, 100644 → 0)

import os
import sys
import numpy as np
import tensorflow as tf


class Model(object):
    def __init__(self,
                 images,
                 labels,
                 cutout_size=None,
                 batch_size=32,
                 eval_batch_size=100,
                 clip_mode=None,
                 grad_bound=None,
                 l2_reg=1e-4,
                 lr_init=0.1,
                 lr_dec_start=0,
                 lr_dec_every=100,
                 lr_dec_rate=0.1,
                 keep_prob=1.0,
                 optim_algo=None,
                 sync_replicas=False,
                 num_aggregate=None,
                 num_replicas=None,
                 data_format="NHWC",
                 name="generic_model",
                 seed=None,
                 ):
        """
        Args:
          lr_dec_every: number of epochs to decay
        """
        print("-" * 80)
        print("Build model {}".format(name))

        self.cutout_size = cutout_size
        self.batch_size = batch_size
        self.eval_batch_size = eval_batch_size
        self.clip_mode = clip_mode
        self.grad_bound = grad_bound
        self.l2_reg = l2_reg
        self.lr_init = lr_init
        self.lr_dec_start = lr_dec_start
        self.lr_dec_rate = lr_dec_rate
        self.keep_prob = keep_prob
        self.optim_algo = optim_algo
        self.sync_replicas = sync_replicas
        self.num_aggregate = num_aggregate
        self.num_replicas = num_replicas
        self.data_format = data_format
        self.name = name
        self.seed = seed

        self.global_step = None
        self.valid_acc = None
        self.test_acc = None
        print("Build data ops")
        with tf.device("/cpu:0"):
            # training data
            self.num_train_examples = np.shape(images["train"])[0]
            self.num_train_batches = (
                self.num_train_examples + self.batch_size - 1) // self.batch_size
            x_train, y_train = tf.train.shuffle_batch(
                [images["train"], labels["train"]],
                batch_size=self.batch_size,
                capacity=50000,
                enqueue_many=True,
                min_after_dequeue=0,
                num_threads=16,
                seed=self.seed,
                allow_smaller_final_batch=True,
            )
            self.lr_dec_every = lr_dec_every * self.num_train_batches

            def _pre_process(x):
                x = tf.pad(x, [[4, 4], [4, 4], [0, 0]])
                x = tf.random_crop(x, [32, 32, 3], seed=self.seed)
                x = tf.image.random_flip_left_right(x, seed=self.seed)
                if self.cutout_size is not None:
                    mask = tf.ones(
                        [self.cutout_size, self.cutout_size], dtype=tf.int32)
                    start = tf.random_uniform(
                        [2], minval=0, maxval=32, dtype=tf.int32)
                    mask = tf.pad(mask, [[self.cutout_size + start[0], 32 - start[0]],
                                         [self.cutout_size + start[1], 32 - start[1]]])
                    mask = mask[self.cutout_size: self.cutout_size + 32,
                                self.cutout_size: self.cutout_size + 32]
                    mask = tf.reshape(mask, [32, 32, 1])
                    mask = tf.tile(mask, [1, 1, 3])
                    x = tf.where(tf.equal(mask, 0), x=x, y=tf.zeros_like(x))
                if self.data_format == "NCHW":
                    x = tf.transpose(x, [2, 0, 1])

                return x
            self.x_train = tf.map_fn(_pre_process, x_train, back_prop=False)
            self.y_train = y_train

            # valid data
            self.x_valid, self.y_valid = None, None
            if images["valid"] is not None:
                images["valid_original"] = np.copy(images["valid"])
                labels["valid_original"] = np.copy(labels["valid"])
                if self.data_format == "NCHW":
                    images["valid"] = tf.transpose(
                        images["valid"], [0, 3, 1, 2])
                self.num_valid_examples = np.shape(images["valid"])[0]
                self.num_valid_batches = (
                    (self.num_valid_examples + self.eval_batch_size - 1)
                    // self.eval_batch_size)
                self.x_valid, self.y_valid = tf.train.batch(
                    [images["valid"], labels["valid"]],
                    batch_size=self.eval_batch_size,
                    capacity=5000,
                    enqueue_many=True,
                    num_threads=1,
                    allow_smaller_final_batch=True,
                )

            # test data
            if self.data_format == "NCHW":
                images["test"] = tf.transpose(images["test"], [0, 3, 1, 2])
            self.num_test_examples = np.shape(images["test"])[0]
            self.num_test_batches = (
                (self.num_test_examples + self.eval_batch_size - 1)
                // self.eval_batch_size)
            self.x_test, self.y_test = tf.train.batch(
                [images["test"], labels["test"]],
                batch_size=self.eval_batch_size,
                capacity=10000,
                enqueue_many=True,
                num_threads=1,
                allow_smaller_final_batch=True,
            )

        # cache images and labels
        self.images = images
        self.labels = labels

    def eval_once(self, sess, eval_set, child_model, verbose=False):
        """Expects self.acc and self.global_step to be defined.

        Args:
          sess: tf.Session() or one of its wrap arounds.
          feed_dict: can be used to give more information to sess.run().
          eval_set: "valid" or "test"
        """

        assert self.global_step is not None
        global_step = sess.run(self.global_step)
        print("Eval at {}".format(global_step))

        if eval_set == "valid":
            assert self.x_valid is not None
            assert self.valid_acc is not None
            num_examples = self.num_valid_examples
            num_batches = self.num_valid_batches
            acc_op = self.valid_acc
        elif eval_set == "test":
            assert self.test_acc is not None
            num_examples = self.num_test_examples
            num_batches = self.num_test_batches
            acc_op = self.test_acc
        else:
            raise NotImplementedError("Unknown eval_set '{}'".format(eval_set))

        total_acc = 0
        total_exp = 0

        for batch_id in range(num_batches):
            acc = sess.run(acc_op)
            total_acc += acc
            total_exp += self.eval_batch_size
            if verbose:
                sys.stdout.write(
                    "\r{:<5d}/{:>5d}".format(total_acc, total_exp))
        if verbose:
            print("")
        print("{}_accuracy: {:<6.4f}".format(
            eval_set, float(total_acc) / total_exp))

        return float(total_acc) / total_exp

    def _model(self, images, is_training, reuse=None):
        raise NotImplementedError("Abstract method")

    def _build_train(self):
        raise NotImplementedError("Abstract method")

    def _build_valid(self):
        raise NotImplementedError("Abstract method")

    def _build_test(self):
        raise NotImplementedError("Abstract method")
examples/trials/nas_cifar10/src/cifar10/nni_child_cifar10.py (deleted, 100644 → 0)

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import shutil
import logging
import tensorflow as tf
from src.cifar10.data_utils import read_data
from src.cifar10.general_child import GeneralChild
import src.cifar10_flags
from src.cifar10_flags import FLAGS


def build_logger(log_name):
    logger = logging.getLogger(log_name)
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(log_name + '.log')
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)
    return logger


logger = build_logger("nni_child_cifar10")


def build_trial(images, labels, ChildClass):
    '''Build child class'''
    child_model = ChildClass(
        images,
        labels,
        use_aux_heads=FLAGS.child_use_aux_heads,
        cutout_size=FLAGS.child_cutout_size,
        num_layers=FLAGS.child_num_layers,
        num_cells=FLAGS.child_num_cells,
        num_branches=FLAGS.child_num_branches,
        fixed_arc=FLAGS.child_fixed_arc,
        out_filters_scale=FLAGS.child_out_filters_scale,
        out_filters=FLAGS.child_out_filters,
        keep_prob=FLAGS.child_keep_prob,
        drop_path_keep_prob=FLAGS.child_drop_path_keep_prob,
        num_epochs=FLAGS.num_epochs,
        l2_reg=FLAGS.child_l2_reg,
        data_format=FLAGS.data_format,
        batch_size=FLAGS.batch_size,
        clip_mode="norm",
        grad_bound=FLAGS.child_grad_bound,
        lr_init=FLAGS.child_lr,
        lr_dec_every=FLAGS.child_lr_dec_every,
        lr_dec_rate=FLAGS.child_lr_dec_rate,
        lr_cosine=FLAGS.child_lr_cosine,
        lr_max=FLAGS.child_lr_max,
        lr_min=FLAGS.child_lr_min,
        lr_T_0=FLAGS.child_lr_T_0,
        lr_T_mul=FLAGS.child_lr_T_mul,
        optim_algo="momentum",
        sync_replicas=FLAGS.child_sync_replicas,
        num_aggregate=FLAGS.child_num_aggregate,
        num_replicas=FLAGS.child_num_replicas)
    return child_model


def get_child_ops(child_model):
    '''Assemble child op to a dict'''
    child_ops = {
        "global_step": child_model.global_step,
        "loss": child_model.loss,
        "train_op": child_model.train_op,
        "lr": child_model.lr,
        "grad_norm": child_model.grad_norm,
        "train_acc": child_model.train_acc,
        "optimizer": child_model.optimizer,
        "num_train_batches": child_model.num_train_batches,
        "eval_every": child_model.num_train_batches * FLAGS.eval_every_epochs,
        "eval_func": child_model.eval_once,
    }
    return child_ops


class NASTrial():

    def __init__(self):
        images, labels = read_data(FLAGS.data_path, num_valids=0)

        self.output_dir = os.path.join(os.getenv('NNI_OUTPUT_DIR'), '../..')
        self.file_path = os.path.join(
            self.output_dir, 'trainable_variable.txt')

        self.graph = tf.Graph()
        with self.graph.as_default():
            self.child_model = build_trial(images, labels, GeneralChild)

            self.total_data = {}

            self.child_model.build_model()
            self.child_ops = get_child_ops(self.child_model)
            config = tf.ConfigProto(intra_op_parallelism_threads=0,
                                    inter_op_parallelism_threads=0,
                                    allow_soft_placement=True)

            self.sess = tf.train.SingularMonitoredSession(config=config)
        logger.debug('initlize NASTrial done.')

    def run_one_step(self):
        '''Run this model on a batch of data'''
        run_ops = [
            self.child_ops["loss"],
            self.child_ops["lr"],
            self.child_ops["grad_norm"],
            self.child_ops["train_acc"],
            self.child_ops["train_op"],
        ]
        loss, lr, gn, tr_acc, _ = self.sess.run(run_ops)
        global_step = self.sess.run(self.child_ops["global_step"])
        log_string = ""
        log_string += "ch_step={:<6d}".format(global_step)
        log_string += " loss={:<8.6f}".format(loss)
        log_string += " lr={:<8.4f}".format(lr)
        log_string += " |g|={:<8.4f}".format(gn)
        log_string += " tr_acc={:<3d}/{:>3d}".format(tr_acc, FLAGS.batch_size)
        if int(global_step) % FLAGS.log_every == 0:
            logger.debug(log_string)
        return loss, global_step

    def run(self):
        '''Run this model according to the `epoch` set in FALGS'''
        max_acc = 0
        while True:
            _, global_step = self.run_one_step()
            if global_step % self.child_ops['num_train_batches'] == 0:
                acc = self.child_ops["eval_func"](
                    self.sess, "test", self.child_model)
                max_acc = max(max_acc, acc)
                '''@nni.report_intermediate_result(acc)'''
            if global_step / self.child_ops['num_train_batches'] >= FLAGS.num_epochs:
                '''@nni.report_final_result(max_acc)'''
                break


def main(_):
    logger.debug("-" * 80)

    if not os.path.isdir(FLAGS.output_dir):
        logger.debug(
            "Path {} does not exist. Creating.".format(FLAGS.output_dir))
        os.makedirs(FLAGS.output_dir)
    elif FLAGS.reset_output_dir:
        logger.debug(
            "Path {} exists. Remove and remake.".format(FLAGS.output_dir))
        shutil.rmtree(FLAGS.output_dir)
        os.makedirs(FLAGS.output_dir)
    logger.debug("-" * 80)
    trial = NASTrial()
    trial.run()


if __name__ == "__main__":
    tf.app.run()
examples/trials/nas_cifar10/src/cifar10_flags.py (deleted, 100644 → 0)

import tensorflow as tf
from src.utils import DEFINE_boolean
from src.utils import DEFINE_float
from src.utils import DEFINE_integer
from src.utils import DEFINE_string

flags = tf.app.flags
FLAGS = flags.FLAGS

DEFINE_boolean("reset_output_dir", False, "Delete output_dir if exists.")
DEFINE_string("data_path", "", "")
DEFINE_string("output_dir", "", "")
DEFINE_string("data_format", "NHWC", "'NHWC' or 'NCWH'")
DEFINE_string("search_for", None, "Must be [macro|micro]")
DEFINE_integer("train_data_size", 45000, "")
DEFINE_integer("batch_size", 32, "")
DEFINE_integer("num_epochs", 300, "")
DEFINE_integer("child_lr_dec_every", 100, "")
DEFINE_integer("child_num_layers", 5, "")
DEFINE_integer("child_num_cells", 5, "")
DEFINE_integer("child_filter_size", 5, "")
DEFINE_integer("child_out_filters", 48, "")
DEFINE_integer("child_out_filters_scale", 1, "")
DEFINE_integer("child_num_branches", 4, "")
DEFINE_integer("child_num_aggregate", None, "")
DEFINE_integer("child_num_replicas", 1, "")
DEFINE_integer("child_block_size", 3, "")
DEFINE_integer("child_lr_T_0", None, "for lr schedule")
DEFINE_integer("child_lr_T_mul", None, "for lr schedule")
DEFINE_integer("child_cutout_size", None, "CutOut size")
DEFINE_float("child_grad_bound", 5.0, "Gradient clipping")
DEFINE_float("child_lr", 0.1, "")
DEFINE_float("child_lr_dec_rate", 0.1, "")
DEFINE_float("child_keep_prob", 0.5, "")
DEFINE_float("child_drop_path_keep_prob", 1.0, "minimum drop_path_keep_prob")
DEFINE_float("child_l2_reg", 1e-4, "")
DEFINE_float("child_lr_max", None, "for lr schedule")
DEFINE_float("child_lr_min", None, "for lr schedule")
DEFINE_string("child_skip_pattern", None, "Must be ['dense', None]")
DEFINE_string("child_fixed_arc", None, "")
DEFINE_boolean("child_use_aux_heads", False, "Should we use an aux head")
DEFINE_boolean("child_sync_replicas", False, "To sync or not to sync.")
DEFINE_boolean("child_lr_cosine", False, "Use cosine lr schedule")
DEFINE_integer("log_every", 50, "How many steps to log")
DEFINE_integer("eval_every_epochs", 1, "How many epochs to eval")
examples/trials/nas_cifar10/src/common_ops.py (deleted, 100644 → 0)

import numpy as np
import tensorflow as tf
from tensorflow.python.training import moving_averages


def lstm(x, prev_c, prev_h, w):
    ifog = tf.matmul(tf.concat([x, prev_h], axis=1), w)
    i, f, o, g = tf.split(ifog, 4, axis=1)
    i = tf.sigmoid(i)
    f = tf.sigmoid(f)
    o = tf.sigmoid(o)
    g = tf.tanh(g)
    next_c = i * g + f * prev_c
    next_h = o * tf.tanh(next_c)
    return next_c, next_h


def stack_lstm(x, prev_c, prev_h, w):
    next_c, next_h = [], []
    for layer_id, (_c, _h, _w) in enumerate(zip(prev_c, prev_h, w)):
        inputs = x if layer_id == 0 else next_h[-1]
        curr_c, curr_h = lstm(inputs, _c, _h, _w)
        next_c.append(curr_c)
        next_h.append(curr_h)
    return next_c, next_h


def create_weight(name, shape, initializer=None, trainable=True, seed=None):
    if initializer is None:
        initializer = tf.contrib.keras.initializers.he_normal(seed=seed)
    return tf.get_variable(name, shape, initializer=initializer,
                           trainable=trainable)


def create_bias(name, shape, initializer=None):
    if initializer is None:
        initializer = tf.constant_initializer(0.0, dtype=tf.float32)
    return tf.get_variable(name, shape, initializer=initializer)


def conv_op(inputs, filter_size, is_training, count, out_filters,
            data_format, ch_mul=1, start_idx=None, separable=False):
    """
    Args:
      start_idx: where to start taking the output channels. if None, assuming
        fixed_arc mode
      count: how many output_channels to take.
    """

    if data_format == "NHWC":
        inp_c = inputs.get_shape()[3].value
    elif data_format == "NCHW":
        inp_c = inputs.get_shape()[1].value

    with tf.variable_scope("inp_conv_1"):
        w = create_weight("w", [1, 1, inp_c, out_filters])
        x = tf.nn.conv2d(inputs, w, [1, 1, 1, 1], "SAME",
                         data_format=data_format)
        x = batch_norm(x, is_training, data_format=data_format)
        x = tf.nn.relu(x)

    with tf.variable_scope("out_conv_{}".format(filter_size)):
        if start_idx is None:
            if separable:
                w_depth = create_weight(
                    "w_depth", [filter_size, filter_size, out_filters, ch_mul])
                w_point = create_weight(
                    "w_point", [1, 1, out_filters * ch_mul, count])
                x = tf.nn.separable_conv2d(x, w_depth, w_point,
                                           strides=[1, 1, 1, 1], padding="SAME",
                                           data_format=data_format)
                x = batch_norm(x, is_training, data_format=data_format)
            else:
                w = create_weight(
                    "w", [filter_size, filter_size, inp_c, count])
                x = tf.nn.conv2d(x, w, [1, 1, 1, 1], "SAME",
                                 data_format=data_format)
                x = batch_norm(x, is_training, data_format=data_format)
        else:
            if separable:
                w_depth = create_weight(
                    "w_depth", [filter_size, filter_size, out_filters, ch_mul])
                #test_depth = w_depth
                w_point = create_weight(
                    "w_point", [out_filters, out_filters * ch_mul])
                w_point = w_point[start_idx:start_idx + count, :]
                w_point = tf.transpose(w_point, [1, 0])
                w_point = tf.reshape(
                    w_point, [1, 1, out_filters * ch_mul, count])

                x = tf.nn.separable_conv2d(x, w_depth, w_point,
                                           strides=[1, 1, 1, 1], padding="SAME",
                                           data_format=data_format)
                mask = tf.range(0, out_filters, dtype=tf.int32)
                mask = tf.logical_and(
                    start_idx <= mask, mask < start_idx + count)
                x = batch_norm_with_mask(
                    x, is_training, mask, out_filters, data_format=data_format)
            else:
                w = create_weight(
                    "w", [filter_size, filter_size, out_filters, out_filters])
                w = tf.transpose(w, [3, 0, 1, 2])
                w = w[start_idx:start_idx + count, :, :, :]
                w = tf.transpose(w, [1, 2, 3, 0])
                x = tf.nn.conv2d(x, w, [1, 1, 1, 1], "SAME",
                                 data_format=data_format)
                mask = tf.range(0, out_filters, dtype=tf.int32)
                mask = tf.logical_and(
                    start_idx <= mask, mask < start_idx + count)
                x = batch_norm_with_mask(
                    x, is_training, mask, out_filters, data_format=data_format)
        x = tf.nn.relu(x)
    return x


def pool_op(inputs, is_training, count, out_filters, avg_or_max, data_format,
            start_idx=None):
    """
    Args:
      start_idx: where to start taking the output channels. if None, assuming
        fixed_arc mode
      count: how many output_channels to take.
    """

    if data_format == "NHWC":
        inp_c = inputs.get_shape()[3].value
    elif data_format == "NCHW":
        inp_c = inputs.get_shape()[1].value

    with tf.variable_scope("conv_1"):
        w = create_weight("w", [1, 1, inp_c, out_filters])
        x = tf.nn.conv2d(inputs, w, [1, 1, 1, 1], "SAME",
                         data_format=data_format)
        x = batch_norm(x, is_training, data_format=data_format)
        x = tf.nn.relu(x)

    with tf.variable_scope("pool"):
        if data_format == "NHWC":
            actual_data_format = "channels_last"
        elif data_format == "NCHW":
            actual_data_format = "channels_first"

        if avg_or_max == "avg":
            x = tf.layers.average_pooling2d(
                x, [3, 3], [1, 1], "SAME", data_format=actual_data_format)
        elif avg_or_max == "max":
            x = tf.layers.max_pooling2d(
                x, [3, 3], [1, 1], "SAME", data_format=actual_data_format)
        else:
            raise ValueError("Unknown pool {}".format(avg_or_max))

        if start_idx is not None:
            if data_format == "NHWC":
                x = x[:, :, :, start_idx:start_idx + count]
            elif data_format == "NCHW":
                x = x[:, start_idx:start_idx + count, :, :]

    return x


def global_avg_pool(x, data_format="NHWC"):
    if data_format == "NHWC":
        x = tf.reduce_mean(x, [1, 2])
    elif data_format == "NCHW":
        x = tf.reduce_mean(x, [2, 3])
    else:
        raise NotImplementedError("Unknown data_format {}".format(data_format))
    return x


def batch_norm(x, is_training, name="bn", decay=0.9, epsilon=1e-5,
               data_format="NHWC"):
    if data_format == "NHWC":
        shape = [x.get_shape()[3]]
    elif data_format == "NCHW":
        shape = [x.get_shape()[1]]
    else:
        raise NotImplementedError("Unknown data_format {}".format(data_format))

    with tf.variable_scope(name, reuse=None if is_training else True):
        offset = tf.get_variable(
            "offset", shape,
            initializer=tf.constant_initializer(0.0, dtype=tf.float32))
        scale = tf.get_variable(
            "scale", shape,
            initializer=tf.constant_initializer(1.0, dtype=tf.float32))
        moving_mean = tf.get_variable(
            "moving_mean", shape, trainable=False,
            initializer=tf.constant_initializer(0.0, dtype=tf.float32))
        moving_variance = tf.get_variable(
            "moving_variance", shape, trainable=False,
            initializer=tf.constant_initializer(1.0, dtype=tf.float32))

        if is_training:
            x, mean, variance = tf.nn.fused_batch_norm(
                x, scale, offset, epsilon=epsilon, data_format=data_format,
                is_training=True)
            update_mean = moving_averages.assign_moving_average(
                moving_mean, mean, decay)
            update_variance = moving_averages.assign_moving_average(
                moving_variance, variance, decay)
            with tf.control_dependencies([update_mean, update_variance]):
                x = tf.identity(x)
        else:
            x, _, _ = tf.nn.fused_batch_norm(x, scale, offset,
                                             mean=moving_mean,
                                             variance=moving_variance,
                                             epsilon=epsilon,
                                             data_format=data_format,
                                             is_training=False)
    return x


def batch_norm_with_mask(x, is_training, mask, num_channels, name="bn",
                         decay=0.9, epsilon=1e-3, data_format="NHWC"):

    shape = [num_channels]
    indices = tf.where(mask)
    indices = tf.to_int32(indices)
    indices = tf.reshape(indices, [-1])

    with tf.variable_scope(name, reuse=None if is_training else True):
        offset = tf.get_variable(
            "offset", shape,
            initializer=tf.constant_initializer(0.0, dtype=tf.float32))
        scale = tf.get_variable(
            "scale", shape,
            initializer=tf.constant_initializer(1.0, dtype=tf.float32))
        offset = tf.boolean_mask(offset, mask)
        scale = tf.boolean_mask(scale, mask)

        moving_mean = tf.get_variable(
            "moving_mean", shape, trainable=False,
            initializer=tf.constant_initializer(0.0, dtype=tf.float32))
        moving_variance = tf.get_variable(
            "moving_variance", shape, trainable=False,
            initializer=tf.constant_initializer(1.0, dtype=tf.float32))

        if is_training:
            x, mean, variance = tf.nn.fused_batch_norm(
                x, scale, offset, epsilon=epsilon, data_format=data_format,
                is_training=True)
            mean = (1.0 - decay) * (tf.boolean_mask(moving_mean, mask) - mean)
            variance = (1.0 - decay) * \
                (tf.boolean_mask(moving_variance, mask) - variance)
            update_mean = tf.scatter_sub(
                moving_mean, indices, mean, use_locking=True)
            update_variance = tf.scatter_sub(
                moving_variance, indices, variance, use_locking=True)
            with tf.control_dependencies([update_mean, update_variance]):
                x = tf.identity(x)
        else:
            masked_moving_mean = tf.boolean_mask(moving_mean, mask)
            masked_moving_variance = tf.boolean_mask(moving_variance, mask)
            x, _, _ = tf.nn.fused_batch_norm(x, scale, offset,
                                             mean=masked_moving_mean,
                                             variance=masked_moving_variance,
                                             epsilon=epsilon,
                                             data_format=data_format,
                                             is_training=False)
    return x
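For orientation, a small sketch of driving the stacked-LSTM helper defined above, assuming a TensorFlow 1.x graph; the batch and hidden sizes are made up for illustration and are not part of the diff:

    import tensorflow as tf
    from src.common_ops import stack_lstm

    batch, hidden, num_layers = 4, 32, 2
    x = tf.zeros([batch, hidden])
    prev_c = [tf.zeros([batch, hidden]) for _ in range(num_layers)]
    prev_h = [tf.zeros([batch, hidden]) for _ in range(num_layers)]
    # each layer's weight maps the concatenated [input, h] to the four LSTM gates
    w = [tf.get_variable("w_{}".format(i), [2 * hidden, 4 * hidden])
         for i in range(num_layers)]
    next_c, next_h = stack_lstm(x, prev_c, prev_h, w)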
examples/trials/nas_cifar10/src/utils.py (deleted, 100644 → 0)

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import sys
import numpy as np
import tensorflow as tf

user_flags = []


def DEFINE_string(name, default_value, doc_string):
    tf.app.flags.DEFINE_string(name, default_value, doc_string)
    global user_flags
    user_flags.append(name)


def DEFINE_integer(name, default_value, doc_string):
    tf.app.flags.DEFINE_integer(name, default_value, doc_string)
    global user_flags
    user_flags.append(name)


def DEFINE_float(name, default_value, doc_string):
    tf.app.flags.DEFINE_float(name, default_value, doc_string)
    global user_flags
    user_flags.append(name)


def DEFINE_boolean(name, default_value, doc_string):
    tf.app.flags.DEFINE_boolean(name, default_value, doc_string)
    global user_flags
    user_flags.append(name)


def print_user_flags(line_limit=80):
    print("-" * 80)

    global user_flags
    FLAGS = tf.app.flags.FLAGS

    for flag_name in sorted(user_flags):
        value = "{}".format(getattr(FLAGS, flag_name))
        log_string = flag_name
        log_string += "." * (line_limit - len(flag_name) - len(value))
        log_string += value
        print(log_string)


def get_C(x, data_format):
    """
    Args:
      x: tensor of shape [N, H, W, C] or [N, C, H, W]
    """
    if data_format == "NHWC":
        return x.get_shape()[3].value
    elif data_format == "NCHW":
        return x.get_shape()[1].value
    else:
        raise ValueError("Unknown data_format '{0}'".format(data_format))


def get_HW(x, data_format):
    """
    Args:
      x: tensor of shape [N, H, W, C] or [N, C, H, W]
    """
    return x.get_shape()[2].value


def get_strides(stride, data_format):
    """
    Args:
      x: tensor of shape [N, H, W, C] or [N, C, H, W]
    """
    if data_format == "NHWC":
        return [1, stride, stride, 1]
    elif data_format == "NCHW":
        return [1, 1, stride, stride]
    else:
        raise ValueError("Unknown data_format '{0}'".format(data_format))


class TextColors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


class Logger(object):
    def __init__(self, output_file):
        self.terminal = sys.stdout
        self.log = open(output_file, "a")

    def write(self, message):
        self.terminal.write(message)
        self.terminal.flush()
        self.log.write(message)
        self.log.flush()


def count_model_params(tf_variables):
    """
    Args:
      tf_variables: list of all model variables
    """

    num_vars = 0
    for var in tf_variables:
        num_vars += np.prod([dim.value for dim in var.get_shape()])
    return num_vars


def get_train_ops(
        loss,
        tf_variables,
        train_step,
        clip_mode=None,
        grad_bound=None,
        l2_reg=1e-4,
        lr_warmup_val=None,
        lr_warmup_steps=100,
        lr_init=0.1,
        lr_dec_start=0,
        lr_dec_every=10000,
        lr_dec_rate=0.1,
        lr_dec_min=None,
        lr_cosine=False,
        lr_max=None,
        lr_min=None,
        lr_T_0=None,
        lr_T_mul=None,
        num_train_batches=None,
        optim_algo=None,
        sync_replicas=False,
        num_aggregate=None,
        num_replicas=None,
        get_grad_norms=False,
        moving_average=None):
    """
    Args:
      clip_mode: "global", "norm", or None.
      moving_average: store the moving average of parameters
    """

    if l2_reg > 0:
        l2_losses = []
        for var in tf_variables:
            l2_losses.append(tf.reduce_sum(var ** 2))
        l2_loss = tf.add_n(l2_losses)
        loss += l2_reg * l2_loss

    grads = tf.gradients(loss, tf_variables)
    grad_norm = tf.global_norm(grads)

    grad_norms = {}
    for v, g in zip(tf_variables, grads):
        if v is None or g is None:
            continue
        if isinstance(g, tf.IndexedSlices):
            grad_norms[v.name] = tf.sqrt(tf.reduce_sum(g.values ** 2))
        else:
            grad_norms[v.name] = tf.sqrt(tf.reduce_sum(g ** 2))

    if clip_mode is not None:
        assert grad_bound is not None, "Need grad_bound to clip gradients."
        if clip_mode == "global":
            grads, _ = tf.clip_by_global_norm(grads, grad_bound)
        elif clip_mode == "norm":
            clipped = []
            for g in grads:
                if isinstance(g, tf.IndexedSlices):
                    c_g = tf.clip_by_norm(g.values, grad_bound)
                    c_g = tf.IndexedSlices(g.indices, c_g)
                else:
                    c_g = tf.clip_by_norm(g, grad_bound)
                clipped.append(g)
            grads = clipped
        else:
            raise NotImplementedError("Unknown clip_mode {}".format(clip_mode))

    if lr_cosine:
        assert lr_max is not None, "Need lr_max to use lr_cosine"
        assert lr_min is not None, "Need lr_min to use lr_cosine"
        assert lr_T_0 is not None, "Need lr_T_0 to use lr_cosine"
        assert lr_T_mul is not None, "Need lr_T_mul to use lr_cosine"
        assert num_train_batches is not None, ("Need num_train_batches to use"
                                               " lr_cosine")

        curr_epoch = train_step // num_train_batches

        last_reset = tf.Variable(0, dtype=tf.int32, trainable=False,
                                 name="last_reset")
        T_i = tf.Variable(lr_T_0, dtype=tf.int32, trainable=False, name="T_i")
        T_curr = curr_epoch - last_reset

        def _update():
            update_last_reset = tf.assign(
                last_reset, curr_epoch, use_locking=True)
            update_T_i = tf.assign(T_i, T_i * lr_T_mul, use_locking=True)
            with tf.control_dependencies([update_last_reset, update_T_i]):
                rate = tf.to_float(T_curr) / tf.to_float(T_i) * 3.1415926
                lr = lr_min + 0.5 * (lr_max - lr_min) * (1.0 + tf.cos(rate))
            return lr

        def _no_update():
            rate = tf.to_float(T_curr) / tf.to_float(T_i) * 3.1415926
            lr = lr_min + 0.5 * (lr_max - lr_min) * (1.0 + tf.cos(rate))
            return lr

        learning_rate = tf.cond(
            tf.greater_equal(T_curr, T_i), _update, _no_update)
    else:
        learning_rate = tf.train.exponential_decay(
            lr_init, tf.maximum(train_step - lr_dec_start, 0), lr_dec_every,
            lr_dec_rate, staircase=True)
        if lr_dec_min is not None:
            learning_rate = tf.maximum(learning_rate, lr_dec_min)

    if lr_warmup_val is not None:
        learning_rate = tf.cond(tf.less(train_step, lr_warmup_steps),
                                lambda: lr_warmup_val, lambda: learning_rate)

    if optim_algo == "momentum":
        opt = tf.train.MomentumOptimizer(
            learning_rate, 0.9, use_locking=True, use_nesterov=True)
    elif optim_algo == "sgd":
        opt = tf.train.GradientDescentOptimizer(
            learning_rate, use_locking=True)
    elif optim_algo == "adam":
        opt = tf.train.AdamOptimizer(learning_rate, beta1=0.0, epsilon=1e-3,
                                     use_locking=True)
    else:
        raise ValueError("Unknown optim_algo {}".format(optim_algo))

    if sync_replicas:
        assert num_aggregate is not None, "Need num_aggregate to sync."
        assert num_replicas is not None, "Need num_replicas to sync."

        opt = tf.train.SyncReplicasOptimizer(
            opt,
            replicas_to_aggregate=num_aggregate,
            total_num_replicas=num_replicas,
            use_locking=True)

    if moving_average is not None:
        opt = tf.contrib.opt.MovingAverageOptimizer(
            opt, average_decay=moving_average)

    train_op = opt.apply_gradients(
        zip(grads, tf_variables), global_step=train_step)

    if get_grad_norms:
        return train_op, learning_rate, grad_norm, opt, grad_norms
    else:
        return train_op, learning_rate, grad_norm, opt
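A short sketch of the flag helpers defined above, assuming the TensorFlow 1.x tf.app.flags machinery; the flag names below are examples only, not taken from the diff:

    import tensorflow as tf
    from src.utils import DEFINE_integer, DEFINE_string, print_user_flags

    DEFINE_string("example_data_path", "data/cifar10", "where the CIFAR-10 batches live")
    DEFINE_integer("example_batch_size", 100, "trial batch size")

    def main(_):
        # prints every flag registered through the DEFINE_* wrappers,
        # padded with dots to an 80-column line
        print_user_flags()

    if __name__ == "__main__":
        tf.app.run()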
examples/tuners/enas_nni/README.md (deleted, 100644 → 0)

**Run ENAS in NNI**
===

We now have an ENAS example, [enas-nni](https://github.com/countif/enas_nni), that runs in NNI, contributed by the community.
Thanks to our lovely contributors.
And welcome more and more people to join us!
examples/tuners/enas_nni/README_zh_CN.md (deleted, 100644 → 0)

**Run ENAS in NNI**
===

The [enas-nni](https://github.com/countif/enas_nni) example from our contributors can run in NNI. Many thanks!
More volunteers are welcome to join us!
\ No newline at end of file
src/nni_manager/package.json (view file @ 67287997)

@@ -48,9 +48,9 @@
     "chai": "^4.1.2",
     "eslint": "^6.7.2",
     "glob": "^7.1.3",
-    "mocha": "^5.2.0",
+    "mocha": "^7.1.1",
     "npx": "^10.2.0",
-    "nyc": "^13.1.0",
+    "nyc": "^15.0.0",
     "request": "^2.87.0",
     "rmdir": "^1.2.0",
     "tmp": "^0.0.33",
@@ -59,7 +59,6 @@
   },
   "resolutions": {
-    "mem": "^4.0.0",
     "handlebars": "^4.5.3",
     "lodash": "^4.17.13",
     "lodash.merge": "^4.6.2",
     "node.extend": "^1.1.7",
...
...
src/nni_manager/training_service/common/util.ts (view file @ 67287997)

@@ -69,7 +69,7 @@ export async function execMkdir(directory: string, share: boolean = false): Prom
  */
 export async function execCopydir(source: string, destination: string): Promise<void> {
     if (process.platform === 'win32') {
-        await cpp.exec(`powershell.exe Copy-Item "${source}" -Destination "${destination}" -Recurse`);
+        await cpp.exec(`powershell.exe Copy-Item "${source}\\*" -Destination "${destination}" -Recurse`);
     } else {
         await cpp.exec(`cp -r '${source}/.' '${destination}'`);
     }
...
...
src/nni_manager/yarn.lock (view file @ 67287997)
...
...
@@ -8,35 +8,133 @@
dependencies:
"@babel/highlight" "^7.0.0"
"@babel/generator@^7.0.0", "@babel/generator@^7.2.2":
version "7.2.2"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.2.2.tgz#18c816c70962640eab42fe8cae5f3947a5c65ccc"
"@babel/code-frame@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e"
integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==
dependencies:
"@babel/highlight" "^7.8.3"
"@babel/core@^7.7.5":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.9.0.tgz#ac977b538b77e132ff706f3b8a4dbad09c03c56e"
integrity sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/generator" "^7.9.0"
"@babel/helper-module-transforms" "^7.9.0"
"@babel/helpers" "^7.9.0"
"@babel/parser" "^7.9.0"
"@babel/template" "^7.8.6"
"@babel/traverse" "^7.9.0"
"@babel/types" "^7.9.0"
convert-source-map "^1.7.0"
debug "^4.1.0"
gensync "^1.0.0-beta.1"
json5 "^2.1.2"
lodash "^4.17.13"
resolve "^1.3.2"
semver "^5.4.1"
source-map "^0.5.0"
"@babel/generator@^7.9.0":
version "7.9.4"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.9.4.tgz#12441e90c3b3c4159cdecf312075bf1a8ce2dbce"
integrity sha512-rjP8ahaDy/ouhrvCoU1E5mqaitWrxwuNGU+dy1EpaoK48jZay4MdkskKGIMHLZNewg8sAsqpGSREJwP0zH3YQA==
dependencies:
"@babel/types" "^7.
2.2
"
"@babel/types" "^7.
9.0
"
jsesc "^2.5.1"
lodash "^4.17.1
0
"
lodash "^4.17.1
3
"
source-map "^0.5.0"
trim-right "^1.0.1"
"@babel/helper-function-name@^7.1.0":
version "7.1.0"
resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz#a0ceb01685f73355d4360c1247f582bfafc8ff53"
"@babel/helper-function-name@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca"
integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==
dependencies:
"@babel/helper-get-function-arity" "^7.
0.0
"
"@babel/template" "^7.
1.0
"
"@babel/types" "^7.
0.0
"
"@babel/helper-get-function-arity" "^7.
8.3
"
"@babel/template" "^7.
8.3
"
"@babel/types" "^7.
8.3
"
"@babel/helper-get-function-arity@^7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz#83572d4320e2a4657263734113c42868b64e49c3"
"@babel/helper-get-function-arity@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5"
integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==
dependencies:
"@babel/types" "^7.
0.0
"
"@babel/types" "^7.
8.3
"
"@babel/helper-split-export-declaration@^7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.0.0.tgz#3aae285c0311c2ab095d997b8c9a94cad547d813"
"@babel/helper-member-expression-to-functions@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz#659b710498ea6c1d9907e0c73f206eee7dadc24c"
integrity sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-module-imports@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498"
integrity sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-module-transforms@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.9.0.tgz#43b34dfe15961918707d247327431388e9fe96e5"
integrity sha512-0FvKyu0gpPfIQ8EkxlrAydOWROdHpBmiCiRwLkUiBGhCUPRRbVD2/tm3sFr/c/GWFrQ/ffutGUAnx7V0FzT2wA==
dependencies:
"@babel/helper-module-imports" "^7.8.3"
"@babel/helper-replace-supers" "^7.8.6"
"@babel/helper-simple-access" "^7.8.3"
"@babel/helper-split-export-declaration" "^7.8.3"
"@babel/template" "^7.8.6"
"@babel/types" "^7.9.0"
lodash "^4.17.13"
"@babel/helper-optimise-call-expression@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz#7ed071813d09c75298ef4f208956006b6111ecb9"
integrity sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ==
dependencies:
"@babel/types" "^7.0.0"
"@babel/types" "^7.8.3"
"@babel/helper-replace-supers@^7.8.6":
version "7.8.6"
resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz#5ada744fd5ad73203bf1d67459a27dcba67effc8"
integrity sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA==
dependencies:
"@babel/helper-member-expression-to-functions" "^7.8.3"
"@babel/helper-optimise-call-expression" "^7.8.3"
"@babel/traverse" "^7.8.6"
"@babel/types" "^7.8.6"
"@babel/helper-simple-access@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz#7f8109928b4dab4654076986af575231deb639ae"
integrity sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw==
dependencies:
"@babel/template" "^7.8.3"
"@babel/types" "^7.8.3"
"@babel/helper-split-export-declaration@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9"
integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-validator-identifier@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.9.0.tgz#ad53562a7fc29b3b9a91bbf7d10397fd146346ed"
integrity sha512-6G8bQKjOh+of4PV/ThDm/rRqlU7+IGoJuofpagU5GlEl29Vv0RGqqt86ZGRV8ZuSOY3o+8yXl5y782SMcG7SHw==
"@babel/helpers@^7.9.0":
version "7.9.2"
resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.9.2.tgz#b42a81a811f1e7313b88cba8adc66b3d9ae6c09f"
integrity sha512-JwLvzlXVPjO8eU9c/wF9/zOIN7X6h8DYf7mG4CiFRZRvZNKEF5dQ3H3V+ASkHoIB3mWhatgl5ONhyqHRI6MppA==
dependencies:
"@babel/template" "^7.8.3"
"@babel/traverse" "^7.9.0"
"@babel/types" "^7.9.0"
"@babel/highlight@^7.0.0":
version "7.0.0"
...
...
@@ -46,40 +144,68 @@
esutils "^2.0.2"
js-tokens "^4.0.0"
"@babel/parser@^7.0.0", "@babel/parser@^7.2.2", "@babel/parser@^7.2.3":
version "7.2.3"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.2.3.tgz#32f5df65744b70888d17872ec106b02434ba1489"
"@babel/template@^7.0.0", "@babel/template@^7.1.0":
version "7.2.2"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.2.2.tgz#005b3fdf0ed96e88041330379e0da9a708eb2907"
"@babel/highlight@^7.8.3":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.9.0.tgz#4e9b45ccb82b79607271b2979ad82c7b68163079"
integrity sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==
dependencies:
"@babel/
code-frame
" "^7.
0
.0"
"@babel/parser" "^7.2.2
"
"@babel/types" "^7.2.2
"
"@babel/
helper-validator-identifier
" "^7.
9
.0"
chalk "^2.0.0
"
js-tokens "^4.0.0
"
"@babel/traverse@^7.0.0":
version "7.2.3"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.2.3.tgz#7ff50cefa9c7c0bd2d81231fdac122f3957748d8"
dependencies:
"@babel/code-frame" "^7.0.0"
"@babel/generator" "^7.2.2"
"@babel/helper-function-name" "^7.1.0"
"@babel/helper-split-export-declaration" "^7.0.0"
"@babel/parser" "^7.2.3"
"@babel/types" "^7.2.2"
"@babel/parser@^7.7.5", "@babel/parser@^7.8.6", "@babel/parser@^7.9.0":
version "7.9.4"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.9.4.tgz#68a35e6b0319bbc014465be43828300113f2f2e8"
integrity sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA==
"@babel/template@^7.7.4", "@babel/template@^7.8.3", "@babel/template@^7.8.6":
version "7.8.6"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.8.6.tgz#86b22af15f828dfb086474f964dcc3e39c43ce2b"
integrity sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/parser" "^7.8.6"
"@babel/types" "^7.8.6"
"@babel/traverse@^7.7.4", "@babel/traverse@^7.8.6", "@babel/traverse@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.9.0.tgz#d3882c2830e513f4fe4cec9fe76ea1cc78747892"
integrity sha512-jAZQj0+kn4WTHO5dUZkZKhbFrqZE7K5LAQ5JysMnmvGij+wOdr+8lWqPeW0BcF4wFwrEXXtdGO7wcV6YPJcf3w==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/generator" "^7.9.0"
"@babel/helper-function-name" "^7.8.3"
"@babel/helper-split-export-declaration" "^7.8.3"
"@babel/parser" "^7.9.0"
"@babel/types" "^7.9.0"
debug "^4.1.0"
globals "^11.1.0"
lodash "^4.17.1
0
"
lodash "^4.17.1
3
"
"@babel/types@^7.0.0", "@babel/types@^7.2.2":
version "7.2.2"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.2.2.tgz#44e10fc24e33af524488b716cdaee5360ea8ed1e"
"@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.9.0.tgz#00b064c3df83ad32b2dbf5ff07312b15c7f1efb5"
integrity sha512-BS9JKfXkzzJl8RluW4JGknzpiUV7ZrvTayM6yfqLTVBEnFtyowVIOu6rqxRd5cVO6yGoWf4T8u8dgK9oB+GCng==
dependencies:
esutils "^2.0.2
"
lodash "^4.17.1
0
"
"@babel/helper-validator-identifier" "^7.9.0
"
lodash "^4.17.1
3
"
to-fast-properties "^2.0.0"
"@istanbuljs/load-nyc-config@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz#10602de5570baea82f8afbfa2630b24e7a8cfe5b"
integrity sha512-ZR0rq/f/E4f4XcgnDvtMWXCUJpi8eO0rssVhmztsZqLIEFA9UUP9zmpE0VxlM+kv/E1ul2I876Fwil2ayptDVg==
dependencies:
camelcase "^5.3.1"
find-up "^4.1.0"
js-yaml "^3.13.1"
resolve-from "^5.0.0"
"@istanbuljs/schema@^0.1.2":
version "0.1.2"
resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.2.tgz#26520bf09abe4a5644cd5414e37125a8954241dd"
integrity sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw==
"@sindresorhus/is@^0.7.0":
version "0.7.0"
resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd"
...
...
@@ -105,6 +231,11 @@
version "4.1.4"
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.4.tgz#5ca073b330d90b4066d6ce18f60d57f2084ce8ca"
"@types/color-name@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
"@types/connect@*":
version "3.4.32"
resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.32.tgz#aa0e9616b9435ccad02bc52b5b454ffc2c70ba28"
...
...
@@ -410,6 +541,14 @@ aggregate-error@^1.0.0:
clean-stack "^1.0.0"
indent-string "^3.0.0"
aggregate-error@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.0.1.tgz#db2fe7246e536f40d9b5442a39e117d7dd6a24e0"
integrity sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==
dependencies:
clean-stack "^2.0.0"
indent-string "^4.0.0"
ajv@^5.1.0, ajv@^5.3.0:
version "5.5.2"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965"
...
...
@@ -434,6 +573,11 @@ ansi-align@^2.0.0:
dependencies:
string-width "^2.0.0"
ansi-colors@3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==
ansi-escapes@^4.2.1:
version "4.3.0"
resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.0.tgz#a4ce2b33d6b214b7950d8595c212f12ac9cc569d"
...
...
@@ -462,6 +606,14 @@ ansi-styles@^3.2.0, ansi-styles@^3.2.1:
dependencies:
color-convert "^1.9.0"
ansi-styles@^4.0.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359"
integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==
dependencies:
"@types/color-name" "^1.1.1"
color-convert "^2.0.1"
ansicolors@~0.3.2:
version "0.3.2"
resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979"
...
...
@@ -470,11 +622,20 @@ ansistyles@~0.1.3:
version "0.1.3"
resolved "https://registry.yarnpkg.com/ansistyles/-/ansistyles-0.1.3.tgz#5de60415bda071bb37127854c864f41b23254539"
append-transform@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-1.0.0.tgz#046a52ae582a228bd72f58acfbe2967c678759ab"
anymatch@~3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
dependencies:
normalize-path "^3.0.0"
picomatch "^2.0.4"
append-transform@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12"
integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==
dependencies:
default-require-extensions "^
2
.0.0"
default-require-extensions "^
3
.0.0"
aproba@^1.0.3, aproba@^1.1.1, aproba@^1.1.2:
version "1.2.0"
...
...
@@ -515,7 +676,7 @@ array-uniq@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6"
arrify@^1.0.0, arrify@^1.0.1:
arrify@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
...
...
@@ -600,6 +761,11 @@ bin-links@^1.1.2, bin-links@^1.1.6:
npm-normalize-package-bin "^1.0.0"
write-file-atomic "^2.3.0"
binary-extensions@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c"
integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==
bluebird@^3.5.1, bluebird@^3.5.3, bluebird@^3.5.5:
version "3.7.2"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
...
...
@@ -644,6 +810,13 @@ brace-expansion@^1.1.7:
balanced-match "^1.0.0"
concat-map "0.0.1"
braces@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
dependencies:
fill-range "^7.0.1"
browser-stdout@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
...
...
@@ -660,10 +833,6 @@ buffer-stream-reader@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/buffer-stream-reader/-/buffer-stream-reader-0.1.1.tgz#ca8bf93631deedd8b8f8c3bb44991cc30951e259"
builtin-modules@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f"
builtins@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88"
...
...
@@ -712,14 +881,15 @@ cacheable-request@^2.1.1:
normalize-url "2.0.1"
responselike "1.0.2"
caching-transform@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-2.0.0.tgz#e1292bd92d35b6e8b1ed7075726724b3bd64eea0"
caching-transform@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f"
integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==
dependencies:
make-dir "^1.0.0"
md5-hex "^2.0.0"
package-hash "^2.0.0"
write-file-atomic "^2.0.0"
hasha "^5.0.0"
make-dir "^3.0.0"
package-hash "^4.0.0"
write-file-atomic "^3.0.0"
call-limit@^1.1.1:
version "1.1.1"
...
...
@@ -737,6 +907,11 @@ camelcase@^4.0.0, camelcase@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
camelcase@^5.0.0, camelcase@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
capture-stack-trace@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz#a6c0bbe1f38f3aa0b92238ecb6ff42c344d4135d"
...
...
@@ -794,6 +969,21 @@ child-process-promise@^2.2.1:
node-version "^1.0.0"
promise-polyfill "^6.0.1"
chokidar@3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6"
integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==
dependencies:
anymatch "~3.1.1"
braces "~3.0.2"
glob-parent "~5.1.0"
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.2.0"
optionalDependencies:
fsevents "~2.1.1"
chownr@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181"
...
...
@@ -820,6 +1010,11 @@ clean-stack@^1.0.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-1.3.0.tgz#9e821501ae979986c46b1d66d2d432db2fd4ae31"
clean-stack@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
cli-boxes@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143"
...
...
@@ -858,6 +1053,24 @@ cliui@^4.0.0:
strip-ansi "^4.0.0"
wrap-ansi "^2.0.0"
cliui@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
dependencies:
string-width "^3.1.0"
strip-ansi "^5.2.0"
wrap-ansi "^5.1.0"
cliui@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
dependencies:
string-width "^4.2.0"
strip-ansi "^6.0.0"
wrap-ansi "^6.2.0"
clone-response@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b"
...
...
@@ -889,10 +1102,22 @@ color-convert@^1.9.0:
dependencies:
color-name "1.1.1"
color-convert@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
dependencies:
color-name "~1.1.4"
color-name@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.1.tgz#4b1415304cf50028ea81643643bd82ea05803689"
color-name@~1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
colors@^1.1.2:
version "1.4.0"
resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78"
...
...
@@ -916,14 +1141,6 @@ combined-stream@~1.0.6:
dependencies:
delayed-stream "~1.0.0"
commander@2.15.1:
version "2.15.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f"
commander@~2.17.1:
version "2.17.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf"
commondir@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
...
...
@@ -971,9 +1188,10 @@ content-type@~1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
convert-source-map@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20"
convert-source-map@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442"
integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==
dependencies:
safe-buffer "~5.1.1"
...
...
@@ -1006,7 +1224,7 @@ create-error-class@^3.0.0:
dependencies:
capture-stack-trace "^1.0.0"
cross-spawn@^4, cross-spawn@^4.0.2:
cross-spawn@^4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-4.0.2.tgz#7b9247621c23adfdd3856004a823cbe397424d41"
dependencies:
...
...
@@ -1031,6 +1249,15 @@ cross-spawn@^6.0.0, cross-spawn@^6.0.5:
shebang-command "^1.2.0"
which "^1.2.9"
cross-spawn@^7.0.0:
version "7.0.1"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.1.tgz#0ab56286e0f7c24e153d04cc2aa027e43a9a5d14"
integrity sha512-u7v4o84SwFpD32Z8IIcPZ6z1/ie24O6RU3RbtL5Y316l3KuHVPx9ItBgWQ6VlfAFnRnTtMUrsQ9MUUTuEZjogg==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"
which "^2.0.1"
crypto-random-string@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"
...
...
@@ -1045,10 +1272,6 @@ dashdash@^1.12.0:
dependencies:
assert-plus "^1.0.0"
debug-log@^1.0.1:
version "1.0.1"
resolved "http://registry.npmjs.org/debug-log/-/debug-log-1.0.1.tgz#2307632d4c04382b8df8a32f70b895046d52745f"
debug@2.6.9, debug@^2.1.2:
version "2.6.9"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
...
...
@@ -1061,7 +1284,7 @@ debug@3.1.0:
dependencies:
ms "2.0.0"
debug@^3.1.0:
debug@3.2.6, debug@^3.1.0:
version "3.2.6"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
dependencies:
...
...
@@ -1077,7 +1300,7 @@ debuglog@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
decamelize@^1.1.1:
decamelize@^1.1.1, decamelize@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
...
...
@@ -1114,11 +1337,12 @@ deepmerge@^4.2.2:
resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
default-require-extensions@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-2.0.0.tgz#f5f8fbb18a7d6d50b21f641f649ebb522cfe24f7"
default-require-extensions@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96"
integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==
dependencies:
strip-bom "^
3
.0.0"
strip-bom "^
4
.0.0"
defaults@^1.0.3:
version "1.0.3"
...
...
@@ -1256,12 +1480,6 @@ errno@~0.1.7:
dependencies:
prr "~1.0.1"
error-ex@^1.3.1:
version "1.3.2"
resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
dependencies:
is-arrayish "^0.2.1"
es-abstract@^1.17.0-next.1:
version "1.17.0-next.1"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.0-next.1.tgz#94acc93e20b05a6e96dacb5ab2f1cb3a81fc2172"
...
...
@@ -1530,6 +1748,13 @@ file-entry-cache@^5.0.1:
dependencies:
flat-cache "^2.0.1"
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
dependencies:
to-regex-range "^5.0.1"
finalhandler@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.1.tgz#eebf4ed840079c83f4249038c9d703008301b105"
...
...
@@ -1542,29 +1767,38 @@ finalhandler@1.1.1:
statuses "~1.4.0"
unpipe "~1.0.0"
find-cache-dir@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.0.0.tgz#4c1faed59f45184530fb9d7fa123a4d04a98472d"
find-cache-dir@^3.2.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880"
integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==
dependencies:
commondir "^1.0.1"
make-dir "^
1
.0.
0
"
pkg-dir "^
3.0
.0"
make-dir "^
3
.0.
2
"
pkg-dir "^
4.1
.0"
find-npm-prefix@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/find-npm-prefix/-/find-npm-prefix-1.0.2.tgz#8d8ce2c78b3b4b9e66c8acc6a37c231eb841cfdf"
find-up@3.0.0, find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
dependencies:
locate-path "^3.0.0"
find-up@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
dependencies:
locate-path "^2.0.0"
find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
find-up@^4.0.0, find-up@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
dependencies:
locate-path "^3.0.0"
locate-path "^5.0.0"
path-exists "^4.0.0"
flat-cache@^2.0.1:
version "2.0.1"
...
...
@@ -1574,6 +1808,13 @@ flat-cache@^2.0.1:
rimraf "2.6.3"
write "1.0.3"
flat@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2"
integrity sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==
dependencies:
is-buffer "~2.0.3"
flatted@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.1.tgz#69e57caa8f0eacbc281d2e2cb458d46fdb449e08"
...
...
@@ -1591,12 +1832,13 @@ flush-write-stream@^1.0.0:
inherits "^2.0.3"
readable-stream "^2.3.6"
foreground-child@^1.5.6:
version "1.5.6"
resolved "http://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz#4fd71ad2dfde96789b980a5c0a295937cb2f5ce9"
foreground-child@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53"
integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==
dependencies:
cross-spawn "^
4
"
signal-exit "^3.0.
0
"
cross-spawn "^
7.0.0
"
signal-exit "^3.0.
2
"
forever-agent@~0.6.1:
version "0.6.1"
...
...
@@ -1632,6 +1874,11 @@ from2@^2.1.0, from2@^2.1.1:
inherits "^2.0.1"
readable-stream "^2.0.0"
fromentries@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.2.0.tgz#e6aa06f240d6267f913cea422075ef88b63e7897"
integrity sha512-33X7H/wdfO99GdRLLgkjUrD4geAFdq/Uv0kl3HD4da6HDixd2GUg8Mw7dahLCV9r/EARkmtYBB6Tch4EEokFTQ==
fs-minipass@^1.2.5:
version "1.2.5"
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d"
...
...
@@ -1659,6 +1906,11 @@ fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
fsevents@~2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805"
integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==
function-bind@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
...
...
@@ -1684,6 +1936,11 @@ genfun@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/genfun/-/genfun-5.0.0.tgz#9dd9710a06900a5c4a5bf57aca5da4e52fe76537"
gensync@^1.0.0-beta.1:
version "1.0.0-beta.1"
resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269"
integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg==
gentle-fs@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/gentle-fs/-/gentle-fs-2.3.0.tgz#13538db5029400f98684be4894e8a7d8f0d1ea7f"
...
...
@@ -1704,6 +1961,11 @@ get-caller-file@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a"
get-caller-file@^2.0.1:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
get-func-name@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41"
...
...
@@ -1737,9 +1999,16 @@ glob-parent@^5.0.0:
dependencies:
is-glob "^4.0.1"
glob@7.1.2, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1:
version "7.1.2"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
glob-parent@~5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
dependencies:
is-glob "^4.0.1"
glob@7.1.3, glob@^7.1.3:
version "7.1.3"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
...
...
@@ -1748,9 +2017,9 @@ glob@7.1.2, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1:
once "^1.3.0"
path-is-absolute "^1.0.0"
glob@^7.1.3:
version "7.1.3"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
glob@^7.0.3, glob@^7.0.5, glob@^7.1.1:
version "7.1.2"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
...
...
@@ -1846,16 +2115,6 @@ growl@1.10.5:
version "1.10.5"
resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
handlebars@^4.0.11, handlebars@^4.5.3:
version "4.7.2"
resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.2.tgz#01127b3840156a0927058779482031afe0e730d7"
dependencies:
neo-async "^2.6.0"
optimist "^0.6.1"
source-map "^0.6.1"
optionalDependencies:
uglify-js "^3.1.4"
har-schema@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
...
...
@@ -1878,6 +2137,11 @@ has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
has-flag@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
has-symbol-support-x@^1.4.1:
version "1.4.2"
resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455"
...
...
@@ -1909,9 +2173,18 @@ hash-base@^3.0.0:
inherits "^2.0.1"
safe-buffer "^5.0.1"
he@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
hasha@^5.0.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.2.0.tgz#33094d1f69c40a4a6ac7be53d5fe3ff95a269e0c"
integrity sha512-2W+jKdQbAdSIrggA8Q35Br8qKadTrqCTC8+XZvBWepKDK6m9XkX6Iz1a2yh2KP01kzAR/dpuMeUnocoLYDcskw==
dependencies:
is-stream "^2.0.0"
type-fest "^0.8.0"
he@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
hoek@2.x.x, hoek@^4.2.1:
version "4.2.1"
...
...
@@ -1925,6 +2198,11 @@ hosted-git-info@^2.7.1, hosted-git-info@^2.8.5:
version "2.8.5"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.5.tgz#759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c"
html-escaper@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
http-cache-semantics@3.8.1, http-cache-semantics@^3.8.1:
version "3.8.1"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2"
...
...
@@ -2028,6 +2306,11 @@ indent-string@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289"
indent-string@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
infer-owner@^1.0.3, infer-owner@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467"
...
...
@@ -2089,10 +2372,6 @@ into-stream@^3.1.0:
from2 "^2.1.1"
p-is-promise "^1.1.0"
invert-kv@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6"
invert-kv@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02"
...
...
@@ -2109,15 +2388,17 @@ ipaddr.js@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.6.0.tgz#e3fa357b773da619f26e95f049d055c72796f86b"
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
is-builtin-module@^1.0.0:
version "1.0.0"
resolved "http://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe"
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
dependencies:
builtin-modules "^1.0.0"
binary-extensions "^2.0.0"
is-buffer@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.4.tgz#3e572f23c8411a5cfd9557c849e3665e0b290623"
integrity sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==
is-callable@^1.1.4:
version "1.1.4"
...
...
@@ -2163,7 +2444,7 @@ is-glob@^3.1.0:
dependencies:
is-extglob "^2.1.0"
is-glob@^4.0.0, is-glob@^4.0.1:
is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
dependencies:
...
...
@@ -2180,6 +2461,11 @@ is-npm@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4"
is-number@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
is-obj@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f"
...
...
@@ -2224,16 +2510,26 @@ is-stream@^1.0.0, is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
is-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
is-symbol@^1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
dependencies:
has-symbols "^1.0.1"
is-typedarray@~1.0.0:
is-typedarray@^1.0.0, is-typedarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
is-windows@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
is@^3.2.1:
version "3.3.0"
resolved "https://registry.yarnpkg.com/is/-/is-3.3.0.tgz#61cff6dd3c4193db94a3d62582072b44e5645d79"
...
...
@@ -2258,51 +2554,69 @@ isstream@~0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
istanbul-lib-coverage@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.1.tgz#2aee0e073ad8c5f6a0b00e0dfbf52b4667472eda"
istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.0.0-alpha.1:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec"
integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==
istanbul-lib-hook@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-2.0.1.tgz#918a57b75a0f951d552a08487ca1fa5336433d72"
istanbul-lib-hook@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6"
integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==
dependencies:
append-transform "^
1
.0.0"
append-transform "^
2
.0.0"
istanbul-lib-instrument@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-3.0.0.tgz#b5f066b2a161f75788be17a9d556f40a0cf2afc9"
dependencies:
"@babel/generator" "^7.0.0"
"@babel/parser" "^7.0.0"
"@babel/template" "^7.0.0"
"@babel/traverse" "^7.0.0"
"@babel/types" "^7.0.0"
istanbul-lib-coverage "^2.0.1"
semver "^5.5.0"
istanbul-lib-instrument@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.1.tgz#61f13ac2c96cfefb076fe7131156cc05907874e6"
integrity sha512-imIchxnodll7pvQBYOqUu88EufLCU56LMeFPZZM/fJZ1irYcYdqroaV+ACK1Ila8ls09iEYArp+nqyC6lW1Vfg==
dependencies:
"@babel/core" "^7.7.5"
"@babel/parser" "^7.7.5"
"@babel/template" "^7.7.4"
"@babel/traverse" "^7.7.4"
"@istanbuljs/schema" "^0.1.2"
istanbul-lib-coverage "^3.0.0"
semver "^6.3.0"
istanbul-lib-report@^2.0.2:
istanbul-lib-processinfo@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-2.0.2.tgz#430a2598519113e1da7af274ba861bd42dd97535"
resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz#e1426514662244b2f25df728e8fd1ba35fe53b9c"
integrity sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==
dependencies:
istanbul-lib-coverage "^2.0.1"
make-dir "^1.3.0"
supports-color "^5.4.0"
archy "^1.0.0"
cross-spawn "^7.0.0"
istanbul-lib-coverage "^3.0.0-alpha.1"
make-dir "^3.0.0"
p-map "^3.0.0"
rimraf "^3.0.0"
uuid "^3.3.3"
istanbul-lib-source-maps@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-2.0.1.tgz#ce8b45131d8293fdeaa732f4faf1852d13d0a97e"
istanbul-lib-report@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==
dependencies:
debug "^3.1.0"
istanbul-lib-coverage "^2.0.1"
make-dir "^1.3.0"
rimraf "^2.6.2"
istanbul-lib-coverage "^3.0.0"
make-dir "^3.0.0"
supports-color "^7.1.0"
istanbul-lib-source-maps@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9"
integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==
dependencies:
debug "^4.1.1"
istanbul-lib-coverage "^3.0.0"
source-map "^0.6.1"
istanbul-reports@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-2.0.1.tgz#fb8d6ea850701a3984350b977a969e9a556116a7"
istanbul-reports@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.1.tgz#1343217244ad637e0c3b18e7f6b746941a9b5e9a"
integrity sha512-Vm9xwCiQ8t2cNNnckyeAV0UdxKpcQUz4nMxsBvIu8n2kmPSiyb5uaF/8LpmKr+yqL/MdOXaX2Nmdo4Qyxium9Q==
dependencies:
handlebars "^4.0.11"
html-escaper "^2.0.0"
istanbul-lib-report "^3.0.0"
isurl@^1.0.0-alpha5:
version "1.0.0"
...
...
@@ -2328,7 +2642,7 @@ js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
js-yaml@^3.10.0, js-yaml@^3.13.1:
js-yaml@3.13.1, js-yaml@^3.10.0, js-yaml@^3.13.1:
version "3.13.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
dependencies:
...
...
@@ -2377,6 +2691,13 @@ json-stringify-safe@~5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
json5@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.2.tgz#43ef1f0af9835dd624751a6b7fa48874fb2d608e"
integrity sha512-MoUOQ4WdiN3yxhm7NEVJSJrieAo5hNSLQ5sj05OTRHPL9HOBy8u4Bu88jsC1jvqAdN+E1bJmsUcZH+1HQxliqQ==
dependencies:
minimist "^1.2.5"
jsonparse@^1.2.0:
version "1.3.1"
resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280"
...
...
@@ -2422,12 +2743,6 @@ lazy-property@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/lazy-property/-/lazy-property-1.0.0.tgz#84ddc4b370679ba8bd4cdcfa4c06b43d57111147"
lcid@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835"
dependencies:
invert-kv "^1.0.0"
lcid@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf"
...
...
@@ -2565,15 +2880,6 @@ libnpx@10.2.0, libnpx@^10.2.0:
y18n "^4.0.0"
yargs "^11.0.0"
load-json-file@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
dependencies:
graceful-fs "^4.1.2"
parse-json "^4.0.0"
pify "^3.0.0"
strip-bom "^3.0.0"
locate-path@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
...
...
@@ -2588,6 +2894,13 @@ locate-path@^3.0.0:
p-locate "^3.0.0"
path-exists "^3.0.0"
locate-path@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
dependencies:
p-locate "^4.1.0"
lock-verify@^2.0.2, lock-verify@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/lock-verify/-/lock-verify-2.1.0.tgz#fff4c918b8db9497af0c5fa7f6d71555de3ceb47"
...
...
@@ -2676,10 +2989,17 @@ lodash.without@~4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.without/-/lodash.without-4.4.0.tgz#3cd4574a00b67bae373a94b748772640507b7aac"
lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15:
lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15:
version "4.17.14"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.14.tgz#9ce487ae66c96254fe20b599f21b6816028078ba"
log-symbols@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4"
integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==
dependencies:
chalk "^2.4.2"
long@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
...
...
@@ -2712,12 +3032,19 @@ lru-cache@^5.1.1:
dependencies:
yallist "^3.0.2"
make-dir@^1.0.0, make-dir@^1.3.0:
make-dir@^1.0.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c"
dependencies:
pify "^3.0.0"
make-dir@^3.0.0, make-dir@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.0.2.tgz#04a1acbf22221e1d6ef43559f43e05a90dbb4392"
integrity sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==
dependencies:
semver "^6.0.0"
make-error@^1.1.1:
version "1.3.4"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.4.tgz#19978ed575f9e9545d2ff8c13e33b5d18a67d535"
...
...
@@ -2744,16 +3071,6 @@ map-age-cleaner@^0.1.1:
dependencies:
p-defer "^1.0.0"
md5-hex@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/md5-hex/-/md5-hex-2.0.0.tgz#d0588e9f1c74954492ecd24ac0ac6ce997d92e33"
dependencies:
md5-o-matic "^0.1.1"
md5-o-matic@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/md5-o-matic/-/md5-o-matic-0.1.1.tgz#822bccd65e117c514fab176b25945d54100a03c3"
md5.js@1.3.4:
version "1.3.4"
resolved "http://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d"
...
...
@@ -2769,7 +3086,7 @@ media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
mem@^1.1.0, mem@^4.0.0:
mem@^4.0.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178"
dependencies:
...
...
@@ -2781,12 +3098,6 @@ merge-descriptors@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
merge-source-map@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/merge-source-map/-/merge-source-map-1.1.0.tgz#2fdde7e6020939f70906a68f2d7ae685e4c8c646"
dependencies:
source-map "^0.6.1"
methods@~1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
...
...
@@ -2829,17 +3140,10 @@ minimatch@3.0.4, minimatch@^3.0.4:
dependencies:
brace-expansion "^1.1.7"
minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
minimist@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
minimist@~0.0.1:
version "0.0.10"
resolved "http://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf"
minimist@^1.2.0, minimist@^1.2.5:
version "1.2.5"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
integrity sha1-Z9ZgFLZqaoqqDAg8X9WN9OTpdgI=
minipass@^2.2.1, minipass@^2.3.3:
version "2.3.3"
...
...
@@ -2882,27 +3186,42 @@ mississippi@^3.0.0:
stream-each "^1.1.0"
through2 "^2.0.0"
mkdirp@0.5.1, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
mkdirp@0.5.3, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
version "0.5.3"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.3.tgz#5a514b7179259287952881e94410ec5465659f8c"
integrity sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==
dependencies:
minimist "
0.0.8
"
minimist "
^1.2.5
"
mocha@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.2.0.tgz#6d8ae508f59167f940f2b5b3c4a612ae50c90ae6"
mocha@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/mocha/-/mocha-7.1.1.tgz#89fbb30d09429845b1bb893a830bf5771049a441"
integrity sha512-3qQsu3ijNS3GkWcccT5Zw0hf/rWvu1fTN9sPvEd81hlwsr30GX2GcDSSoBxo24IR8FelmrAydGC6/1J5QQP4WA==
dependencies:
ansi-colors "3.2.3"
browser-stdout "1.3.1"
commander "2.15.1"
debug "3.1.0"
chokidar "3.3.0"
debug "3.2.6"
diff "3.5.0"
escape-string-regexp "1.0.5"
glob "7.1.2"
find-up "3.0.0"
glob "7.1.3"
growl "1.10.5"
he "1.1.1"
he "1.2.0"
js-yaml "3.13.1"
log-symbols "3.0.0"
minimatch "3.0.4"
mkdirp "0.5.1"
supports-color "5.4.0"
mkdirp "0.5.3"
ms "2.1.1"
node-environment-flags "1.0.6"
object.assign "4.1.0"
strip-json-comments "2.0.1"
supports-color "6.0.0"
which "1.3.1"
wide-align "1.1.3"
yargs "13.3.2"
yargs-parser "13.1.2"
yargs-unparser "1.6.0"
moment@2.x.x:
version "2.22.2"
...
...
@@ -2923,14 +3242,14 @@ ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
ms@2.1.1, ms@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
ms@^2.0.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
ms@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
mute-stream@0.0.8, mute-stream@~0.0.4:
version "0.0.8"
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
...
...
@@ -2955,14 +3274,18 @@ negotiator@0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
neo-async@^2.6.0:
version "2.6.1"
resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c"
nice-try@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
node-environment-flags@1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
node-fetch-npm@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/node-fetch-npm/-/node-fetch-npm-2.0.2.tgz#7258c9046182dca345b4208eda918daf33697ff7"
...
...
@@ -3026,6 +3349,13 @@ node-pre-gyp@^0.10.3:
semver "^5.3.0"
tar "^4"
node-preload@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301"
integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==
dependencies:
process-on-spawn "^1.0.0"
node-version@^1.0.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/node-version/-/node-version-1.2.0.tgz#34fde3ffa8e1149bd323983479dda620e1b5060d"
...
...
@@ -3059,14 +3389,10 @@ normalize-package-data@^2.0.0, normalize-package-data@^2.4.0, normalize-package-
semver "2 || 3 || 4 || 5"
validate-npm-package-license "^3.0.1"
normalize-package-data@^2.3.2:
version "2.4.0"
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f"
dependencies:
hosted-git-info "^2.1.4"
is-builtin-module "^1.0.0"
semver "2 || 3 || 4 || 5"
validate-npm-package-license "^3.0.1"
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
normalize-url@2.0.1:
version "2.0.1"
...
...
@@ -3319,35 +3645,39 @@ number-is-nan@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
nyc@^13.1.0:
version "13.1.0"
resolved "https://registry.yarnpkg.com/nyc/-/nyc-13.1.0.tgz#463665c7ff6b5798e322624a5eb449a678db90e3"
dependencies:
archy "^1.0.0"
arrify "^1.0.1"
caching-transform "^2.0.0"
convert-source-map "^1.6.0"
debug-log "^1.0.1"
find-cache-dir "^2.0.0"
find-up "^3.0.0"
foreground-child "^1.5.6"
glob "^7.1.3"
istanbul-lib-coverage "^2.0.1"
istanbul-lib-hook "^2.0.1"
istanbul-lib-instrument "^3.0.0"
istanbul-lib-report "^2.0.2"
istanbul-lib-source-maps "^2.0.1"
istanbul-reports "^2.0.1"
make-dir "^1.3.0"
merge-source-map "^1.1.0"
resolve-from "^4.0.0"
rimraf "^2.6.2"
nyc@^15.0.0:
version "15.0.0"
resolved "https://registry.yarnpkg.com/nyc/-/nyc-15.0.0.tgz#eb32db2c0f29242c2414fe46357f230121cfc162"
integrity sha512-qcLBlNCKMDVuKb7d1fpxjPR8sHeMVX0CHarXAVzrVWoFrigCkYR8xcrjfXSPi5HXM7EU78L6ywO7w1c5rZNCNg==
dependencies:
"@istanbuljs/load-nyc-config" "^1.0.0"
"@istanbuljs/schema" "^0.1.2"
caching-transform "^4.0.0"
convert-source-map "^1.7.0"
decamelize "^1.2.0"
find-cache-dir "^3.2.0"
find-up "^4.1.0"
foreground-child "^2.0.0"
glob "^7.1.6"
istanbul-lib-coverage "^3.0.0"
istanbul-lib-hook "^3.0.0"
istanbul-lib-instrument "^4.0.0"
istanbul-lib-processinfo "^2.0.2"
istanbul-lib-report "^3.0.0"
istanbul-lib-source-maps "^4.0.0"
istanbul-reports "^3.0.0"
js-yaml "^3.13.1"
make-dir "^3.0.0"
node-preload "^0.2.0"
p-map "^3.0.0"
process-on-spawn "^1.0.0"
resolve-from "^5.0.0"
rimraf "^3.0.0"
signal-exit "^3.0.2"
spawn-wrap "^1.4.2"
test-exclude "^5.0.0"
uuid "^3.3.2"
yargs "11.1.0"
yargs-parser "^9.0.2"
spawn-wrap "^2.0.0"
test-exclude "^6.0.0"
uuid "^3.3.3"
yargs "^15.0.2"
oauth-sign@~0.8.2:
version "0.8.2"
...
...
@@ -3369,7 +3699,7 @@ object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
object.assign@^4.1.0:
object.assign@4.1.0, object.assign@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da"
dependencies:
...
...
@@ -3425,13 +3755,6 @@ openid-client@^2.4.4:
oidc-token-hash "^3.0.1"
p-any "^1.1.0"
optimist@^0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686"
dependencies:
minimist "~0.0.1"
wordwrap "~0.0.2"
optionator@^0.8.3:
version "0.8.3"
resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
...
...
@@ -3443,18 +3766,10 @@ optionator@^0.8.3:
type-check "~0.3.2"
word-wrap "~1.2.3"
os-homedir@^1.0.0, os-homedir@^1.0.1:
os-homedir@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
os-locale@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2"
dependencies:
execa "^0.7.0"
lcid "^1.0.0"
mem "^1.1.0"
os-locale@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
...
...
@@ -3512,6 +3827,13 @@ p-limit@^2.0.0:
dependencies:
p-try "^2.0.0"
p-limit@^2.2.0:
version "2.2.2"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e"
integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==
dependencies:
p-try "^2.0.0"
p-locate@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
...
...
@@ -3524,6 +3846,20 @@ p-locate@^3.0.0:
dependencies:
p-limit "^2.0.0"
p-locate@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
dependencies:
p-limit "^2.2.0"
p-map@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d"
integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==
dependencies:
aggregate-error "^3.0.0"
p-some@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/p-some/-/p-some-2.0.1.tgz#65d87c8b154edbcf5221d167778b6d2e150f6f06"
...
...
@@ -3544,13 +3880,14 @@ p-try@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.0.0.tgz#85080bb87c64688fa47996fe8f7dfbe8211760b1"
package-hash@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-2.0.0.tgz#78ae326c89e05a4d813b68601977af05c00d2a0d"
package-hash@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506"
integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==
dependencies:
graceful-fs "^4.1.11"
graceful-fs "^4.1.15"
hasha "^5.0.0"
lodash.flattendeep "^4.4.0"
md5-hex "^2.0.0"
release-zalgo "^1.0.0"
package-json@^4.0.0:
...
...
@@ -3617,13 +3954,6 @@ parent-module@^1.0.0:
dependencies:
callsites "^3.0.0"
parse-json@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
dependencies:
error-ex "^1.3.1"
json-parse-better-errors "^1.0.1"
parseurl@~1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3"
...
...
@@ -3636,6 +3966,11 @@ path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
path-exists@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
path-is-absolute@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
...
...
@@ -3648,6 +3983,11 @@ path-key@^2.0.0, path-key@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
path-key@^3.1.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
path-parse@^1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
...
...
@@ -3656,12 +3996,6 @@ path-to-regexp@0.1.7:
version "0.1.7"
resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
path-type@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
dependencies:
pify "^3.0.0"
pathval@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0"
...
...
@@ -3670,6 +4004,11 @@ performance-now@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
picomatch@^2.0.4:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
pify@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
...
...
@@ -3688,11 +4027,12 @@ pinkie@^2.0.0:
version "2.0.4"
resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
pkg-dir@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3"
pkg-dir@^4.1.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
dependencies:
find-up "^
3
.0.0"
find-up "^
4
.0.0"
prelude-ls@~1.1.2:
version "1.1.2"
...
...
@@ -3714,6 +4054,13 @@ process-nextick-args@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
process-on-spawn@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.0.0.tgz#95b05a23073d30a17acfdc92a440efd2baefdc93"
integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==
dependencies:
fromentries "^1.2.0"
progress@^2.0.0:
version "2.0.3"
resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
...
...
@@ -3905,21 +4252,6 @@ read-package-tree@^5.3.1:
readdir-scoped-modules "^1.0.0"
util-promisify "^2.1.0"
read-pkg-up@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-4.0.0.tgz#1b221c6088ba7799601c808f91161c66e58f8978"
dependencies:
find-up "^3.0.0"
read-pkg "^3.0.0"
read-pkg@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389"
dependencies:
load-json-file "^4.0.0"
normalize-package-data "^2.3.2"
path-type "^3.0.0"
read@1, read@~1.0.1, read@~1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4"
...
...
@@ -3975,6 +4307,13 @@ readdir-scoped-modules@^1.0.0, readdir-scoped-modules@^1.1.0:
graceful-fs "^4.1.2"
once "^1.3.0"
readdirp@~3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839"
integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==
dependencies:
picomatch "^2.0.4"
reflect-metadata@^0.1.10:
version "0.1.12"
resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.12.tgz#311bf0c6b63cd782f228a81abe146a2bfa9c56f2"
...
...
@@ -4072,16 +4411,33 @@ require-main-filename@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1"
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
resolve-from@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
resolve-from@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
resolve@^1.10.0:
version "1.13.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.13.1.tgz#be0aa4c06acd53083505abb35f4d66932ab35d16"
dependencies:
path-parse "^1.0.6"
resolve@^1.3.2:
version "1.15.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.15.1.tgz#27bdcdeffeaf2d6244b95bb0f9f4b4653451f3e8"
integrity sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==
dependencies:
path-parse "^1.0.6"
responselike@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
...
...
@@ -4121,6 +4477,13 @@ rimraf@^2.6.1, rimraf@^2.6.2:
dependencies:
glob "^7.0.5"
rimraf@^3.0.0:
version "3.0.2"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
dependencies:
glob "^7.1.3"
rmdir@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/rmdir/-/rmdir-1.2.0.tgz#4fe0357cb06168c258e73e968093dc4e8a0f3253"
...
...
@@ -4183,7 +4546,7 @@ semver-diff@^2.0.0:
version "5.6.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004"
"semver@2.x || 3.x || 4 || 5", "semver@^2.3.0 || 3.x || 4 || 5", semver@^5.0.3, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0, semver@^5.7.1:
"semver@2.x || 3.x || 4 || 5", "semver@^2.3.0 || 3.x || 4 || 5", semver@^5.0.3, semver@^5.4.1, semver@^5.5.1, semver@^5.6.0,
semver@^5.7.0,
semver@^5.7.1:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
...
...
@@ -4191,7 +4554,7 @@ semver@^5.1.0, semver@^5.3.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab"
semver@^6.1.2, semver@^6.3.0:
semver@^6.0.0, semver@^6.1.2, semver@^6.3.0:
version "6.3.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
...
...
@@ -4246,10 +4609,22 @@ shebang-command@^1.2.0:
dependencies:
shebang-regex "^1.0.0"
shebang-command@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
dependencies:
shebang-regex "^3.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
shebang-regex@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
signal-exit@^3.0.0, signal-exit@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
...
...
@@ -4316,20 +4691,21 @@ source-map@^0.5.0:
version "0.5.7"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1:
source-map@^0.6.0, source-map@^0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
spawn-wrap@^1.4.2:
version "1.4.2"
resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-1.4.2.tgz#cff58e73a8224617b6561abdc32586ea0c82248c"
spawn-wrap@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e"
integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==
dependencies:
foreground-child "^
1.5.6
"
mkdirp "^0.5.0
"
os-home
dir "^
1
.0.
1
"
rimraf "^
2.6.2
"
foreground-child "^
2.0.0
"
is-windows "^1.0.2
"
make-
dir "^
3
.0.
0
"
rimraf "^
3.0.0
"
signal-exit "^3.0.2"
which "^
1.3.0
"
which "^
2.0.1
"
spdx-correct@^3.0.0:
version "3.1.0"
...
...
@@ -4461,7 +4837,7 @@ string-width@^1.0.1:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^4.0.0"
string-width@^3.0.0:
string-width@^3.0.0, string-width@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
dependencies:
...
...
@@ -4469,7 +4845,7 @@ string-width@^3.0.0:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0"
string-width@^4.1.0:
string-width@^4.1.0, string-width@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
dependencies:
...
...
@@ -4523,7 +4899,7 @@ strip-ansi@^4.0.0:
dependencies:
ansi-regex "^3.0.0"
strip-ansi@^5.1.0, strip-ansi@^5.2.0:
strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
dependencies:
...
...
@@ -4535,33 +4911,42 @@ strip-ansi@^6.0.0:
dependencies:
ansi-regex "^5.0.0"
strip-bom@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
strip-bom@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878"
integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==
strip-eof@^1.0.0:
version "1.0.0"
resolved "http://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
strip-json-comments@2.0.1, strip-json-comments@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
strip-json-comments@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.0.1.tgz#85713975a91fb87bf1b305cca77395e40d2a64a7"
strip-json-comments@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
supports-color@6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
dependencies:
has-flag "^3.0.0"
supports-color@5.4.0, supports-color@^5.3.0:
supports-color@^5.3.0:
version "5.4.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54"
dependencies:
has-flag "^3.0.0"
supports-color@^5.4.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
supports-color@^7.1.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
dependencies:
has-flag "^
3
.0.0"
has-flag "^
4
.0.0"
table@^5.2.3:
version "5.4.6"
...
...
@@ -4606,14 +4991,14 @@ term-size@^1.2.0:
dependencies:
execa "^0.7.0"
test-exclude@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-5.0.0.tgz#cdce7cece785e0e829cd5c2b27baf18bc583cfb7"
test-exclude@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e"
integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==
dependencies:
arrify "^1.0.1"
"@istanbuljs/schema" "^0.1.2"
glob "^7.1.4"
minimatch "^3.0.4"
read-pkg-up "^4.0.0"
require-main-filename "^1.0.1"
text-table@^0.2.0, text-table@~0.2.0:
version "0.2.0"
...
...
@@ -4648,6 +5033,13 @@ to-fast-properties@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
to-regex-range@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
dependencies:
is-number "^7.0.0"
topo@1.x.x:
version "1.1.0"
resolved "https://registry.yarnpkg.com/topo/-/topo-1.1.0.tgz#e9d751615d1bb87dc865db182fa1ca0a5ef536d5"
...
...
@@ -4671,10 +5063,6 @@ tree-kill@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.0.tgz#5846786237b4239014f05db156b643212d4c6f36"
trim-right@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
ts-deferred@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/ts-deferred/-/ts-deferred-1.0.4.tgz#58145ebaeef5b8f2a290b8cec3d060839f9489c7"
...
...
@@ -4726,7 +5114,7 @@ type-detect@^4.0.0:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
type-fest@^0.8.1:
type-fest@^0.8.0, type-fest@^0.8.1:
version "0.8.1"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
...
...
@@ -4737,6 +5125,13 @@ type-is@~1.6.15, type-is@~1.6.16:
media-typer "0.3.0"
mime-types "~2.1.18"
typedarray-to-buffer@^3.1.5:
version "3.1.5"
resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
dependencies:
is-typedarray "^1.0.0"
typedarray@^0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
...
...
@@ -4756,13 +5151,6 @@ typescript@^3.2.2:
version "3.5.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c"
uglify-js@^3.1.4:
version "3.4.9"
resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.9.tgz#af02f180c1207d76432e473ed24a28f4a782bae3"
dependencies:
commander "~2.17.1"
source-map "~0.6.1"
uid-number@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81"
...
...
@@ -4915,13 +5303,20 @@ which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
which@^1.2.9, which@^1.3.0, which@^1.3.1:
which@1.3.1, which@^1.2.9, which@^1.3.0, which@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
dependencies:
isexe "^2.0.0"
wide-align@^1.1.0:
which@^2.0.1:
version "2.0.2"
resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
dependencies:
isexe "^2.0.0"
wide-align@1.1.3, wide-align@^1.1.0:
version "1.1.3"
resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
dependencies:
...
...
@@ -4937,10 +5332,6 @@ word-wrap@~1.2.3:
version "1.2.3"
resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
wordwrap@~0.0.2:
version "0.0.3"
resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107"
worker-farm@^1.6.0, worker-farm@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8"
...
...
@@ -4954,6 +5345,24 @@ wrap-ansi@^2.0.0:
string-width "^1.0.1"
strip-ansi "^3.0.1"
wrap-ansi@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
dependencies:
ansi-styles "^3.2.0"
string-width "^3.0.0"
strip-ansi "^5.0.0"
wrap-ansi@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
...
...
@@ -4974,6 +5383,16 @@ write-file-atomic@^2.3.0, write-file-atomic@^2.4.3:
imurmurhash "^0.1.4"
signal-exit "^3.0.2"
write-file-atomic@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
dependencies:
imurmurhash "^0.1.4"
is-typedarray "^1.0.0"
signal-exit "^3.0.2"
typedarray-to-buffer "^3.1.5"
write@1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3"
...
...
@@ -5024,28 +5443,52 @@ yallist@^3.0.3:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
yargs-parser@13.1.2, yargs-parser@^13.1.2:
version "13.1.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38"
integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^18.1.1:
version "18.1.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.2.tgz#2f482bea2136dbde0861683abea7756d30b504f1"
integrity sha512-hlIPNR3IzC1YuL1c2UwwDKpXlNFBqD1Fswwh1khz5+d8Cq/8yc/Mn0i+rQXduu8hcrFKvO7Eryk+09NecTQAAQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^9.0.2:
version "9.0.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-9.0.2.tgz#9ccf6a43460fe4ed40a9bb68f48d43b8a68cc077"
dependencies:
camelcase "^4.1.0"
yargs@11.1.0:
version "11.1.0"
resolved "http://registry.npmjs.org/yargs/-/yargs-11.1.0.tgz#90b869934ed6e871115ea2ff58b03f4724ed2d77"
yargs-unparser@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
dependencies:
cliui "^4.0.0"
decamelize "^1.1.1"
find-up "^2.1.0"
get-caller-file "^1.0.1"
os-locale "^2.0.0"
flat "^4.1.0"
lodash "^4.17.15"
yargs "^13.3.0"
yargs@13.3.2, yargs@^13.3.0:
version "13.3.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==
dependencies:
cliui "^5.0.0"
find-up "^3.0.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^
1
.0.
1
"
require-main-filename "^
2
.0.
0
"
set-blocking "^2.0.0"
string-width "^
2
.0.0"
string-width "^
3
.0.0"
which-module "^2.0.0"
y18n "^
3.2.1
"
yargs-parser "^
9.0
.2"
y18n "^
4.0.0
"
yargs-parser "^
13.1
.2"
yargs@^11.0.0:
version "11.1.1"
...
...
@@ -5064,6 +5507,23 @@ yargs@^11.0.0:
y18n "^3.2.1"
yargs-parser "^9.0.2"
yargs@^15.0.2:
version "15.3.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
find-up "^4.1.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^4.2.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.1"
yn@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a"