Commit 3be88922 authored by suiguoxin's avatar suiguoxin
Browse files

Merge branch 'master' of git://github.com/microsoft/nni

parents b92c4ab2 5a058baf
...@@ -37,6 +37,9 @@ from ConfigSpaceNNI import Configuration ...@@ -37,6 +37,9 @@ from ConfigSpaceNNI import Configuration
from .convert_ss_to_scenario import generate_scenario from .convert_ss_to_scenario import generate_scenario
from nni.tuner import Tuner
from nni.utils import OptimizeMode, extract_scalar_reward, randint_to_quniform
class SMACTuner(Tuner): class SMACTuner(Tuner):
""" """
...@@ -136,6 +139,7 @@ class SMACTuner(Tuner): ...@@ -136,6 +139,7 @@ class SMACTuner(Tuner):
search_space: search_space:
search space search space
""" """
randint_to_quniform(search_space)
if not self.update_ss_done: if not self.update_ss_done:
self.categorical_dict = generate_scenario(search_space) self.categorical_dict = generate_scenario(search_space)
if self.categorical_dict is None: if self.categorical_dict is None:
......
...@@ -36,7 +36,8 @@ __all__ = [ ...@@ -36,7 +36,8 @@ __all__ = [
'qnormal', 'qnormal',
'lognormal', 'lognormal',
'qlognormal', 'qlognormal',
'function_choice' 'function_choice',
'mutable_layer'
] ]
...@@ -78,6 +79,9 @@ if trial_env_vars.NNI_PLATFORM is None: ...@@ -78,6 +79,9 @@ if trial_env_vars.NNI_PLATFORM is None:
def function_choice(*funcs, name=None): def function_choice(*funcs, name=None):
return random.choice(funcs)() return random.choice(funcs)()
def mutable_layer():
    # NAS mutable_layer needs chosen-parameter records from an NNI platform;
    # when NNI_PLATFORM is unset there is no sensible fallback, so fail loudly.
    raise RuntimeError('Cannot call nni.mutable_layer in this mode')
else: else:
def choice(options, name=None, key=None): def choice(options, name=None, key=None):
...@@ -113,6 +117,42 @@ else: ...@@ -113,6 +117,42 @@ else:
def function_choice(funcs, name=None, key=None): def function_choice(funcs, name=None, key=None):
return funcs[_get_param(key)]() return funcs[_get_param(key)]()
def mutable_layer(
        mutable_id,
        mutable_layer_id,
        funcs,
        funcs_args,
        fixed_inputs,
        optional_inputs,
        optional_input_size=0):
    '''Run the function/input combination the tuner chose for one mutable layer.

    The chosen-parameter record fetched via the mutable block id looks like::

        {
            "mutable_id": {
                "mutable_layer_id": {
                    "chosen_layer": "pool",
                    "chosen_inputs": ["out1", "out3"]
                }
            }
        }

    Parameters:
    ---------------
    mutable_id: name of this mutable_layer block (which may hold several mutable layers)
    mutable_layer_id: name of one mutable layer inside the block
    funcs: dict mapping layer names to callables
    funcs_args: dict mapping layer names to their positional-argument lists
    fixed_inputs: inputs always passed to the chosen function
    optional_inputs: dict of candidate inputs, keyed by name
    optional_input_size: number of candidate inputs to be chosen
    '''
    layer_spec = _get_param(mutable_id)[mutable_layer_id]
    picked_layer = layer_spec["chosen_layer"]
    picked_input_names = layer_spec["chosen_inputs"]
    # Resolve the chosen input names back to their actual values.
    picked_inputs = [optional_inputs[name] for name in picked_input_names]
    return funcs[picked_layer]([fixed_inputs, picked_inputs], *funcs_args[picked_layer])
def _get_param(key): def _get_param(key):
if trial._params is None: if trial._params is None:
trial.get_next_parameter() trial.get_next_parameter()
......
...@@ -40,6 +40,7 @@ class OptimizeMode(Enum): ...@@ -40,6 +40,7 @@ class OptimizeMode(Enum):
Minimize = 'minimize' Minimize = 'minimize'
Maximize = 'maximize' Maximize = 'maximize'
class NodeType: class NodeType:
"""Node Type class """Node Type class
""" """
...@@ -83,6 +84,7 @@ def extract_scalar_reward(value, scalar_key='default'): ...@@ -83,6 +84,7 @@ def extract_scalar_reward(value, scalar_key='default'):
raise RuntimeError('Incorrect final result: the final result should be float/int, or a dict which has a key named "default" whose value is float/int.') raise RuntimeError('Incorrect final result: the final result should be float/int, or a dict which has a key named "default" whose value is float/int.')
return reward return reward
def convert_dict2tuple(value): def convert_dict2tuple(value):
""" """
convert dict type to tuple to solve unhashable problem. convert dict type to tuple to solve unhashable problem.
...@@ -94,9 +96,30 @@ def convert_dict2tuple(value): ...@@ -94,9 +96,30 @@ def convert_dict2tuple(value):
else: else:
return value return value
def init_dispatcher_logger(): def init_dispatcher_logger():
""" Initialize dispatcher logging configuration""" """ Initialize dispatcher logging configuration"""
logger_file_path = 'dispatcher.log' logger_file_path = 'dispatcher.log'
if dispatcher_env_vars.NNI_LOG_DIRECTORY is not None: if dispatcher_env_vars.NNI_LOG_DIRECTORY is not None:
logger_file_path = os.path.join(dispatcher_env_vars.NNI_LOG_DIRECTORY, logger_file_path) logger_file_path = os.path.join(dispatcher_env_vars.NNI_LOG_DIRECTORY, logger_file_path)
init_logger(logger_file_path, dispatcher_env_vars.NNI_LOG_LEVEL) init_logger(logger_file_path, dispatcher_env_vars.NNI_LOG_LEVEL)
def randint_to_quniform(in_x):
    """Recursively rewrite every 'randint' node of a search space into an
    equivalent 'quniform' node with q=1. Mutates ``in_x`` in place."""
    if isinstance(in_x, dict):
        if NodeType.TYPE not in in_x:
            # Plain mapping: descend into every entry.
            for sub_space in in_x.values():
                randint_to_quniform(sub_space)
        elif in_x[NodeType.TYPE] == 'randint':
            # Append the quantization step q=1 and relabel the node type.
            bounds = in_x[NodeType.VALUE]
            bounds.append(1)
            in_x[NodeType.TYPE] = 'quniform'
            in_x[NodeType.VALUE] = bounds
        elif in_x[NodeType.TYPE] == 'choice':
            # Choice options may themselves contain nested search spaces.
            randint_to_quniform(in_x[NodeType.VALUE])
    elif isinstance(in_x, list):
        for element in in_x:
            randint_to_quniform(element)
...@@ -3,7 +3,7 @@ experimentName: default_test ...@@ -3,7 +3,7 @@ experimentName: default_test
maxExecDuration: 5m maxExecDuration: 5m
maxTrialNum: 4 maxTrialNum: 4
trialConcurrency: 2 trialConcurrency: 2
searchSpacePath: ../../../examples/trials/mnist-cascading-search-space/search_space.json searchSpacePath: ../../../examples/trials/mnist-nested-search-space/search_space.json
tuner: tuner:
#choice: TPE, Random, Anneal, Evolution #choice: TPE, Random, Anneal, Evolution
...@@ -13,7 +13,7 @@ assessor: ...@@ -13,7 +13,7 @@ assessor:
classArgs: classArgs:
optimize_mode: maximize optimize_mode: maximize
trial: trial:
codeDir: ../../../examples/trials/mnist-cascading-search-space codeDir: ../../../examples/trials/mnist-nested-search-space
command: python3 mnist.py --batch_num 100 command: python3 mnist.py --batch_num 100
gpuNum: 0 gpuNum: 0
......
...@@ -25,6 +25,94 @@ from nni_cmd.common_utils import print_warning ...@@ -25,6 +25,94 @@ from nni_cmd.common_utils import print_warning
# pylint: disable=unidiomatic-typecheck # pylint: disable=unidiomatic-typecheck
def parse_annotation_mutable_layers(code, lineno):
    """Parse the string of mutable layers in annotation.

    For each layer dict in the annotation this builds one
    ``layer_output = nni.mutable_layer(...)`` assignment, so the rewritten
    trial code asks NNI at runtime which function and inputs were chosen.

    code: annotation string (excluding '@')
    lineno: source line of the annotation; used to derive a unique mutable
        block id
    Returns a list of ast.Assign nodes, one per mutable layer.
    """
    module = ast.parse(code)
    assert type(module) is ast.Module, 'internal error #1'
    assert len(module.body) == 1, 'Annotation mutable_layers contains more than one expression'
    assert type(module.body[0]) is ast.Expr, 'Annotation is not expression'
    call = module.body[0].value
    nodes = []
    # One mutable block per annotation; the layers inside it are numbered.
    mutable_id = 'mutable_block_' + str(lineno)
    mutable_layer_cnt = 0
    for arg in call.args:
        # Track which fields have appeared, to reject duplicates and to
        # enforce the mandatory ones after the field loop.
        fields = {'layer_choice': False,
                  'fixed_inputs': False,
                  'optional_inputs': False,
                  'optional_input_size': False,
                  'layer_output': False}
        for k, value in zip(arg.keys, arg.values):
            if k.id == 'layer_choice':
                assert not fields['layer_choice'], 'Duplicated field: layer_choice'
                assert type(value) is ast.List, 'Value of layer_choice should be a list'
                call_funcs_keys = []
                call_funcs_values = []
                call_kwargs_values = []
                # NOTE: this rebinds the name `call`; harmless because the
                # outer `for arg in call.args` iterator was already created.
                for call in value.elts:
                    assert type(call) is ast.Call, 'Element in layer_choice should be function call'
                    # The call's source text is the key used both in the
                    # generated dicts and in the search space.
                    call_name = astor.to_source(call).strip()
                    call_funcs_keys.append(ast.Str(s=call_name))
                    call_funcs_values.append(call.func)
                    assert not call.args, 'Number of args without keyword should be zero'
                    kw_args = []
                    kw_values = []
                    for kw in call.keywords:
                        kw_args.append(kw.arg)
                        kw_values.append(kw.value)
                    call_kwargs_values.append(ast.Dict(keys=kw_args, values=kw_values))
                call_funcs = ast.Dict(keys=call_funcs_keys, values=call_funcs_values)
                call_kwargs = ast.Dict(keys=call_funcs_keys, values=call_kwargs_values)
                fields['layer_choice'] = True
            elif k.id == 'fixed_inputs':
                assert not fields['fixed_inputs'], 'Duplicated field: fixed_inputs'
                assert type(value) is ast.List, 'Value of fixed_inputs should be a list'
                fixed_inputs = value
                fields['fixed_inputs'] = True
            elif k.id == 'optional_inputs':
                assert not fields['optional_inputs'], 'Duplicated field: optional_inputs'
                assert type(value) is ast.List, 'Value of optional_inputs should be a list'
                # Map each candidate input's source text to its expression.
                var_names = [ast.Str(s=astor.to_source(var).strip()) for var in value.elts]
                optional_inputs = ast.Dict(keys=var_names, values=value.elts)
                fields['optional_inputs'] = True
            elif k.id == 'optional_input_size':
                assert not fields['optional_input_size'], 'Duplicated field: optional_input_size'
                assert type(value) is ast.Num, 'Value of optional_input_size should be a number'
                optional_input_size = value
                fields['optional_input_size'] = True
            elif k.id == 'layer_output':
                assert not fields['layer_output'], 'Duplicated field: layer_output'
                assert type(value) is ast.Name, 'Value of layer_output should be ast.Name type'
                layer_output = value
                fields['layer_output'] = True
            else:
                raise AssertionError('Unexpected field in mutable layer')
        # make call for this mutable layer
        assert fields['layer_choice'], 'layer_choice must exist'
        assert fields['layer_output'], 'layer_output must exist'
        mutable_layer_id = 'mutable_layer_' + str(mutable_layer_cnt)
        mutable_layer_cnt += 1
        target_call_attr = ast.Attribute(value=ast.Name(id='nni', ctx=ast.Load()), attr='mutable_layer', ctx=ast.Load())
        target_call_args = [ast.Str(s=mutable_id),
                            ast.Str(s=mutable_layer_id),
                            call_funcs,
                            call_kwargs]
        # Optional fields become a literal None in the generated call.
        if fields['fixed_inputs']:
            target_call_args.append(fixed_inputs)
        else:
            target_call_args.append(ast.NameConstant(value=None))
        if fields['optional_inputs']:
            target_call_args.append(optional_inputs)
            assert fields['optional_input_size'], 'optional_input_size must exist when optional_inputs exists'
            target_call_args.append(optional_input_size)
        else:
            target_call_args.append(ast.NameConstant(value=None))
        target_call = ast.Call(func=target_call_attr, args=target_call_args, keywords=[])
        node = ast.Assign(targets=[layer_output], value=target_call)
        nodes.append(node)
    return nodes
def parse_annotation(code): def parse_annotation(code):
"""Parse an annotation string. """Parse an annotation string.
...@@ -235,6 +323,9 @@ class Transformer(ast.NodeTransformer): ...@@ -235,6 +323,9 @@ class Transformer(ast.NodeTransformer):
or string.startswith('@nni.get_next_parameter('): or string.startswith('@nni.get_next_parameter('):
return parse_annotation(string[1:]) # expand annotation string to code return parse_annotation(string[1:]) # expand annotation string to code
if string.startswith('@nni.mutable_layers('):
return parse_annotation_mutable_layers(string[1:], node.lineno)
if string.startswith('@nni.variable(') \ if string.startswith('@nni.variable(') \
or string.startswith('@nni.function_choice('): or string.startswith('@nni.function_choice('):
self.stack[-1] = string[1:] # mark that the next expression is annotated self.stack[-1] = string[1:] # mark that the next expression is annotated
......
...@@ -38,7 +38,8 @@ _ss_funcs = [ ...@@ -38,7 +38,8 @@ _ss_funcs = [
'qnormal', 'qnormal',
'lognormal', 'lognormal',
'qlognormal', 'qlognormal',
'function_choice' 'function_choice',
'mutable_layer'
] ]
...@@ -50,6 +51,18 @@ class SearchSpaceGenerator(ast.NodeTransformer): ...@@ -50,6 +51,18 @@ class SearchSpaceGenerator(ast.NodeTransformer):
self.search_space = {} self.search_space = {}
self.last_line = 0 # last parsed line, useful for error reporting self.last_line = 0 # last parsed line, useful for error reporting
def generate_mutable_layer_search_space(self, args):
mutable_block = args[0].s
mutable_layer = args[1].s
if mutable_block not in self.search_space:
self.search_space[mutable_block] = dict()
self.search_space[mutable_block][mutable_layer] = {
'layer_choice': [key.s for key in args[2].keys],
'optional_inputs': [key.s for key in args[5].keys],
'optional_input_size': args[6].n
}
def visit_Call(self, node): # pylint: disable=invalid-name def visit_Call(self, node): # pylint: disable=invalid-name
self.generic_visit(node) self.generic_visit(node)
...@@ -68,6 +81,10 @@ class SearchSpaceGenerator(ast.NodeTransformer): ...@@ -68,6 +81,10 @@ class SearchSpaceGenerator(ast.NodeTransformer):
self.last_line = node.lineno self.last_line = node.lineno
if func == 'mutable_layer':
self.generate_mutable_layer_search_space(node.args)
return node
if node.keywords: if node.keywords:
# there is a `name` argument # there is a `name` argument
assert len(node.keywords) == 1, 'Smart parameter has keyword argument other than "name"' assert len(node.keywords) == 1, 'Smart parameter has keyword argument other than "name"'
......
import time
def add_one(inputs):
    """Return *inputs* increased by a fixed offset of 1."""
    offset = 1
    return inputs + offset
def add_two(inputs):
    """Return *inputs* increased by a fixed offset of 2."""
    offset = 2
    return inputs + offset
def add_three(inputs):
    """Return *inputs* increased by a fixed offset of 3."""
    offset = 3
    return inputs + offset
def add_four(inputs):
    """Return *inputs* increased by a fixed offset of 4."""
    offset = 4
    return inputs + offset
def main():
    # Toy NAS trial: start from a constant input and stack three mutable
    # layers, each choosing one of the add_* functions and which earlier
    # outputs to consume. The triple-quoted strings below are NNI
    # annotations; `nnictl` rewrites them into real nni.* calls, defining
    # the layer_*_out names. Running this file directly (without the
    # annotation rewrite) would raise NameError on layer_3_out.
    images = 5
    """@nni.mutable_layers(
    {
        layer_choice: [add_one(), add_two(), add_three(), add_four()],
        optional_inputs: [images],
        optional_input_size: 1,
        layer_output: layer_1_out
    },
    {
        layer_choice: [add_one(), add_two(), add_three(), add_four()],
        optional_inputs: [layer_1_out],
        optional_input_size: 1,
        layer_output: layer_2_out
    },
    {
        layer_choice: [add_one(), add_two(), add_three(), add_four()],
        optional_inputs: [layer_1_out, layer_2_out],
        optional_input_size: 1,
        layer_output: layer_3_out
    }
    )"""
    # Report each layer's output as an intermediate result, pausing so the
    # assessor has distinct reporting steps to look at.
    """@nni.report_intermediate_result(layer_1_out)"""
    time.sleep(2)
    """@nni.report_intermediate_result(layer_2_out)"""
    time.sleep(2)
    """@nni.report_intermediate_result(layer_3_out)"""
    time.sleep(2)
    layer_3_out = layer_3_out + 10
    """@nni.report_final_result(layer_3_out)"""
# Run one trial when the (annotation-rewritten) script is executed directly.
if __name__ == '__main__':
    main()
...@@ -63,7 +63,9 @@ common_schema = { ...@@ -63,7 +63,9 @@ common_schema = {
Optional('advisor'): dict, Optional('advisor'): dict,
Optional('assessor'): dict, Optional('assessor'): dict,
Optional('localConfig'): { Optional('localConfig'): {
Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!') Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
Optional('maxTrialNumPerGpu'): setType('maxTrialNumPerGpu', int),
Optional('useActiveGpu'): setType('useActiveGpu', bool)
} }
} }
tuner_schema_dict = { tuner_schema_dict = {
...@@ -310,26 +312,30 @@ frameworkcontroller_config_schema = { ...@@ -310,26 +312,30 @@ frameworkcontroller_config_schema = {
}) })
} }
machine_list_schima = { machine_list_schema = {
Optional('machineList'):[Or({ Optional('machineList'):[Or({
'ip': setType('ip', str), 'ip': setType('ip', str),
Optional('port'): setNumberRange('port', int, 1, 65535), Optional('port'): setNumberRange('port', int, 1, 65535),
'username': setType('username', str), 'username': setType('username', str),
'passwd': setType('passwd', str), 'passwd': setType('passwd', str),
Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!') Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
Optional('maxTrialNumPerGpu'): setType('maxTrialNumPerGpu', int),
Optional('useActiveGpu'): setType('useActiveGpu', bool)
},{ },{
'ip': setType('ip', str), 'ip': setType('ip', str),
Optional('port'): setNumberRange('port', int, 1, 65535), Optional('port'): setNumberRange('port', int, 1, 65535),
'username': setType('username', str), 'username': setType('username', str),
'sshKeyPath': setPathCheck('sshKeyPath'), 'sshKeyPath': setPathCheck('sshKeyPath'),
Optional('passphrase'): setType('passphrase', str), Optional('passphrase'): setType('passphrase', str),
Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!') Optional('gpuIndices'): Or(int, And(str, lambda x: len([int(i) for i in x.split(',')]) > 0), error='gpuIndex format error!'),
Optional('maxTrialNumPerGpu'): setType('maxTrialNumPerGpu', int),
Optional('useActiveGpu'): setType('useActiveGpu', bool)
})] })]
} }
LOCAL_CONFIG_SCHEMA = Schema({**common_schema, **common_trial_schema}) LOCAL_CONFIG_SCHEMA = Schema({**common_schema, **common_trial_schema})
REMOTE_CONFIG_SCHEMA = Schema({**common_schema, **common_trial_schema, **machine_list_schima}) REMOTE_CONFIG_SCHEMA = Schema({**common_schema, **common_trial_schema, **machine_list_schema})
PAI_CONFIG_SCHEMA = Schema({**common_schema, **pai_trial_schema, **pai_config_schema}) PAI_CONFIG_SCHEMA = Schema({**common_schema, **pai_trial_schema, **pai_config_schema})
......
...@@ -23,7 +23,6 @@ import os ...@@ -23,7 +23,6 @@ import os
import json import json
import shutil import shutil
from .constants import NNICTL_HOME_DIR from .constants import NNICTL_HOME_DIR
from .common_utils import print_error
class Config: class Config:
'''a util class to load and save config''' '''a util class to load and save config'''
...@@ -121,25 +120,3 @@ class Experiments: ...@@ -121,25 +120,3 @@ class Experiments:
except ValueError: except ValueError:
return {} return {}
return {} return {}
class HDFSConfig:
    '''manage hdfs configuration'''

    def __init__(self):
        # Keep the hdfs credentials beside the other nnictl metadata files.
        os.makedirs(NNICTL_HOME_DIR, exist_ok=True)
        self.hdfs_config_file = os.path.join(NNICTL_HOME_DIR, '.hdfs')

    def get_config(self):
        '''Return the stored hdfs config dict, or None if absent/unreadable.'''
        if not os.path.exists(self.hdfs_config_file):
            return None
        try:
            with open(self.hdfs_config_file, 'r') as file:
                return json.load(file)
        except (OSError, ValueError) as exception:
            # Was a bare `except Exception`; narrowed to read failures and
            # malformed JSON (json.JSONDecodeError subclasses ValueError)
            # while keeping the best-effort report-and-return-None behaviour.
            print_error(exception)
            return None

    def set_config(self, host, user_name):
        '''Persist the hdfs host and user name.'''
        with open(self.hdfs_config_file, 'w') as file:
            json.dump({'host': host, 'userName': user_name}, file)
...@@ -160,9 +160,13 @@ def set_local_config(experiment_config, port, config_file_name): ...@@ -160,9 +160,13 @@ def set_local_config(experiment_config, port, config_file_name):
request_data = dict() request_data = dict()
if experiment_config.get('localConfig'): if experiment_config.get('localConfig'):
request_data['local_config'] = experiment_config['localConfig'] request_data['local_config'] = experiment_config['localConfig']
if request_data['local_config'] and request_data['local_config'].get('gpuIndices') \ if request_data['local_config']:
and isinstance(request_data['local_config'].get('gpuIndices'), int): if request_data['local_config'].get('gpuIndices') and isinstance(request_data['local_config'].get('gpuIndices'), int):
request_data['local_config']['gpuIndices'] = str(request_data['local_config'].get('gpuIndices')) request_data['local_config']['gpuIndices'] = str(request_data['local_config'].get('gpuIndices'))
if request_data['local_config'].get('maxTrialNumOnEachGpu'):
request_data['local_config']['maxTrialNumOnEachGpu'] = request_data['local_config'].get('maxTrialNumOnEachGpu')
if request_data['local_config'].get('useActiveGpu'):
request_data['local_config']['useActiveGpu'] = request_data['local_config'].get('useActiveGpu')
response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT) response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT)
err_message = '' err_message = ''
if not response or not check_response(response): if not response or not check_response(response):
...@@ -343,6 +347,13 @@ def set_experiment(experiment_config, mode, port, config_file_name): ...@@ -343,6 +347,13 @@ def set_experiment(experiment_config, mode, port, config_file_name):
def launch_experiment(args, experiment_config, mode, config_file_name, experiment_id=None): def launch_experiment(args, experiment_config, mode, config_file_name, experiment_id=None):
'''follow steps to start rest server and start experiment''' '''follow steps to start rest server and start experiment'''
nni_config = Config(config_file_name) nni_config = Config(config_file_name)
# check execution policy in powershell
if sys.platform == 'win32':
execution_policy = check_output(['powershell.exe','Get-ExecutionPolicy']).decode('ascii').strip()
if execution_policy == 'Restricted':
print_error('PowerShell execution policy error, please run PowerShell as administrator with this command first:\r\n'\
+ '\'Set-ExecutionPolicy -ExecutionPolicy Unrestricted\'')
exit(1)
# check packages for tuner # check packages for tuner
package_name, module_name = None, None package_name, module_name = None, None
if experiment_config.get('tuner') and experiment_config['tuner'].get('builtinTunerName'): if experiment_config.get('tuner') and experiment_config['tuner'].get('builtinTunerName'):
......
...@@ -194,15 +194,6 @@ def parse_args(): ...@@ -194,15 +194,6 @@ def parse_args():
'the unit is second') 'the unit is second')
parser_top.set_defaults(func=monitor_experiment) parser_top.set_defaults(func=monitor_experiment)
parser_hdfs = subparsers.add_parser('hdfs', help='monitor hdfs files')
parser_hdfs_subparsers = parser_hdfs.add_subparsers()
parser_hdfs_set = parser_hdfs_subparsers.add_parser('set', help='set the host and userName of hdfs')
parser_hdfs_set.add_argument('--host', required=True, dest='host', help='the host of hdfs')
parser_hdfs_set.add_argument('--user_name', required=True, dest='user_name', help='the userName of hdfs')
parser_hdfs_set.set_defaults(func=hdfs_set)
parser_hdfs_list = parser_hdfs_subparsers.add_parser('clean', help='clean hdfs files')
parser_hdfs_list.set_defaults(func=hdfs_clean)
args = parser.parse_args() args = parser.parse_args()
args.func(args) args.func(args)
......
...@@ -26,9 +26,8 @@ import datetime ...@@ -26,9 +26,8 @@ import datetime
import time import time
from subprocess import call, check_output from subprocess import call, check_output
from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response
from pyhdfs import HdfsClient, HdfsFileNotFoundException from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url, export_data_url
from .config_utils import Config, Experiments, HDFSConfig from .config_utils import Config, Experiments
from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url
from .constants import NNICTL_HOME_DIR, EXPERIMENT_INFORMATION_FORMAT, EXPERIMENT_DETAIL_FORMAT, \ from .constants import NNICTL_HOME_DIR, EXPERIMENT_INFORMATION_FORMAT, EXPERIMENT_DETAIL_FORMAT, \
EXPERIMENT_MONITOR_INFO, TRIAL_MONITOR_HEAD, TRIAL_MONITOR_CONTENT, TRIAL_MONITOR_TAIL, REST_TIME_OUT EXPERIMENT_MONITOR_INFO, TRIAL_MONITOR_HEAD, TRIAL_MONITOR_CONTENT, TRIAL_MONITOR_TAIL, REST_TIME_OUT
from .common_utils import print_normal, print_error, print_warning, detect_process from .common_utils import print_normal, print_error, print_warning, detect_process
...@@ -451,30 +450,9 @@ def monitor_experiment(args): ...@@ -451,30 +450,9 @@ def monitor_experiment(args):
print_error(exception) print_error(exception)
exit(1) exit(1)
def parse_trial_data(content):
    """Flatten raw trial-job records into one flat dict per parameter phase.

    content: list of trial-job dicts as returned by the REST api; each has
        an 'id', a list of JSON-encoded 'hyperParameters' (one per phase),
        and optionally a parallel 'finalMetricData' list.
    Returns List[Dict]: the decoded hyperparameters merged with the trial
        'id' and, when a final metric exists for that phase, the reward
        value(s) -- a numeric metric becomes a 'reward' key, a dict metric
        is merged in wholesale.
    Raises ValueError when a final metric is neither a number nor a dict.
    """
    trial_records = []
    for trial_data in content:
        # Phases beyond the metric list simply have no reward recorded.
        metrics = trial_data.get('finalMetricData', [])
        for phase_i, raw_params in enumerate(trial_data['hyperParameters']):
            hparam = json.loads(raw_params)['parameters']
            hparam['id'] = trial_data['id']
            if phase_i < len(metrics):
                reward = json.loads(metrics[phase_i]['data'])
                if isinstance(reward, (float, int)):
                    record = {**hparam, 'reward': reward}
                elif isinstance(reward, dict):
                    record = {**hparam, **reward}
                else:
                    raise ValueError("Invalid finalMetricsData format: {}/{}".format(type(reward), reward))
            else:
                record = hparam
            trial_records.append(record)
    return trial_records
def export_trials_data(args): def export_trials_data(args):
"""export experiment metadata to csv '''export experiment metadata to csv
""" '''
nni_config = Config(get_config_filename(args)) nni_config = Config(get_config_filename(args))
rest_port = nni_config.get_config('restServerPort') rest_port = nni_config.get_config('restServerPort')
rest_pid = nni_config.get_config('restServerPid') rest_pid = nni_config.get_config('restServerPid')
...@@ -483,58 +461,28 @@ def export_trials_data(args): ...@@ -483,58 +461,28 @@ def export_trials_data(args):
return return
running, response = check_rest_server_quick(rest_port) running, response = check_rest_server_quick(rest_port)
if running: if running:
response = rest_get(trial_jobs_url(rest_port), 20) response = rest_get(export_data_url(rest_port), 20)
if response is not None and check_response(response): if response is not None and check_response(response):
content = json.loads(response.text)
# dframe = pd.DataFrame.from_records([parse_trial_data(t_data) for t_data in content])
# dframe.to_csv(args.csv_path, sep='\t')
records = parse_trial_data(content)
if args.type == 'json': if args.type == 'json':
json_records = [] with open(args.path, 'w') as file:
for trial in records: file.write(response.text)
value = trial.pop('reward', None) elif args.type == 'csv':
trial_id = trial.pop('id', None) content = json.loads(response.text)
json_records.append({'parameter': trial, 'value': value, 'id': trial_id}) trial_records = []
with open(args.path, 'w') as file: for record in content:
if args.type == 'csv': if not isinstance(record['value'], (float, int)):
writer = csv.DictWriter(file, set.union(*[set(r.keys()) for r in records])) formated_record = {**record['parameter'], **record['value'], **{'id': record['id']}}
else:
formated_record = {**record['parameter'], **{'reward': record['value'], 'id': record['id']}}
trial_records.append(formated_record)
with open(args.path, 'w') as file:
writer = csv.DictWriter(file, set.union(*[set(r.keys()) for r in trial_records]))
writer.writeheader() writer.writeheader()
writer.writerows(records) writer.writerows(trial_records)
else: else:
json.dump(json_records, file) print_error('Unknown type: %s' % args.type)
exit(1)
else: else:
print_error('Export failed...') print_error('Export failed...')
else: else:
print_error('Restful server is not Running') print_error('Restful server is not Running')
\ No newline at end of file
def hdfs_set(args):
    '''Persist the hdfs host/user name given on the command line.'''
    config = HDFSConfig()
    config.set_config(args.host, args.user_name)
    print_normal('HDFS account update success!')
def hdfs_clean(args):
    # Delete every experiment directory under /<user>/nni/experiments on
    # hdfs, after interactive confirmation. Requires `nnictl hdfs set` first.
    hdfsConfig = HDFSConfig()
    if not hdfsConfig.get_config():
        print_error('Please use \'nnictl hdfs set\' command to set hdfs account first!')
        exit(1)
    host = hdfsConfig.get_config().get('host')
    user_name = hdfsConfig.get_config().get('userName')
    # NOTE(review): webhdfs is assumed reachable on port 80 at this path --
    # confirm against the deployment.
    hdfs_client = HdfsClient(hosts='{0}:80'.format(host), user_name=user_name, webhdfs_path='/webhdfs/api/v1', timeout=5)
    root_path = os.path.join('/', user_name, 'nni', 'experiments')
    # Re-prompt until the user answers with a recognizable yes/no.
    while True:
        inputs = input('INFO: clean up all files in {0}, do you want to continue?[Y/N]:'.format(root_path))
        if inputs.lower() not in ['y', 'n', 'yes', 'no']:
            print_warning('please input Y or N!')
        elif inputs.lower() in ['n', 'no']:
            exit(0)
        else:
            break
    path_list = hdfs_client.listdir(root_path)
    for path in path_list:
        full_path = os.path.join(root_path, path)
        print_normal('deleting {0}'.format(full_path))
        # HdfsClient.delete returns a success flag rather than raising.
        if hdfs_client.delete(full_path, recursive=True):
            print_normal('delete success!')
        else:
            print_normal('delete failed!')
    print_normal('DONE')
...@@ -35,6 +35,8 @@ CHECK_STATUS_API = '/check-status' ...@@ -35,6 +35,8 @@ CHECK_STATUS_API = '/check-status'
TRIAL_JOBS_API = '/trial-jobs' TRIAL_JOBS_API = '/trial-jobs'
EXPORT_DATA_API = '/export-data'
TENSORBOARD_API = '/tensorboard' TENSORBOARD_API = '/tensorboard'
...@@ -68,6 +70,11 @@ def trial_job_id_url(port, job_id): ...@@ -68,6 +70,11 @@ def trial_job_id_url(port, job_id):
return '{0}:{1}{2}{3}/:{4}'.format(BASE_URL, port, API_ROOT_URL, TRIAL_JOBS_API, job_id) return '{0}:{1}{2}{3}/:{4}'.format(BASE_URL, port, API_ROOT_URL, TRIAL_JOBS_API, job_id)
def export_data_url(port):
    '''Build the REST url of the export-data endpoint for the given port.'''
    return '%s:%s%s%s' % (BASE_URL, port, API_ROOT_URL, EXPORT_DATA_API)
def tensorboard_url(port): def tensorboard_url(port):
'''get tensorboard url''' '''get tensorboard url'''
return '{0}:{1}{2}{3}'.format(BASE_URL, port, API_ROOT_URL, TENSORBOARD_API) return '{0}:{1}{2}{3}'.format(BASE_URL, port, API_ROOT_URL, TENSORBOARD_API)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment