Unverified Commit 4f66d0c1 authored by SparkSnail, committed by GitHub

Merge pull request #229 from microsoft/master

merge master
parents 4132f620 049634f7
......@@ -5,6 +5,7 @@ import glob
import json
import logging
import os
import random
import shutil
import sys
from unittest import TestCase, main
......@@ -15,6 +16,7 @@ from nni.gp_tuner.gp_tuner import GPTuner
from nni.gridsearch_tuner.gridsearch_tuner import GridSearchTuner
from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner
from nni.metis_tuner.metis_tuner import MetisTuner
try:
from nni.smac_tuner.smac_tuner import SMACTuner
except ImportError:
......@@ -34,20 +36,28 @@ class BuiltinTunersTestCase(TestCase):
- [X] generate_multiple_parameters
- [ ] import_data
- [ ] trial_end
- [ ] receive_trial_result
- [x] receive_trial_result
"""
def setUp(self):
self.test_round = 3
self.params_each_round = 50
self.exhaustive = False
def search_space_test_one(self, tuner_factory, search_space):
tuner = tuner_factory()
self.assertIsInstance(tuner, Tuner)
tuner.update_search_space(search_space)
parameters = tuner.generate_multiple_parameters(list(range(0, 50)))
logger.info(parameters)
for i in range(self.test_round):
parameters = tuner.generate_multiple_parameters(list(range(i * self.params_each_round,
(i + 1) * self.params_each_round)))
logger.debug(parameters)
self.check_range(parameters, search_space)
if not parameters: # TODO: not strict
for k in range(min(len(parameters), self.params_each_round)):
tuner.receive_trial_result(self.params_each_round * i + k, parameters[k], random.uniform(-100, 100))
if not parameters and not self.exhaustive:
raise ValueError("No parameters generated")
return parameters
def check_range(self, generated_params, search_space):
EPS = 1E-6
......@@ -91,7 +101,8 @@ class BuiltinTunersTestCase(TestCase):
for layer_name in item["_value"].keys():
self.assertIn(v[layer_name]["chosen_layer"], item["layer_choice"])
def search_space_test_all(self, tuner_factory, supported_types=None, ignore_types=None):
def search_space_test_all(self, tuner_factory, supported_types=None, ignore_types=None, fail_types=None):
# Three types: 1. supported; 2. ignore; 3. fail.
# NOTE(yuge): ignore types
# Supported types are listed in the table. They are meant to be supported and should be correct.
# Other than those, all the rest are "unsupported", which are expected to produce ridiculous results
......@@ -103,16 +114,18 @@ class BuiltinTunersTestCase(TestCase):
if supported_types is None:
supported_types = ["choice", "randint", "uniform", "quniform", "loguniform", "qloguniform",
"normal", "qnormal", "lognormal", "qlognormal"]
if fail_types is None:
fail_types = []
if ignore_types is None:
ignore_types = []
full_supported_search_space = dict()
for single in search_space_all:
single_keyword = single.split("_")
space = search_space_all[single]
expected_fail = not any([t in single_keyword for t in supported_types]) or "fail" in single_keyword
if ignore_types is not None and any([t in ignore_types for t in single_keyword]):
if any(single.startswith(t) for t in ignore_types):
continue
if "fail" in space:
if self._testMethodName.split("_", 1)[1] in space.pop("fail"):
expected_fail = True
expected_fail = not any(single.startswith(t) for t in supported_types) or \
any(single.startswith(t) for t in fail_types) or \
"fail" in single # name contains fail (fail on all)
single_search_space = {single: space}
if not expected_fail:
# supports this key
......@@ -129,11 +142,14 @@ class BuiltinTunersTestCase(TestCase):
self.search_space_test_one(tuner_factory, full_supported_search_space)
def test_grid_search(self):
self.exhaustive = True
self.search_space_test_all(lambda: GridSearchTuner(),
supported_types=["choice", "randint", "quniform"])
def test_tpe(self):
self.search_space_test_all(lambda: HyperoptTuner("tpe"))
self.search_space_test_all(lambda: HyperoptTuner("tpe"),
ignore_types=["uniform_equal", "qloguniform_equal", "loguniform_equal", "quniform_clip_2"])
# NOTE: these types are ignored because they trigger `assert prior_sigma > 0` (tpe.py line 465, adaptive_parzen_normal)
def test_random_search(self):
self.search_space_test_all(lambda: HyperoptTuner("random_search"))
......@@ -148,6 +164,7 @@ class BuiltinTunersTestCase(TestCase):
supported_types=["choice", "randint", "uniform", "quniform", "loguniform"])
def test_batch(self):
self.exhaustive = True
self.search_space_test_all(lambda: BatchTuner(),
supported_types=["choice"])
......@@ -156,14 +173,18 @@ class BuiltinTunersTestCase(TestCase):
self.search_space_test_all(lambda: EvolutionTuner(population_size=100))
def test_gp(self):
self.test_round = 1 # NOTE: GP tuner hangs when run for multiple test rounds
self.search_space_test_all(lambda: GPTuner(),
supported_types=["choice", "randint", "uniform", "quniform", "loguniform",
"qloguniform"],
ignore_types=["normal", "lognormal", "qnormal", "qlognormal"])
ignore_types=["normal", "lognormal", "qnormal", "qlognormal"],
fail_types=["choice_str", "choice_mixed"])
def test_metis(self):
self.test_round = 1 # NOTE: Metis tuner hangs when run for multiple test rounds
self.search_space_test_all(lambda: MetisTuner(),
supported_types=["choice", "randint", "uniform", "quniform"])
supported_types=["choice", "randint", "uniform", "quniform"],
fail_types=["choice_str", "choice_mixed"])
def test_networkmorphism(self):
pass
......
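For reference, `supported_types`, `ignore_types`, and the new `fail_types` interact in `search_space_test_all` through a prefix match on each search-space key: ignored keys are skipped entirely, and a key is expected to fail if it matches no supported prefix, matches a fail prefix, or contains "fail" in its name. A minimal standalone sketch of that classification (the type lists below are made up for illustration, not the real test configuration):

```python
# Sketch of the classification used in search_space_test_all (illustrative values only).
supported_types = ["choice", "randint", "quniform"]
ignore_types = ["quniform_clip_2"]   # skipped entirely for this tuner
fail_types = ["choice_str"]          # expected to raise for this tuner

def classify(key: str) -> str:
    if any(key.startswith(t) for t in ignore_types):
        return "ignored"
    expected_fail = not any(key.startswith(t) for t in supported_types) or \
        any(key.startswith(t) for t in fail_types) or \
        "fail" in key                # name contains "fail": expected to fail on all tuners
    return "expected to fail" if expected_fail else "supported"

assert classify("quniform_clip_2") == "ignored"
assert classify("choice_str") == "expected to fail"
assert classify("choice_num") == "supported"
assert classify("normal") == "expected to fail"
```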
......@@ -18,6 +18,7 @@
"fork-ts-checker-webpack-plugin": "^1.5.0",
"fs-extra": "^8.1.0",
"html-webpack-plugin": "^4.0.0-beta.8",
"json5": "^2.1.1",
"less": "^3.9.0",
"less-loader": "^5.0.0",
"mini-css-extract-plugin": "^0.8.0",
......@@ -55,6 +56,7 @@
"eslint": "npx eslint ./ --ext .tsx,.ts"
},
"devDependencies": {
"@types/json5": "^0.0.30",
"@types/node": "^10.14.14",
"@types/react": "16.4.17",
"@types/react-dom": "^16.0.7",
......
......@@ -6,7 +6,7 @@ import { ColumnProps } from 'antd/lib/table';
const Option = Select.Option;
const CheckboxGroup = Checkbox.Group;
import { MANAGER_IP, trialJobStatus, COLUMN_INDEX, COLUMNPro } from '../../static/const';
import { convertDuration, formatTimestamp, intermediateGraphOption, killJob } from '../../static/function';
import { convertDuration, formatTimestamp, intermediateGraphOption, killJob, parseMetrics } from '../../static/function';
import { EXPERIMENT, TRIALS } from '../../static/datamodel';
import { TableRecord } from '../../static/interface';
import OpenRow from '../public-child/OpenRow';
......@@ -178,11 +178,11 @@ class TableList extends React.Component<TableListProps, TableListState> {
// get intermediate result dict keys array
let otherkeys: Array<string> = ['default'];
if (res.data.length !== 0) {
otherkeys = Object.keys(JSON.parse(res.data[0].data));
otherkeys = Object.keys(parseMetrics(res.data[0].data));
}
// intermediateArr just stores the default value
Object.keys(res.data).map(item => {
const temp = JSON.parse(res.data[item].data);
const temp = parseMetrics(res.data[item].data);
if (typeof temp === 'object') {
intermediateArr.push(temp.default);
} else {
......@@ -210,7 +210,7 @@ class TableList extends React.Component<TableListProps, TableListState> {
// just watch default key-val
if (isShowDefault === true) {
Object.keys(intermediateData).map(item => {
const temp = JSON.parse(intermediateData[item].data);
const temp = parseMetrics(intermediateData[item].data);
if (typeof temp === 'object') {
intermediateArr.push(temp[value]);
} else {
......@@ -219,7 +219,7 @@ class TableList extends React.Component<TableListProps, TableListState> {
});
} else {
Object.keys(intermediateData).map(item => {
const temp = JSON.parse(intermediateData[item].data);
const temp = parseMetrics(intermediateData[item].data);
if (typeof temp === 'object') {
intermediateArr.push(temp[value]);
}
......@@ -431,7 +431,8 @@ class TableList extends React.Component<TableListProps, TableListState> {
key: 'operation',
render: (text: string, record: TableRecord) => {
const trialStatus = record.status;
const flag: boolean = (trialStatus === 'RUNNING') ? false : true;
// a job can be killed only while its status is RUNNING or UNKNOWN
const flag: boolean = (trialStatus === 'RUNNING' || trialStatus === 'UNKNOWN') ? false : true;
return (
<Row id="detail-button">
{/* see intermediate result graph */}
......
import * as JSON5 from 'json5';
import axios from 'axios';
import { message } from 'antd';
import { MANAGER_IP } from './const';
......@@ -173,8 +174,16 @@ function formatTimestamp(timestamp?: number, placeholder?: string = 'N/A'): stri
return timestamp ? new Date(timestamp).toLocaleString('en-US') : placeholder;
}
function parseMetrics(metricData: string): any {
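// metric data reported by a trial may contain the bare literal NaN, which JSON.parse rejects, so fall back to the more lenient JSON5 parser in that case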
if (metricData.includes('NaN')) {
return JSON5.parse(metricData)
} else {
return JSON.parse(metricData)
}
}
function metricAccuracy(metric: MetricDataRecord): number {
const data = JSON.parse(metric.data);
const data = parseMetrics(metric.data);
return typeof data === 'number' ? data : NaN;
}
......@@ -186,5 +195,5 @@ function formatAccuracy(accuracy: number): string {
export {
convertTime, convertDuration, getFinalResult, getFinal, downFile,
intermediateGraphOption, killJob, filterByStatus, filterDuration,
formatAccuracy, formatTimestamp, metricAccuracy
formatAccuracy, formatTimestamp, metricAccuracy, parseMetrics
};
import { MetricDataRecord, TrialJobInfo, TableObj, TableRecord, Parameters, FinalType } from '../interface';
import { getFinal, formatAccuracy, metricAccuracy } from '../function';
import { getFinal, formatAccuracy, metricAccuracy, parseMetrics } from '../function';
class Trial implements TableObj {
private metricsInitialized: boolean = false;
......@@ -56,7 +56,7 @@ class Trial implements TableObj {
// TODO: support intermediate result as a dict
const temp = this.intermediates[this.intermediates.length - 1];
if (temp !== undefined) {
return JSON.parse(temp.data);
return parseMetrics(temp.data);
} else {
return undefined;
}
......@@ -138,10 +138,10 @@ class Trial implements TableObj {
const mediate: number[] = [ ];
for (const items of this.intermediateMetrics) {
if (typeof JSON.parse(items.data) === 'object') {
mediate.push(JSON.parse(items.data).default);
if (typeof parseMetrics(items.data) === 'object') {
mediate.push(parseMetrics(items.data).default);
} else {
mediate.push(JSON.parse(items.data));
mediate.push(parseMetrics(items.data));
}
}
ret.intermediate = mediate;
......
......@@ -80,6 +80,10 @@
version "0.0.29"
resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
"@types/json5@^0.0.30":
version "0.0.30"
resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.30.tgz#44cb52f32a809734ca562e685c6473b5754a7818"
"@types/minimatch@*":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
......@@ -3988,6 +3992,12 @@ json5@^1.0.1:
dependencies:
minimist "^1.2.0"
json5@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6"
dependencies:
minimist "^1.2.0"
jsonfile@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb"
......
......@@ -12,7 +12,7 @@ jobs:
python3 -m pip install torch==1.2.0 --user
python3 -m pip install torchvision==0.4.0 --user
python3 -m pip install keras==2.1.6 --user
python3 -m pip install tensorflow-gpu==1.12.0 --user
python3 -m pip install tensorflow-gpu==1.15 --user
sudo apt-get install swig -y
PATH=$HOME/.local/bin:$PATH nnictl package install --name=SMAC
PATH=$HOME/.local/bin:$PATH nnictl package install --name=BOHB
......
......@@ -76,7 +76,7 @@ class Experiments:
def remove_experiment(self, expId):
'''remove an experiment by id'''
if id in self.experiments:
if expId in self.experiments:
self.experiments.pop(expId)
self.write_file()
......
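The last hunk fixes a real bug rather than a style issue: `id` is Python's builtin function, so `id in self.experiments` was always False and `remove_experiment` silently did nothing. A minimal reproduction of the old and new behaviour (simplified stand-in class, illustrative only):

```python
class Experiments:
    """Simplified stand-in for the fixed class; illustrative only."""

    def __init__(self):
        self.experiments = {"exp_1": {"status": "STOPPED"}}

    def remove_experiment_old(self, expId):
        # Old code: `id` is the builtin function, never a dict key,
        # so this branch never runs and the experiment is never removed.
        if id in self.experiments:
            self.experiments.pop(expId)

    def remove_experiment(self, expId):
        # Fixed code: test the actual experiment id.
        if expId in self.experiments:
            self.experiments.pop(expId)


exps = Experiments()
exps.remove_experiment_old("exp_1")
assert "exp_1" in exps.experiments       # bug: nothing was removed
exps.remove_experiment("exp_1")
assert "exp_1" not in exps.experiments   # fix: removed as expected
```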