"vscode:/vscode.git/clone" did not exist on "3cf3998ea5e8550597ce0cb122482e22bcbb88a2"
Commit 85cb472e authored by Shinai Yang (FA TALENT)

Merge branch 'master' of https://github.com/SparkSnail/nni

parents 85c015dc 2c862dcb
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import sys
import numpy
sys.path.insert(1, os.path.join(sys.path[0], '..'))
def predict(parameters_value, regressor_gp):
'''
Predict the mean and standard deviation at a point with the Gaussian Process model
'''
parameters_value = numpy.array(parameters_value).reshape(-1, len(parameters_value))
mu, sigma = regressor_gp.predict(parameters_value, return_std=True)
return mu[0], sigma[0]
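if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): fit a
    # scikit-learn GaussianProcessRegressor on made-up data and query it
    # through predict(). All numbers below are hypothetical.
    from sklearn.gaussian_process import GaussianProcessRegressor

    train_x = [[0.1, 1.0], [0.4, 2.0], [0.9, 3.0]]
    train_y = [0.30, 0.25, 0.40]
    regressor = GaussianProcessRegressor().fit(train_x, train_y)

    mu, sigma = predict([0.5, 2.5], regressor)
    print("predicted mu=%s, sigma=%s" % (mu, sigma))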
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import random
import sys
import nni.metis_tuner.lib_acquisition_function as lib_acquisition_function
import nni.metis_tuner.lib_constraint_summation as lib_constraint_summation
import nni.metis_tuner.lib_data as lib_data
import nni.metis_tuner.Regression_GP.Prediction as gp_prediction
sys.path.insert(1, os.path.join(sys.path[0], '..'))
CONSTRAINT_LOWERBOUND = None
CONSTRAINT_UPPERBOUND = None
CONSTRAINT_PARAMS_IDX = []
def selection_r(acquisition_function,
samples_y_aggregation,
x_bounds,
x_types,
regressor_gp,
num_starting_points=100,
minimize_constraints_fun=None):
'''
Select the next hyperparameter candidate from randomly generated starting points
'''
minimize_starting_points = [lib_data.rand(x_bounds, x_types) \
for i in range(0, num_starting_points)]
outputs = selection(acquisition_function, samples_y_aggregation,
x_bounds, x_types, regressor_gp,
minimize_starting_points,
minimize_constraints_fun=minimize_constraints_fun)
return outputs
def selection(acquisition_function,
samples_y_aggregation,
x_bounds, x_types,
regressor_gp,
minimize_starting_points,
minimize_constraints_fun=None):
'''
Select the next hyperparameter with the given acquisition function ("ei", "lc", or "lm")
'''
outputs = None
sys.stderr.write("[%s] Exercise \"%s\" acquisition function\n" \
% (os.path.basename(__file__), acquisition_function))
if acquisition_function == "ei":
outputs = lib_acquisition_function.next_hyperparameter_expected_improvement(\
gp_prediction.predict, [regressor_gp], x_bounds, x_types, \
samples_y_aggregation, minimize_starting_points, \
minimize_constraints_fun=minimize_constraints_fun)
elif acquisition_function == "lc":
outputs = lib_acquisition_function.next_hyperparameter_lowest_confidence(\
gp_prediction.predict, [regressor_gp], x_bounds, x_types,\
minimize_starting_points, minimize_constraints_fun=minimize_constraints_fun)
elif acquisition_function == "lm":
outputs = lib_acquisition_function.next_hyperparameter_lowest_mu(\
gp_prediction.predict, [regressor_gp], x_bounds, x_types,\
minimize_starting_points, minimize_constraints_fun=minimize_constraints_fun)
return outputs
def _rand_with_constraints(x_bounds, x_types):
'''
Randomly generate a candidate whose constrained parameters satisfy the summation bounds
'''
outputs = None
x_bounds_withconstraints = [x_bounds[i] for i in CONSTRAINT_PARAMS_IDX]
x_types_withconstraints = [x_types[i] for i in CONSTRAINT_PARAMS_IDX]
x_val_withconstraints = lib_constraint_summation.rand(x_bounds_withconstraints,
x_types_withconstraints,
CONSTRAINT_LOWERBOUND,
CONSTRAINT_UPPERBOUND)
if x_val_withconstraints is not None:
outputs = [None] * len(x_bounds)
for i, _ in enumerate(CONSTRAINT_PARAMS_IDX):
outputs[CONSTRAINT_PARAMS_IDX[i]] = x_val_withconstraints[i]
for i, _ in enumerate(outputs):
if outputs[i] is None:
outputs[i] = random.randint(x_bounds[i][0], x_bounds[i][1])
return outputs
def _minimize_constraints_fun_summation(x):
'''
Constraint function: the summation of the constrained parameters must fall within [CONSTRAINT_LOWERBOUND, CONSTRAINT_UPPERBOUND]
'''
summation = sum([x[i] for i in CONSTRAINT_PARAMS_IDX])
return CONSTRAINT_UPPERBOUND >= summation >= CONSTRAINT_LOWERBOUND
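if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): run the "ei"
    # acquisition strategy over a toy two-dimensional search space, with a GP
    # fitted on made-up observations. All bounds and values are hypothetical.
    from sklearn.gaussian_process import GaussianProcessRegressor

    x_bounds = [[0, 10], [0.0, 1.0]]
    x_types = ["range_int", "range_continuous"]
    sampled_x = [[1, 0.2], [4, 0.5], [8, 0.9]]
    sampled_y = [0.8, 0.3, 0.6]
    regressor_gp = GaussianProcessRegressor().fit(sampled_x, sampled_y)

    candidate = selection_r("ei", sampled_y, x_bounds, x_types,
                            regressor_gp, num_starting_points=10)
    print(candidate)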
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import sys
import numpy
from scipy.stats import norm
from scipy.optimize import minimize
import nni.metis_tuner.lib_data as lib_data
def next_hyperparameter_expected_improvement(fun_prediction,
fun_prediction_args,
x_bounds, x_types,
samples_y_aggregation,
minimize_starting_points,
minimize_constraints_fun=None):
'''
"Expected Improvement" acquisition function
'''
best_x = None
best_acquisition_value = None
x_bounds_minmax = [[i[0], i[-1]] for i in x_bounds]
x_bounds_minmax = numpy.array(x_bounds_minmax)
for starting_point in numpy.array(minimize_starting_points):
res = minimize(fun=_expected_improvement,
x0=starting_point.reshape(1, -1),
bounds=x_bounds_minmax,
method="L-BFGS-B",
args=(fun_prediction,
fun_prediction_args,
x_bounds,
x_types,
samples_y_aggregation,
minimize_constraints_fun))
if (best_acquisition_value is None) or \
(res.fun < best_acquisition_value):
res.x = numpy.ndarray.tolist(res.x)
res.x = lib_data.match_val_type(res.x, x_bounds, x_types)
if (minimize_constraints_fun is None) or \
(minimize_constraints_fun(res.x) is True):
best_acquisition_value = res.fun
best_x = res.x
outputs = None
if best_x is not None:
mu, sigma = fun_prediction(best_x, *fun_prediction_args)
outputs = {'hyperparameter': best_x, 'expected_mu': mu,
'expected_sigma': sigma, 'acquisition_func': "ei"}
return outputs
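# For reference, _expected_improvement below computes the standard closed-form
# expected improvement for minimization,
#     EI(x) = (y* - mu(x)) * Phi(Z) + sigma(x) * phi(Z),  Z = (y* - mu(x)) / sigma(x),
# where y* = min(samples_y_aggregation) and Phi/phi are the standard normal
# CDF/PDF. EI is taken as 0 when sigma(x) == 0, and -EI is returned because
# scipy.optimize.minimize() minimizes its objective.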
def _expected_improvement(x, fun_prediction, fun_prediction_args,
x_bounds, x_types, samples_y_aggregation,
minimize_constraints_fun):
# This is only for step-wise optimization
x = lib_data.match_val_type(x, x_bounds, x_types)
expected_improvement = sys.maxsize
if (minimize_constraints_fun is None) or (minimize_constraints_fun(x) is True):
mu, sigma = fun_prediction(x, *fun_prediction_args)
loss_optimum = min(samples_y_aggregation)
scaling_factor = -1
# In case sigma equals zero
with numpy.errstate(divide="ignore"):
Z = scaling_factor * (mu - loss_optimum) / sigma
expected_improvement = scaling_factor * (mu - loss_optimum) * \
norm.cdf(Z) + sigma * norm.pdf(Z)
expected_improvement = 0.0 if sigma == 0.0 else expected_improvement
# We want expected_improvement to be as large as possible
# (i.e., as small as possible for minimize(...))
expected_improvement = -1 * expected_improvement
return expected_improvement
def next_hyperparameter_lowest_confidence(fun_prediction,
fun_prediction_args,
x_bounds, x_types,
minimize_starting_points,
minimize_constraints_fun=None):
'''
"Lowest Confidence" acquisition function
'''
best_x = None
best_acquisition_value = None
x_bounds_minmax = [[i[0], i[-1]] for i in x_bounds]
x_bounds_minmax = numpy.array(x_bounds_minmax)
for starting_point in numpy.array(minimize_starting_points):
res = minimize(fun=_lowest_confidence,
x0=starting_point.reshape(1, -1),
bounds=x_bounds_minmax,
method="L-BFGS-B",
args=(fun_prediction,
fun_prediction_args,
x_bounds,
x_types,
minimize_constraints_fun))
if (best_acquisition_value is None) or (res.fun < best_acquisition_value):
res.x = numpy.ndarray.tolist(res.x)
res.x = lib_data.match_val_type(res.x, x_bounds, x_types)
if (minimize_constraints_fun is None) or (minimize_constraints_fun(res.x) is True):
best_acquisition_value = res.fun
best_x = res.x
outputs = None
if best_x is not None:
mu, sigma = fun_prediction(best_x, *fun_prediction_args)
outputs = {'hyperparameter': best_x, 'expected_mu': mu,
'expected_sigma': sigma, 'acquisition_func': "lc"}
return outputs
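# For reference, _lowest_confidence below scores a point by what corresponds to
# the width of its 95% confidence interval relative to the predicted mean,
# ci = 2 * 1.96 * sigma / mu, and returns -ci so that minimize() drives the
# search towards the region where the model is least confident.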
def _lowest_confidence(x, fun_prediction, fun_prediction_args,
x_bounds, x_types, minimize_constraints_fun):
# This is only for step-wise optimization
x = lib_data.match_val_type(x, x_bounds, x_types)
ci = sys.maxsize
if (minimize_constraints_fun is None) or (minimize_constraints_fun(x) is True):
mu, sigma = fun_prediction(x, *fun_prediction_args)
ci = (sigma * 1.96 * 2) / mu
# We want ci to be as large as possible
# (i.e., as small as possible for minimize(...)),
# because this would mean the lowest confidence
ci = -1 * ci
return ci
def next_hyperparameter_lowest_mu(fun_prediction,
fun_prediction_args,
x_bounds, x_types,
minimize_starting_points,
minimize_constraints_fun=None):
'''
"Lowest Mu" acquisition function
'''
best_x = None
best_acquisition_value = None
x_bounds_minmax = [[i[0], i[-1]] for i in x_bounds]
x_bounds_minmax = numpy.array(x_bounds_minmax)
for starting_point in numpy.array(minimize_starting_points):
res = minimize(fun=_lowest_mu,
x0=starting_point.reshape(1, -1),
bounds=x_bounds_minmax,
method="L-BFGS-B",
args=(fun_prediction, fun_prediction_args, \
x_bounds, x_types, minimize_constraints_fun))
if (best_acquisition_value is None) or (res.fun < best_acquisition_value):
res.x = numpy.ndarray.tolist(res.x)
res.x = lib_data.match_val_type(res.x, x_bounds, x_types)
if (minimize_constraints_fun is None) or (minimize_constraints_fun(res.x) is True):
best_acquisition_value = res.fun
best_x = res.x
outputs = None
if best_x is not None:
mu, sigma = fun_prediction(best_x, *fun_prediction_args)
outputs = {'hyperparameter': best_x, 'expected_mu': mu,
'expected_sigma': sigma, 'acquisition_func': "lm"}
return outputs
def _lowest_mu(x, fun_prediction, fun_prediction_args,
x_bounds, x_types, minimize_constraints_fun):
'''
Return the predicted mean (mu); minimizing it gives the "lowest mu" acquisition
'''
# This is only for step-wise optimization
x = lib_data.match_val_type(x, x_bounds, x_types)
mu = sys.maxsize
if (minimize_constraints_fun is None) or (minimize_constraints_fun(x) is True):
mu, _ = fun_prediction(x, *fun_prediction_args)
return mu
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import math
import random
from operator import itemgetter
def check_feasibility(x_bounds, lowerbound, upperbound):
'''
This can have false positives.
For example, if every parameter can only be 0 or 5, a summation constraint
between 6 and 7 passes this check even though no combination can satisfy it.
'''
# x_bounds should be sorted, so even for the "discrete_int" type,
# the smallest and the largest numbers should be the first and the last elements
x_bounds_lowerbound = sum([x_bound[0] for x_bound in x_bounds])
x_bounds_upperbound = sum([x_bound[-1] for x_bound in x_bounds])
# return ((x_bounds_lowerbound <= lowerbound) and (x_bounds_upperbound >= lowerbound)) or \
# ((x_bounds_lowerbound <= upperbound) and (x_bounds_upperbound >= upperbound))
return (x_bounds_lowerbound <= lowerbound <= x_bounds_upperbound) or \
(x_bounds_lowerbound <= upperbound <= x_bounds_upperbound)
def rand(x_bounds, x_types, lowerbound, upperbound, max_retries=100):
'''
The key idea is to move towards upperbound by randomly choosing one value
for each parameter. For the last parameter, however, we need to make sure
that its value can still bring the summation above lowerbound
'''
outputs = None
if check_feasibility(x_bounds, lowerbound, upperbound) is True:
# Order parameters by their range size. We want the smallest range first,
# because the corresponding parameter has fewer values to choose from
x_idx_sorted = []
for i, _ in enumerate(x_bounds):
if x_types[i] == "discrete_int":
x_idx_sorted.append([i, len(x_bounds[i])])
elif (x_types[i] == "range_int") or (x_types[i] == "range_continuous"):
x_idx_sorted.append([i, math.floor(x_bounds[i][1] - x_bounds[i][0])])
x_idx_sorted = sorted(x_idx_sorted, key=itemgetter(1))
for _ in range(max_retries):
budget_allocated = 0
outputs = [None] * len(x_bounds)
for i, _ in enumerate(x_idx_sorted):
x_idx = x_idx_sorted[i][0]
# The amount of unallocated space that we have
budget_max = upperbound - budget_allocated
# NOT the last x that we need to assign a random number to
if i < (len(x_idx_sorted) - 1):
if x_bounds[x_idx][0] <= budget_max:
if x_types[x_idx] == "discrete_int":
# Collect the valid integers, i.e. those that still fit within the remaining budget
temp = []
for j in x_bounds[x_idx]:
if j <= budget_max:
temp.append(j)
# Randomly pick a number from the integer array
if temp:
outputs[x_idx] = temp[random.randint(0, len(temp) - 1)]
elif (x_types[x_idx] == "range_int") or \
(x_types[x_idx] == "range_continuous"):
outputs[x_idx] = random.randint(x_bounds[x_idx][0],
min(x_bounds[x_idx][-1], budget_max))
else:
# The last x that we need to assign a random number to
randint_lowerbound = lowerbound - budget_allocated
randint_lowerbound = 0 if randint_lowerbound < 0 else randint_lowerbound
# This check ensures that the smallest possible value does not overflow the
# remaining budget, and that the largest possible value can still reach the
# required lower bound
if (x_bounds[x_idx][0] <= budget_max) and \
(x_bounds[x_idx][-1] >= randint_lowerbound):
if x_types[x_idx] == "discrete_int":
temp = []
for j in x_bounds[x_idx]:
# if (j <= budget_max) and (j >= randint_lowerbound):
if randint_lowerbound <= j <= budget_max:
temp.append(j)
if temp:
outputs[x_idx] = temp[random.randint(0, len(temp) - 1)]
elif (x_types[x_idx] == "range_int") or \
(x_types[x_idx] == "range_continuous"):
outputs[x_idx] = random.randint(randint_lowerbound,
min(x_bounds[x_idx][1], budget_max))
if outputs[x_idx] is None:
break
else:
budget_allocated += outputs[x_idx]
if None not in outputs:
break
return outputs
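if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): draw a random
    # point whose coordinate sum lies between the given lower and upper bounds.
    # The bounds, types and constraint values below are hypothetical.
    example_bounds = [[1, 2, 3, 5], [0, 10]]
    example_types = ["discrete_int", "range_int"]

    print(check_feasibility(example_bounds, lowerbound=4, upperbound=8))  # True
    print(rand(example_bounds, example_types, lowerbound=4, upperbound=8))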
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import math
import random
def match_val_type(vals, vals_bounds, vals_types):
'''
Update the values in the array to match their corresponding types
'''
vals_new = []
for i, _ in enumerate(vals_types):
if vals_types[i] == "discrete_int":
# Find the closest integer in the array, vals_bounds
vals_new.append(min(vals_bounds[i], key=lambda x: abs(x - vals[i])))
elif vals_types[i] == "range_int":
# Round down to the nearest integer
vals_new.append(math.floor(vals[i]))
elif vals_types[i] == "range_continuous":
# Don't do any processing for continuous numbers
vals_new.append(vals[i])
else:
return None
return vals_new
def rand(x_bounds, x_types):
'''
Randomly generate a value for each variable within its bounds
'''
outputs = []
for i, _ in enumerate(x_bounds):
if x_types[i] == "discrete_int":
temp = x_bounds[i][random.randint(0, len(x_bounds[i]) - 1)]
outputs.append(temp)
elif x_types[i] == "range_int":
temp = random.randint(x_bounds[i][0], x_bounds[i][1])
outputs.append(temp)
elif x_types[i] == "range_continuous":
temp = random.uniform(x_bounds[i][0], x_bounds[i][1])
outputs.append(temp)
else:
return None
return outputs
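if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): sample a random
    # point, then snap a continuous candidate back onto the declared types.
    # The search space below is hypothetical.
    example_bounds = [[2, 3, 5, 8], [0, 10], [0.0, 1.0]]
    example_types = ["discrete_int", "range_int", "range_continuous"]

    print(rand(example_bounds, example_types))
    print(match_val_type([4.2, 6.7, 0.33], example_bounds, example_types))  # [5, 6, 0.33]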
This diff is collapsed.
sklearn
@@ -4,4 +4,7 @@ json_tricks
# hyperopt tuner
numpy
scipy
hyperopt
\ No newline at end of file
hyperopt
# metis tuner
sklearn
@@ -209,6 +209,10 @@ class Overview extends React.Component<{}, OverviewState> {
profile.failTrial += 1;
break;
case 'RUNNING':
profile.runTrial += 1;
break;
case 'USER_CANCELED':
case 'SYS_CANCELED':
profile.stopTrial += 1;
......
import * as React from 'react';
import axios from 'axios';
import { MANAGER_IP } from '../static/const';
import { Row, Col, Tabs, Input, Select } from 'antd';
import { Row, Col, Tabs, Input, Select, Button } from 'antd';
const Option = Select.Option;
import { TableObj, Parameters, DetailAccurPoint, TooltipForAccuracy } from '../static/interface';
import { getFinalResult } from '../static/function';
import { TableObjFianl, Parameters, DetailAccurPoint, TooltipForAccuracy } from '../static/interface';
import { getFinalResult, getFinal } from '../static/function';
import Accuracy from './overview/Accuracy';
import Duration from './trial-detail/Duration';
import Title1 from './overview/Title1';
@@ -16,8 +16,8 @@ import '../static/style/trialsDetail.scss';
interface TrialDetailState {
accSource: object;
accNodata: string;
tableListSource: Array<TableObj>;
searchResultSource: Array<TableObj>;
tableListSource: Array<TableObjFianl>;
searchResultSource: Array<TableObjFianl>;
isHasSearch: boolean;
experimentStatus: string;
entriesTable: number;
@@ -30,6 +30,8 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
public interTableList = 1;
public interAllTableList = 2;
public tableList: TableList | null;
constructor(props: {}) {
super(props);
@@ -40,7 +42,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
searchResultSource: [],
experimentStatus: '',
entriesTable: 20,
isHasSearch: false
isHasSearch: false,
};
}
// trial accuracy graph
@@ -132,7 +134,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
.then(res => {
if (res.status === 200) {
const trialJobs = res.data;
const trialTable: Array<TableObj> = [];
const trialTable: Array<TableObjFianl> = [];
Object.keys(trialJobs).map(item => {
// only succeeded trials have finalMetricData
let desc: Parameters = {
@@ -167,7 +169,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
if (trialJobs[item].logPath !== undefined) {
desc.logPath = trialJobs[item].logPath;
}
const acc = getFinalResult(trialJobs[item].finalMetricData);
const acc = getFinal(trialJobs[item].finalMetricData);
trialTable.push({
key: trialTable.length,
sequenceId: trialJobs[item].sequenceId,
@@ -185,7 +187,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
Object.keys(searchResultSource).map(index => {
temp.push(searchResultSource[index].id);
});
const searchResultList: Array<TableObj> = [];
const searchResultList: Array<TableObjFianl> = [];
for (let i = 0; i < temp.length; i++) {
Object.keys(trialTable).map(key => {
const item = trialTable[key];
@@ -217,7 +219,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
.then(res => {
if (res.status === 200) {
const trialJobs = res.data;
const trialTable: Array<TableObj> = [];
const trialTable: Array<TableObjFianl> = [];
Object.keys(trialJobs).map(item => {
// only succeeded trials have finalMetricData
let desc: Parameters = {
@@ -252,7 +254,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
if (trialJobs[item].logPath !== undefined) {
desc.logPath = trialJobs[item].logPath;
}
const acc = getFinalResult(trialJobs[item].finalMetricData);
const acc = getFinal(trialJobs[item].finalMetricData);
trialTable.push({
key: trialTable.length,
sequenceId: trialJobs[item].sequenceId,
@@ -308,7 +310,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
} else {
window.clearInterval(this.interAllTableList);
const { tableListSource } = this.state;
const searchResultList: Array<TableObj> = [];
const searchResultList: Array<TableObjFianl> = [];
Object.keys(tableListSource).map(key => {
const item = tableListSource[key];
if (item.sequenceId.toString() === targetValue || item.id.includes(targetValue)) {
@@ -364,6 +366,10 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
}
}
test = () => {
alert('TableList component was not properly initialized.');
}
componentDidMount() {
this._isMounted = true;
@@ -429,13 +435,26 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
<span>entries</span>
</Col>
<Col span={12} className="right">
{/* <span>Search:</span> */}
<Input
type="text"
placeholder="search by Trial No. and id"
onChange={this.searchTrial}
style={{ width: 200, marginLeft: 6 }}
/>
<Row>
<Col span={12}>
<Button
type="primary"
className="tableButton editStyle"
onClick={this.tableList ? this.tableList.addColumn : this.test}
>
AddColumn
</Button>
</Col>
<Col span={12}>
{/* <span>Search:</span> */}
<Input
type="text"
placeholder="search by Trial No. and id"
onChange={this.searchTrial}
style={{ width: 200, marginLeft: 6 }}
/>
</Col>
</Row>
</Col>
</Row>
<TableList
@@ -444,6 +463,7 @@ class TrialsDetail extends React.Component<{}, TrialDetailState> {
updateList={this.drawTableList}
searchResult={searchResultSource}
isHasSearch={isHasSearch}
ref={(tabList) => this.tableList = tabList}
/>
</div>
);
......
@@ -242,45 +242,45 @@ class Progressed extends React.Component<ProgressProps, ProgressState> {
maxString={`MaxTrialNumber: ${trialProfile.MaxTrialNum}`}
/>
<Row className="basic colorOfbasic mess">
<p>Best Default Metric</p>
<p>best metric</p>
<div>{bestAccuracy}</div>
</Row>
<Row className="mess">
<Col span={8}>
<Row className="basic colorOfbasic">
<p>Time Spent</p>
<p>spent</p>
<div>{convertTime(trialProfile.execDuration)}</div>
</Row>
</Col>
<Col span={9}>
<Row className="basic colorOfbasic">
<p>Remaining Time</p>
<p>remaining</p>
<div>{remaining}</div>
</Row>
</Col>
<Col span={7}>
<Row className="basic colorOfbasic">
<p>MaxDuration</p>
<div>{convertTime(trialProfile.maxDuration)}</div>
<p>running</p>
<div>{trialNumber.runTrial}</div>
</Row>
</Col>
</Row>
<Row className="mess">
<Col span={8}>
<Row className="basic colorOfbasic">
<p>Succeed Trial</p>
<p>succeed</p>
<div>{trialNumber.succTrial}</div>
</Row>
</Col>
<Col span={9}>
<Row className="basic">
<p>Stopped Trial</p>
<p>stopped</p>
<div>{trialNumber.stopTrial}</div>
</Row>
</Col>
<Col span={7}>
<Row className="basic">
<p>Failed Trial</p>
<p>failed</p>
<div>{trialNumber.failTrial}</div>
</Row>
</Col>
......
export const MANAGER_IP = `/api/v1/nni`;
export const DOWNLOAD_IP = `/logs`;
export const trialJobStatus = [
const MANAGER_IP = `/api/v1/nni`;
const DOWNLOAD_IP = `/logs`;
const trialJobStatus = [
'UNKNOWN',
'WAITING',
'RUNNING',
@@ -10,12 +10,47 @@ export const trialJobStatus = [
'SYS_CANCELED',
'EARLY_STOPPED'
];
export const CONTROLTYPE = [
const CONTROLTYPE = [
'SEARCH_SPACE',
'TRIAL_CONCURRENCY',
'MAX_EXEC_DURATION'
];
export const MONACO = {
const MONACO = {
readOnly: true,
automaticLayout: true
};
const COLUMN_INDEX = [
{
name: 'Trial No',
index: 1
},
{
name: 'id',
index: 2
},
{
name: 'duration',
index: 3
},
{
name: 'status',
index: 4
},
{
name: 'Default',
index: 5
},
{
name: 'Operation',
index: 10000
},
{
name: 'Intermediate Result',
index: 10001
}
];
const COLUMN = ['Trial No', 'id', 'duration', 'status', 'Default', 'Operation', 'Intermediate Result'];
export {
MANAGER_IP, DOWNLOAD_IP, trialJobStatus,
CONTROLTYPE, MONACO, COLUMN, COLUMN_INDEX
};
import { FinalResult } from './interface';
import { FinalResult, FinalType } from './interface';
const convertTime = (num: number) => {
if (num % 3600 === 0) {
@@ -28,6 +28,7 @@ const convertDuration = (num: number) => {
};
// get final result value
// draw Accuracy point graph
const getFinalResult = (final: FinalResult) => {
let acc;
let showDefault = 0;
@@ -46,6 +47,21 @@ const getFinalResult = (final: FinalResult) => {
}
};
// get final result value // acc obj
const getFinal = (final: FinalResult) => {
let showDefault: FinalType;
if (final) {
showDefault = JSON.parse(final[0].data);
if (typeof showDefault === 'number') {
showDefault = { default: showDefault };
}
return showDefault;
} else {
return undefined;
}
};
export {
convertTime, convertDuration, getFinalResult
convertTime, convertDuration, getFinalResult,
getFinal
};
// draw accuracy graph data interface
interface TableObj {
key: number;
sequenceId: number;
id: string;
duration: number;
status: string;
acc?: number;
acc?: number; // draw accuracy graph
description: Parameters;
color?: string;
}
interface TableObjFianl {
key: number;
sequenceId: number;
id: string;
duration: number;
status: string;
acc?: FinalType;
description: Parameters;
color?: string;
}
interface FinalType {
default: string;
}
interface ErrorParameter {
error?: string;
}
interface Parameters {
parameters: ErrorParameter;
logPath?: string;
@@ -93,5 +111,6 @@ export {
TableObj, Parameters, Experiment,
AccurPoint, TrialNumber, TrialJob,
DetailAccurPoint, TooltipForAccuracy,
ParaObj, VisualMapValue, Dimobj, FinalResult
ParaObj, VisualMapValue, Dimobj, FinalResult,
TableObjFianl, FinalType
};
/* some buttons about trial-detail table */
.allList{
width: 96%;
margin: 0 auto;
@@ -17,4 +18,17 @@
}
}
.titleColumn{
.ant-checkbox-group-item{
display: block;
}
}
.applyfooter{
/* apply button style */
.apply{
text-align: right;
}
}
@@ -68,6 +68,16 @@ Optional('tuner'): Or({
Optional('n_output_node'): int,
},
Optional('gpuNum'): And(int, lambda x: 0 <= x <= 99999),
},{
'builtinTunerName': 'MetisTuner',
'classArgs': {
Optional('optimize_mode'): Or('maximize', 'minimize'),
Optional('no_resampling'): bool,
Optional('no_candidates'): bool,
Optional('selection_num_starting_points'): int,
Optional('cold_start_num'): int,
},
Optional('gpuNum'): And(int, lambda x: 0 <= x <= 99999),
},{
'codeDir': os.path.exists,
'classFileName': str,
......
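For reference, a 'tuner' section that the MetisTuner schema entry above accepts would look roughly like the following parsed dict; the concrete values are illustrative only and not taken from this commit:

    tuner_config = {
        'builtinTunerName': 'MetisTuner',
        'classArgs': {
            'optimize_mode': 'maximize',
            'no_resampling': True,
            'cold_start_num': 10,
        },
        'gpuNum': 0,
    }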
@@ -21,7 +21,7 @@
import os
import json
from .config_schema import LOCAL_CONFIG_SCHEMA, REMOTE_CONFIG_SCHEMA, PAI_CONFIG_SCHEMA, KUBEFLOW_CONFIG_SCHEMA, FRAMEWORKCONTROLLER_CONFIG_SCHEMA
from .common_utils import get_json_content, print_error, print_warning
from .common_utils import get_json_content, print_error, print_warning, print_normal
def expand_path(experiment_config, key):
'''Change '~' to user home directory'''
@@ -32,7 +32,7 @@ def parse_relative_path(root_path, experiment_config, key):
'''Change relative path to absolute path'''
if experiment_config.get(key) and not os.path.isabs(experiment_config.get(key)):
absolute_path = os.path.join(root_path, experiment_config.get(key))
print_warning('expand %s: %s to %s ' % (key, experiment_config[key], absolute_path))
print_normal('expand %s: %s to %s ' % (key, experiment_config[key], absolute_path))
experiment_config[key] = absolute_path
def parse_time(experiment_config):
......
@@ -129,13 +129,15 @@ class PipeLogReader(threading.Thread):
self.pipeReader = os.fdopen(self.fdRead)
self.orig_stdout = sys.__stdout__
self._is_read_completed = False
self.process_exit = False
def _populateQueue(stream, queue):
'''
Collect lines from 'stream' and put them in 'queue'.
'''
time.sleep(5)
while True:
while True:
cur_process_exit = self.process_exit
try:
line = self.queue.get(True, 5)
try:
@@ -144,9 +146,10 @@
self.orig_stdout.flush()
except Exception as e:
pass
except Exception as e:
self._is_read_completed = True
break
except Exception as e:
if cur_process_exit == True:
self._is_read_completed = True
break
self.pip_log_reader_thread = threading.Thread(target = _populateQueue,
args = (self.pipeReader, self.queue))
@@ -175,4 +178,8 @@ def is_read_completed(self):
def is_read_completed(self):
"""Return if read is completed
"""
return self._is_read_completed
\ No newline at end of file
return self._is_read_completed
def set_process_exit(self):
self.process_exit = True
return self.process_exit
\ No newline at end of file