Unverified Commit 87ed70cd authored by fishyds's avatar fishyds Committed by GitHub
Browse files

Merge pull request #4 from Microsoft/merge-from-dogfood-v1-0824

[Code merge] Merge code from dogfood-v1 branch
parents f1f6f880 61d47a4d
BIN_PATH ?= $(HOME)/.nni/bin/ BIN_PATH ?= /usr/bin
NNI_PATH ?= $(HOME)/.nni/ NODE_PATH ?= /usr/share
EXAMPLE_PATH ?= /usr/share/nni/examples
SRC_DIR := ${PWD} SRC_DIR := ${PWD}
...@@ -20,48 +21,50 @@ build: ...@@ -20,48 +21,50 @@ build:
install: install:
mkdir -p $(NNI_PATH) mkdir -p $(NODE_PATH)/nni
mkdir -p $(BIN_PATH) mkdir -p $(EXAMPLE_PATH)
### Installing NNI Manager ### ### Installing NNI Manager ###
cp -rT src/nni_manager/dist $(NNI_PATH)nni_manager cp -rT src/nni_manager/dist $(NODE_PATH)/nni/nni_manager
cp -rT src/nni_manager/node_modules $(NNI_PATH)nni_manager/node_modules cp -rT src/nni_manager/node_modules $(NODE_PATH)/nni/nni_manager/node_modules
### Installing Web UI ### ### Installing Web UI ###
cp -rT src/webui/build $(NNI_PATH)webui cp -rT src/webui/build $(NODE_PATH)/nni/webui
ln -sf $(NNI_PATH)nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)serve ln -sf $(NODE_PATH)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve
### Installing Python SDK dependencies ### ### Installing Python SDK dependencies ###
pip3 install -r src/sdk/pynni/requirements.txt pip3 install -r src/sdk/pynni/requirements.txt
### Installing Python SDK ### ### Installing Python SDK ###
cd src/sdk/pynni && pip3 install -e . cd src/sdk/pynni && python3 setup.py install
### Installing nnictl ### ### Installing nnictl ###
cd tools && pip3 install -e . cd tools && python3 setup.py install
echo '#!/bin/sh' > $(BIN_PATH)nnimanager echo '#!/bin/sh' > $(BIN_PATH)/nnimanager
echo 'cd $(NNI_PATH)nni_manager && node main.js $$@' >> $(BIN_PATH)nnimanager echo 'cd $(NODE_PATH)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
chmod +x $(BIN_PATH)nnimanager chmod +x $(BIN_PATH)/nnimanager
install -m 755 tools/nnictl $(BIN_PATH)nnictl install -m 755 tools/nnictl $(BIN_PATH)/nnictl
### Installing examples ### ### Installing examples ###
cp -rT examples $(NNI_PATH)examples cp -rT examples $(EXAMPLE_PATH)
dev-install: dev-install:
### Installing Python SDK dependencies ### ### Installing Python SDK dependencies ###
pip3 install -r src/sdk/pynni/requirements.txt pip3 install --user -r src/sdk/pynni/requirements.txt
### Installing Python SDK ### ### Installing Python SDK ###
cd src/sdk/pynni && pip3 install -e . cd src/sdk/pynni && pip3 install --user -e .
### Installing nnictl ### ### Installing nnictl ###
cd tools && pip3 install -e . cd tools && pip3 install --user -e .
uninstall: uninstall:
-rm -r $(NNI_PATH) -rm -r $(EXAMPLE_PATH)
-rm -r $(BIN_PATH) -rm -r $(NODE_PATH)/nni
-pip3 uninstall -y nnictl -pip3 uninstall -y nnictl
-pip3 uninstall -y nni -pip3 uninstall -y nni
-rm $(BIN_PATH)/nnictl
-rm $(BIN_PATH)/nnimanager
-rm $(BIN_PATH)/serve
# Neural Network Intelligence # Introduction
[![Build Status](https://travis-ci.org/Microsoft/NeuralNetworkIntelligence.svg?branch=master)](https://travis-ci.org/Microsoft/NeuralNetworkIntelligence)
## Introduction
Neural Network Intelligence(NNI) is a light package for supporting hyper-parameter tuning or neural architecture search. Neural Network Intelligence(NNI) is a light package for supporting hyper-parameter tuning or neural architecture search.
It could easily run in different environments, such as: local/remote machine/cloud. It could easily run in different environments, such as: local/remote machine/cloud.
And it offers a new annotation language for user to conveniently design search space. And it offers a new annotation language for user to conveniently design search space.
Also user could write code using any language or any machine learning framework. Also user could write code using any language or any machine learning framework.
## Getting Started # Getting Started
TODO: Guide users through getting your code up and running on their own system. In this section you can talk about: TODO: Guide users through getting your code up and running on their own system. In this section you can talk about:
1. Installation process 1. Installation process
2. Software dependencies 2. Software dependencies
3. Latest releases 3. Latest releases
4. API references 4. API references
## Build and Test # Build and Test
TODO: Describe and show how to build your code and run the tests. TODO: Describe and show how to build your code and run the tests.
## Contribute # Contribute
TODO: Explain how other users and developers can contribute to make your code better. TODO: Explain how other users and developers can contribute to make your code better.
## Privacy Statement # Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software. The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
...@@ -25,7 +25,7 @@ assessor: ...@@ -25,7 +25,7 @@ assessor:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python mnist.py trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0 trialGpuNum: 0
``` ```
For our built-in assessors, you need to fill two fields: `assessorName` which chooses NNI provided assessors (refer to [here]() for built-in assessors), `optimizationMode` which includes Maximize and Minimize (you want to maximize or minimize your trial result). For our built-in assessors, you need to fill two fields: `assessorName` which chooses NNI provided assessors (refer to [here]() for built-in assessors), `optimizationMode` which includes Maximize and Minimize (you want to maximize or minimize your trial result).
...@@ -54,7 +54,7 @@ assessor: ...@@ -54,7 +54,7 @@ assessor:
assessorGpuNum: 0 assessorGpuNum: 0
trial: trial:
trialCommand: python mnist.py trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0 trialGpuNum: 0
``` ```
You only need to fill three field: `assessorCommand`, `assessorCodeDir` and `assessorGpuNum`. You only need to fill three field: `assessorCommand`, `assessorCodeDir` and `assessorGpuNum`.
\ No newline at end of file
...@@ -25,31 +25,26 @@ The tool dispatches and runs trial jobs generated by tuning algorithms to s ...@@ -25,31 +25,26 @@ The tool dispatches and runs trial jobs generated by tuning algorithms to s
* As a ML platform owner, you want to support AutoML in your platform * As a ML platform owner, you want to support AutoML in your platform
## **Setup** ## **Setup**
* install using deb file * __Dependencies__
nni requires:
TBD
* install from source code
``` ```
### Prepare Node.js 10.8.0 or above python >= 3.5
wget https://nodejs.org/dist/v10.8.0/node-v10.8.0-linux-x64.tar.xz node >= 10.9.0
tar xf node-v10.8.0-linux-x64.tar.xz yarn >= 1.9.4
mv node-v10.8.0-linux-x64/* /usr/local/node/
### Prepare Yarn 1.6.0 or above
wget https://github.com/yarnpkg/yarn/releases/download/v1.6.0/yarn-v1.6.0.tar.gz
tar xf yarn-v1.6.0.tar.gz
mv yarn-v1.6.0/* /usr/local/yarn/
### Add Node.js and Yarn in PATH
export PATH=/usr/local/node/bin:/usr/local/yarn/bin:$PATH
### clone nni source code
git clone ...
### build and install nni
make build
sudo make install
``` ```
Before install nni, please make sure you have installed python environment correctly.
* __User installation__
* clone nni repository
git clone https://github.com/Microsoft/NeuralNetworkIntelligence
* run install.sh
cd NeuralNetworkIntelligence
sh ./install.sh
This documentation assumes you have setup one or more [training services](). For more details about installation, please refer to [Installation instructions](Installation.md).
## **Quick start: run an experiment at local** ## **Quick start: run an experiment at local**
Requirements: Requirements:
...@@ -57,7 +52,7 @@ Requirements: ...@@ -57,7 +52,7 @@ Requirements:
Run the following command to create an experiment for [mnist] Run the following command to create an experiment for [mnist]
```bash ```bash
nnictl create --config $HOME/.nni/examples/trials/mnist-annotation/config.yaml nnictl create --config /usr/share/nni/examples/trials/mnist-annotation/config.yml
``` ```
This command will start the experiment and WebUI. The WebUI endpoint will be shown in the output of this command (for example, `http://localhost:8080`). Open this URL using your browsers. You can analyze your experiment through WebUI, or open trials' tensorboard. This command will start the experiment and WebUI. The WebUI endpoint will be shown in the output of this command (for example, `http://localhost:8080`). Open this URL using your browsers. You can analyze your experiment through WebUI, or open trials' tensorboard.
...@@ -69,9 +64,9 @@ An experiment is to run multiple trial jobs, each trial job tries a configuratio ...@@ -69,9 +64,9 @@ An experiment is to run multiple trial jobs, each trial job tries a configuratio
* Provide a yaml experiment configure file * Provide a yaml experiment configure file
* (optional) Provide or choose an assessor * (optional) Provide or choose an assessor
**Prepare trial**: Let's use a simple trial example, e.g. mnist, provided by NNI. After you installed NNI, NNI examples have been put in $HOME/.nni/examples, run `ls $HOME/.nni/examples/trials` to see all the trial examples. You can simply execute the following command to run the NNI mnist example: **Prepare trial**: Let's use a simple trial example, e.g. mnist, provided by NNI. After you installed NNI, NNI examples have been put in /usr/share/nni/examples, run `ls /usr/share/nni/examples/trials` to see all the trial examples. You can simply execute the following command to run the NNI mnist example:
python $HOME/.nni/examples/trials/mnist-annotation/mnist.py python /usr/share/nni/examples/trials/mnist-annotation/mnist.py
This command will be filled in the yaml configure file below. Please refer to [here]() for how to write your own trial. This command will be filled in the yaml configure file below. Please refer to [here]() for how to write your own trial.
...@@ -82,7 +77,7 @@ This command will be filled in the yaml configure file below. Please refer to [h ...@@ -82,7 +77,7 @@ This command will be filled in the yaml configure file below. Please refer to [h
*tunerName* is used to specify a tuner in NNI, *optimizationMode* is to indicate whether you want to maximize or minimize your trial's result. *tunerName* is used to specify a tuner in NNI, *optimizationMode* is to indicate whether you want to maximize or minimize your trial's result.
**Prepare configure file**: Since you have already known which trial code you are going to run and which tuner you are going to use, it is time to prepare the yaml configure file. NNI provides a demo configure file for each trial example, `cat $HOME/.nni/examples/trials/mnist-annotation/config.yaml` to see it. Its content is basically shown below: **Prepare configure file**: Since you have already known which trial code you are going to run and which tuner you are going to use, it is time to prepare the yaml configure file. NNI provides a demo configure file for each trial example, `cat /usr/share/nni/examples/trials/mnist-annotation/config.yml` to see it. Its content is basically shown below:
``` ```
authorName: your_name authorName: your_name
...@@ -102,7 +97,7 @@ tuner: ...@@ -102,7 +97,7 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python mnist.py trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0 trialGpuNum: 0
``` ```
...@@ -110,7 +105,7 @@ Here *useAnnotation* is true because this trial example uses our python annotati ...@@ -110,7 +105,7 @@ Here *useAnnotation* is true because this trial example uses our python annotati
With all these steps done, we can run the experiment with the following command: With all these steps done, we can run the experiment with the following command:
nnictl create --config $HOME/.nni/examples/trials/mnist-annotation/config.yaml nnictl create --config /usr/share/nni/examples/trials/mnist-annotation/config.yml
You can refer to [here](NNICTLDOC.md) for more usage guide of *nnictl* command line tool. You can refer to [here](NNICTLDOC.md) for more usage guide of *nnictl* command line tool.
......
Installation instructions
===
## Install using deb file
TBD
## Install from source code
* Prepare Node.js 10.9.0 or above

      wget https://nodejs.org/dist/v10.9.0/node-v10.9.0-linux-x64.tar.xz
      tar xf node-v10.9.0-linux-x64.tar.xz
      mv node-v10.9.0-linux-x64/* /usr/local/node/

* Prepare Yarn 1.9.4 or above

      wget https://github.com/yarnpkg/yarn/releases/download/v1.9.4/yarn-v1.9.4.tar.gz
      tar xf yarn-v1.9.4.tar.gz
      mv yarn-v1.9.4/* /usr/local/yarn/

* Add Node.js and Yarn to PATH

      export PATH=/usr/local/node/bin:/usr/local/yarn/bin:$PATH

* Clone the NNI source code

      git clone https://github.com/Microsoft/NeuralNetworkIntelligence

* Build and install NNI

      make build
      sudo make install
...@@ -182,24 +182,24 @@ nnictl log ...@@ -182,24 +182,24 @@ nnictl log
### Manage experiment information ### Manage experiment information
* __nnictl experiment ls__ * __nnictl experiment show__
* Description * Description
Show the information of experiment. Show the information of experiment.
* Usage * Usage
nnictl experiment ls nnictl experiment show
* __nnictl config ls__ * __nnictl config show__
* Description * Description
Display the current context information. Display the current context information.
* Usage * Usage
nnictl config ls nnictl config show
### Manage restful server ### Manage restful server
* __nnictl rest check__ * __nnictl rest check__
......
...@@ -30,7 +30,7 @@ tuner: ...@@ -30,7 +30,7 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python mnist.py trialCommand: python mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0 trialGpuNum: 0
#machineList can be empty if the platform is local #machineList can be empty if the platform is local
machineList: machineList:
......
...@@ -5,7 +5,7 @@ maxExecDuration: 1h ...@@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1 maxTrialNum: 1
#choice: local, remote #choice: local, remote
trainingServicePlatform: local trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/cifar10/search_space.json searchSpacePath: /usr/share/nni/examples/trials/cifar10/search_space.json
#choice: true, false #choice: true, false
useAnnotation: false useAnnotation: false
tuner: tuner:
...@@ -15,5 +15,5 @@ tuner: ...@@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python3 cifar10.py trialCommand: python3 cifar10.py
trialCodeDir: $HOME/.nni/examples/trials/cifar10 trialCodeDir: /usr/share/nni/examples/trials/cifar10
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
# NNI experiment configuration for the CIFAR-10 trial example.
# Fix: children of tuner/assessor/trial were not indented, which makes the
# document invalid YAML (empty mappings plus duplicate top-level
# `optimizationMode` keys). Indentation restored; no values changed.
authorName: default
experimentName: example_cifar10
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: /usr/share/nni/examples/trials/cifar10/search_space.json
#choice: true, false
useAnnotation: false
tuner:
  #choice: TPE, Random, Anneal, Evolution
  tunerName: TPE
  #choice: Maximize, Minimize
  optimizationMode: Maximize
assessor:
  #choice: Medianstop
  assessorName: Medianstop
  #choice: Maximize, Minimize
  optimizationMode: Maximize
trial:
  trialCommand: python3 cifar10.py
  trialCodeDir: /usr/share/nni/examples/trials/cifar10
  trialGpuNum: 0
\ No newline at end of file
...@@ -9,8 +9,8 @@ trainingServicePlatform: local ...@@ -9,8 +9,8 @@ trainingServicePlatform: local
useAnnotation: false useAnnotation: false
tuner: tuner:
tunerCommand: python3 __main__.py tunerCommand: python3 __main__.py
tunerCwd: $HOME/.nni/examples/tuners/ga_customer_tuner tunerCwd: /usr/share/nni/examples/tuners/ga_customer_tuner
trial: trial:
trialCommand: python3 trial.py trialCommand: python3 trial.py
trialCodeDir: $HOME/.nni/examples/trials/ga_squad trialCodeDir: /usr/share/nni/examples/trials/ga_squad
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
...@@ -14,5 +14,5 @@ tuner: ...@@ -14,5 +14,5 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python3 mnist.py trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-annotation trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
...@@ -5,7 +5,7 @@ maxExecDuration: 1h ...@@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1 maxTrialNum: 1
#choice: local, remote #choice: local, remote
trainingServicePlatform: local trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/mnist-keras/search_space.json searchSpacePath: /usr/share/nni/examples/trials/mnist-keras/search_space.json
#choice: true, false #choice: true, false
useAnnotation: false useAnnotation: false
tuner: tuner:
...@@ -15,5 +15,5 @@ tuner: ...@@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python3 mnist-keras.py trialCommand: python3 mnist-keras.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-keras trialCodeDir: /usr/share/nni/examples/trials/mnist-keras
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
...@@ -14,5 +14,5 @@ tuner: ...@@ -14,5 +14,5 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python3 mnist.py trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist-smartparam trialCodeDir: /usr/share/nni/examples/trials/mnist-smartparam
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
...@@ -5,7 +5,7 @@ maxExecDuration: 1h ...@@ -5,7 +5,7 @@ maxExecDuration: 1h
maxTrialNum: 1 maxTrialNum: 1
#choice: local, remote #choice: local, remote
trainingServicePlatform: local trainingServicePlatform: local
searchSpacePath: $HOME/.nni/examples/trials/mnist/search_space.json searchSpacePath: /usr/share/nni/examples/trials/mnist/search_space.json
#choice: true, false #choice: true, false
useAnnotation: false useAnnotation: false
tuner: tuner:
...@@ -15,5 +15,5 @@ tuner: ...@@ -15,5 +15,5 @@ tuner:
optimizationMode: Maximize optimizationMode: Maximize
trial: trial:
trialCommand: python3 mnist.py trialCommand: python3 mnist.py
trialCodeDir: $HOME/.nni/examples/trials/mnist trialCodeDir: /usr/share/nni/examples/trials/mnist
trialGpuNum: 0 trialGpuNum: 0
\ No newline at end of file
# NNI experiment configuration for the MNIST trial example.
# Fix: children of tuner/assessor/trial were not indented, which makes the
# document invalid YAML (empty mappings plus duplicate top-level
# `optimizationMode` keys). Indentation restored; no values changed.
authorName: default
experimentName: example_mnist
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 1
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: /usr/share/nni/examples/trials/mnist/search_space.json
#choice: true, false
useAnnotation: false
tuner:
  #choice: TPE, Random, Anneal, Evolution
  tunerName: TPE
  #choice: Maximize, Minimize
  optimizationMode: Maximize
assessor:
  #choice: Medianstop
  assessorName: Medianstop
  #choice: Maximize, Minimize
  optimizationMode: Maximize
trial:
  trialCommand: python3 mnist.py
  trialCodeDir: /usr/share/nni/examples/trials/mnist
  trialGpuNum: 0
\ No newline at end of file
#!/bin/bash
# Download and install Node.js v10.9.0 and Yarn v1.9.4 under /usr/local,
# then build and install NNI from the current source tree.
#
# Fixes: the original continued past failed downloads/extractions (no
# error handling) and `cp` into /usr/local/node//usr/local/yarn fails when
# those directories do not exist (no mkdir -p).
set -e

NODE_PKG=node-v10.9.0-linux-x64
YARN_PKG=yarn-v1.9.4

# -4 forces IPv4; -nc skips the download if the archive is already present.
wget -4 -nc "https://nodejs.org/dist/v10.9.0/${NODE_PKG}.tar.xz" --header "Referer: nodejs.org"
tar -xf "${NODE_PKG}.tar.xz"
# Ensure the destination exists before copying.
sudo mkdir -p /usr/local/node
sudo cp -rf "${NODE_PKG}"/* /usr/local/node/
rm -rf "${NODE_PKG}"*

wget -4 -nc "https://github.com/yarnpkg/yarn/releases/download/v1.9.4/${YARN_PKG}.tar.gz"
tar -xf "${YARN_PKG}.tar.gz"
sudo mkdir -p /usr/local/yarn
sudo cp -rf "${YARN_PKG}"/* /usr/local/yarn/
rm -rf "${YARN_PKG}"*

# Make the freshly installed tools visible to the build steps below.
# NOTE(review): this PATH change affects only this script's process, not the caller's shell.
export PATH=/usr/local/node/bin:/usr/local/yarn/bin:$PATH
make
sudo make install
...@@ -96,10 +96,11 @@ class IpcInterface { ...@@ -96,10 +96,11 @@ class IpcInterface {
* @param content: payload of command * @param content: payload of command
*/ */
public sendCommand(commandType: string, content: string = ''): void { public sendCommand(commandType: string, content: string = ''): void {
this.logger.debug(`ipcInterface command type: [${commandType}], content:[${content}]`);
assert.ok(this.acceptCommandTypes.has(commandType)); assert.ok(this.acceptCommandTypes.has(commandType));
const data: Buffer = encodeCommand(commandType, content); const data: Buffer = encodeCommand(commandType, content);
if (!this.outgoingStream.write(data)) { if (!this.outgoingStream.write(data)) {
//this.logger.warning('Commands jammed in buffer!'); this.logger.error('Commands jammed in buffer!');
} }
} }
......
...@@ -32,7 +32,7 @@ import { NNIDataStore } from './core/nniDataStore'; ...@@ -32,7 +32,7 @@ import { NNIDataStore } from './core/nniDataStore';
import { NNIManager } from './core/nnimanager'; import { NNIManager } from './core/nnimanager';
import { SqlDB } from './core/sqlDatabase'; import { SqlDB } from './core/sqlDatabase';
import { RestServer } from './rest_server/server'; import { RestServer } from './rest_server/server';
import { LocalTrainingService } from './training_service/local/localTrainingService'; import { LocalTrainingServiceForGPU } from './training_service/local/localTrainingServiceForGPU';
import { import {
RemoteMachineTrainingService RemoteMachineTrainingService
} from './training_service/remote_machine/remoteMachineTrainingService'; } from './training_service/remote_machine/remoteMachineTrainingService';
...@@ -46,7 +46,7 @@ function initStartupInfo(startExpMode: string, resumeExperimentId: string) { ...@@ -46,7 +46,7 @@ function initStartupInfo(startExpMode: string, resumeExperimentId: string) {
async function initContainer(platformMode: string): Promise<void> { async function initContainer(platformMode: string): Promise<void> {
if (platformMode === 'local') { if (platformMode === 'local') {
Container.bind(TrainingService).to(LocalTrainingService).scope(Scope.Singleton); Container.bind(TrainingService).to(LocalTrainingServiceForGPU).scope(Scope.Singleton);
} else if (platformMode === 'remote') { } else if (platformMode === 'remote') {
Container.bind(TrainingService).to(RemoteMachineTrainingService).scope(Scope.Singleton); Container.bind(TrainingService).to(RemoteMachineTrainingService).scope(Scope.Singleton);
} else { } else {
......
/**
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
'use strict';
/**
 * Configuration of a single trial job.
 * Immutable container for the user-configurable properties of a trial.
 */
export class TrialConfig {
    /**
     * Constructor — TypeScript parameter properties declare and assign the
     * readonly fields in one step (same public shape as declaring them above).
     * @param command Command line used to launch the trial
     * @param codeDir Directory containing the trial's code
     * @param gpuNum Number of GPUs required by the trial job; should be in [0,100]
     */
    constructor(
        public readonly command: string,
        public readonly codeDir: string,
        public readonly gpuNum: number) {
    }
}
\ No newline at end of file
/**
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
* BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
'use strict';
/**
 * Enum of metadata keys for configuration.
 *
 * The string values are the actual keys used when the metadata is
 * transmitted/stored; renaming an enum member is safe, changing its
 * string value is not.
 */
export enum TrialConfigMetadataKey {
// List of machines available to run trials — NOTE(review): exact consumer not visible in this file; confirm
MACHINE_LIST = 'machine_list',
// Trial job configuration (command, code directory, GPU count)
TRIAL_CONFIG = 'trial_config',
// Identifier of the experiment the configuration belongs to
EXPERIMENT_ID = 'experimentId',
// Toggle for a random scheduler — NOTE(review): semantics not visible here; verify against consumer
RANDOM_SCHEDULER = 'random_scheduler'
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment