Unverified Commit 0b9d6ce6 authored by Chi Song, committed by GitHub

Reuse OpenPAI jobs to run multiple trials (#2521)

Designed a new interface to support reusable training services. It currently applies only to OpenPAI and is disabled by default (the sketch after this list shows how the reuse path is selected).

Replace trial_keeper.py with trial_runner.py. The trial runner holds an environment, receives commands from the NNI manager to run or stop a trial, and returns events to the NNI manager.
Add a trial dispatcher, which implements the original training service interface. It is meant to share as much code as possible across training services while staying isolated from them.
Add an EnvironmentService interface to manage environments, including starting/stopping an environment and refreshing environment status.
Add a command channel on both the NNI manager and trial runner sides; it supports different ways to pass messages between them. Currently supported channels are file and web sockets. Supported commands from the NNI manager are: start trial, kill trial, and send new parameters. Supported commands from the runner are: initialized (for channels that cannot tell which runner connected), trial end, stdout (new type, including metrics as before), version check (new type), and GPU info (new type).
Add a storage service to wrap a storage backend (such as NFS, Azure storage, and so on) with standard file operations.
Partially support running multiple trials in parallel on the runner side; this is not yet supported on the trial dispatcher side.
Other minor changes:

Add log_level to the training service UT, so that UTs can show debug-level logs.
Expose the platform in the experiment startup info.
Add RouterTrainingService to keep the original OpenPAI training service and support dynamic IoC binding.
Add more GPU info for future use, including GPU memory total/free/used and GPU type.
Make some license information consistent.
Fix async/await problems with Array.forEach, which does not actually support async callbacks.
Fix integration test errors on downloading data, caused by my #2484.
Accelerate some run-loop patterns by reducing sleep time.
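As a rough, non-authoritative sketch of how the reuse path is selected (based on the RouterTrainingService code in this change; the import paths, the driver function name, and all config field values are placeholders or assumptions), setting reuse to true in the OpenPAI cluster config routes trials through the new TrialDispatcher instead of the original PAIK8STrainingService:

// Sketch only: feeds RouterTrainingService a PAI cluster config whose `reuse` flag
// decides between the reusable path (TrialDispatcher) and the original one.
// The import paths and the helper function name are assumptions for illustration.
import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey';
import { RouterTrainingService } from './routerTrainingService';

async function selectTrainingServicePath(): Promise<void> {
    const router = new RouterTrainingService();
    const paiClusterConfig = {
        host: 'https://my-openpai-cluster',  // placeholder cluster address
        userName: 'alice',                   // placeholder user
        token: '***',                        // placeholder access token
        reuse: true                          // true -> TrialDispatcher + OpenPaiEnvironmentService bindings
    };
    // RouterTrainingService inspects `reuse` when it receives the PAI cluster config and
    // binds either the reusable services or the original PAIK8STrainingService.
    await router.setClusterMetadata(
        TrialConfigMetadataKey.PAI_CLUSTER_CONFIG, JSON.stringify(paiClusterConfig));
}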
parent 6de15707
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import { GPUSummary } from "training_service/common/gpuData";
import { getLogger, Logger } from "../../common/log";
import { TrialJobStatus } from "../../common/trainingService";
import { EventEmitter } from "events";
import { WebCommandChannel } from "./channels/webCommandChannel";
import { CommandChannel } from "./commandChannel";
export type EnvironmentStatus = 'UNKNOWN' | 'WAITING' | 'RUNNING' | 'SUCCEEDED' | 'FAILED' | 'USER_CANCELED';
export type Channel = "web" | "file" | "aml" | "ut";
export class EnvironmentInformation {
private log: Logger;
// NNI environment ID
public id: string;
// training platform unique job ID.
public jobId: string;
// training platform job friendly name, in case it differs from the job ID.
public jobName: string;
// key states
// true: environment is ready to run trial.
public isIdle: boolean = false;
// true: environment is running, waiting, or unknown.
public isAlive: boolean = true;
// don't set status on the environment directly; use the setFinalStatus function to set a final state.
public status: EnvironmentStatus = "UNKNOWN";
public trackingUrl: string = "";
public workingFolder: string = "";
public runnerWorkingFolder: string = "";
public command: string = "";
public nodeCount: number = 1;
// used to aggregate node status for multi-node trials
public nodes: Map<string, NodeInfomation>;
public gpuSummary: Map<string, GPUSummary> = new Map<string, GPUSummary>();
constructor(id: string, jobName: string, jobId?: string) {
this.log = getLogger();
this.id = id;
this.jobName = jobName;
this.jobId = jobId ? jobId : jobName;
this.nodes = new Map<string, NodeInfomation>();
}
public setFinalStatus(status: EnvironmentStatus): void {
switch (status) {
case 'WAITING':
case 'SUCCEEDED':
case 'FAILED':
case 'USER_CANCELED':
this.status = status;
break;
default:
this.log.error(`Environment: job ${this.jobId} set an invalid final state ${status}.`);
break;
}
}
}
export abstract class EnvironmentService {
public abstract get hasStorageService(): boolean;
public abstract config(key: string, value: string): Promise<void>;
public abstract refreshEnvironmentsStatus(environments: EnvironmentInformation[]): Promise<void>;
public abstract startEnvironment(environment: EnvironmentInformation): Promise<void>;
public abstract stopEnvironment(environment: EnvironmentInformation): Promise<void>;
public getCommandChannel(commandEmitter: EventEmitter): CommandChannel {
return new WebCommandChannel(commandEmitter);
}
public createEnviornmentInfomation(envId: string, envName: string): EnvironmentInformation {
return new EnvironmentInformation(envId, envName);
}
}
export class NodeInfomation {
public id: string;
public status: TrialJobStatus = "UNKNOWN";
public endTime?: number;
constructor(id: string) {
this.id = id;
}
}
export class RunnerSettings {
public experimentId: string = "";
public platform: string = "";
public nniManagerIP: string = "";
public nniManagerPort: number = 8081;
public nniManagerVersion: string = "";
public logCollection: string = "none";
public command: string = "";
public enableGpuCollector: boolean = false;
// specifies which communication channel is used by the runner.
// supported channels include: web, file, aml, ut.
public commandChannel: Channel = "file";
}
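To illustrate the EnvironmentService contract above, here is a minimal, hypothetical implementation sketch. The class name and all of its behavior are invented for illustration; only the abstract members come from the interface above, and a real service is expected to talk to an actual platform, as OpenPaiEnvironmentService does below.

// Hypothetical environment service that only illustrates the abstract contract above.
// It pretends every environment starts successfully and stays alive until it is stopped.
import { EnvironmentInformation, EnvironmentService } from './environment';

export class DummyEnvironmentService extends EnvironmentService {
    // No real storage backend behind this service.
    public get hasStorageService(): boolean {
        return false;
    }

    public async config(_key: string, _value: string): Promise<void> {
        // A real service parses cluster/trial configuration here.
    }

    public async refreshEnvironmentsStatus(environments: EnvironmentInformation[]): Promise<void> {
        // A real service queries the platform; here every environment simply reports RUNNING.
        for (const environment of environments) {
            environment.status = 'RUNNING';
        }
    }

    public async startEnvironment(environment: EnvironmentInformation): Promise<void> {
        environment.trackingUrl = `http://example.invalid/jobs/${environment.jobId}`;
    }

    public async stopEnvironment(environment: EnvironmentInformation): Promise<void> {
        environment.setFinalStatus('USER_CANCELED');
    }
}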
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import * as fs from 'fs';
import * as request from 'request';
import { Deferred } from 'ts-deferred';
import * as component from '../../../common/component';
import { getExperimentId } from '../../../common/experimentStartupInfo';
import { getLogger, Logger } from '../../../common/log';
import { TrialConfigMetadataKey } from '../../common/trialConfigMetadataKey';
import { PAIClusterConfig } from '../../pai/paiConfig';
import { NNIPAIK8STrialConfig } from '../../pai/paiK8S/paiK8SConfig';
import { EnvironmentInformation, EnvironmentService } from '../environment';
import { StorageService } from '../storageService';
const yaml = require('js-yaml');
/**
* Collects PAI job info from the PAI cluster and updates PAI job status locally.
*/
@component.Singleton
export class OpenPaiEnvironmentService extends EnvironmentService {
private readonly log: Logger = getLogger();
private paiClusterConfig: PAIClusterConfig | undefined;
private paiTrialConfig: NNIPAIK8STrialConfig | undefined;
private paiJobConfig: any;
private paiToken?: string;
private paiTokenUpdateTime?: number;
private readonly paiTokenUpdateInterval: number;
private protocol: string = 'http';
private experimentId: string;
constructor() {
super();
this.paiTokenUpdateInterval = 7200000; //2hours
this.experimentId = getExperimentId();
}
public get hasStorageService(): boolean {
return true;
}
public async config(key: string, value: string): Promise<void> {
switch (key) {
case TrialConfigMetadataKey.PAI_CLUSTER_CONFIG:
this.paiClusterConfig = <PAIClusterConfig>JSON.parse(value);
this.paiClusterConfig.host = this.formatPAIHost(this.paiClusterConfig.host);
if (this.paiClusterConfig.passWord) {
// Get PAI authentication token
await this.updatePaiToken();
} else if (this.paiClusterConfig.token) {
this.paiToken = this.paiClusterConfig.token;
}
break;
case TrialConfigMetadataKey.TRIAL_CONFIG: {
if (this.paiClusterConfig === undefined) {
this.log.error('pai cluster config is not initialized');
break;
}
this.paiTrialConfig = <NNIPAIK8STrialConfig>JSON.parse(value);
// Validate to make sure codeDir doesn't have too many files
const storageService = component.get<StorageService>(StorageService);
const remoteRoot = storageService.joinPath(this.paiTrialConfig.nniManagerNFSMountPath, this.experimentId);
storageService.initialize(this.paiTrialConfig.nniManagerNFSMountPath, remoteRoot);
if (this.paiTrialConfig.paiConfigPath) {
this.paiJobConfig = yaml.safeLoad(fs.readFileSync(this.paiTrialConfig.paiConfigPath, 'utf8'));
}
break;
}
default:
this.log.debug(`OpenPAI did not process metadata key: '${key}', value: '${value}'`);
}
}
public async refreshEnvironmentsStatus(environments: EnvironmentInformation[]): Promise<void> {
const deferred: Deferred<void> = new Deferred<void>();
await this.refreshPlatform();
if (this.paiClusterConfig === undefined) {
throw new Error('PAI Cluster config is not initialized');
}
if (this.paiToken === undefined) {
throw new Error('PAI token is not initialized');
}
const getJobInfoRequest: request.Options = {
uri: `${this.protocol}://${this.paiClusterConfig.host}/rest-server/api/v2/jobs?username=${this.paiClusterConfig.userName}`,
method: 'GET',
json: true,
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.paiToken}`
}
};
request(getJobInfoRequest, async (error: any, response: request.Response, body: any) => {
if ((error !== undefined && error !== null) || response.statusCode >= 400) {
this.log.error(`OpenPAI: get environment list from PAI Cluster failed!\nerror: ${error}`);
deferred.reject(error);
} else {
const jobInfos = new Map<string, any>();
body.forEach((jobInfo: any) => {
jobInfos.set(jobInfo.name, jobInfo);
});
environments.forEach((environment) => {
if (jobInfos.has(environment.jobId)) {
const jobResponse = jobInfos.get(environment.jobId);
if (jobResponse && jobResponse.state) {
const oldEnvironmentStatus = environment.status;
switch (jobResponse.state) {
case 'RUNNING':
case 'WAITING':
// RUNNING status is set by the runner, and WAITING status is ignored here
break;
case 'SUCCEEDED':
case 'FAILED':
environment.setFinalStatus(jobResponse.state);
break;
case 'STOPPED':
case 'STOPPING':
environment.setFinalStatus('USER_CANCELED');
break;
default:
this.log.error(`OpenPAI: job ${environment.jobId} returns unknown state ${jobResponse.state}.`);
environment.setFinalStatus('UNKNOWN');
}
if (oldEnvironmentStatus !== environment.status) {
this.log.debug(`OpenPAI: job ${environment.jobId} status changed from ${oldEnvironmentStatus} to ${environment.status}, because job state is ${jobResponse.state}.`);
}
} else {
this.log.error(`OpenPAI: job ${environment.jobId} has no state returned. body:${JSON.stringify(jobResponse)}`);
// something went wrong, so mark this environment as failed
environment.status = 'FAILED';
}
} else {
this.log.error(`OpenPAI job ${environment.jobId} is not found in job list.`);
environment.status = 'UNKNOWN';
}
});
deferred.resolve();
}
});
return deferred.promise;
}
public async startEnvironment(environment: EnvironmentInformation): Promise<void> {
const deferred: Deferred<void> = new Deferred<void>();
await this.refreshPlatform();
if (this.paiClusterConfig === undefined) {
throw new Error('PAI Cluster config is not initialized');
}
if (this.paiToken === undefined) {
throw new Error('PAI token is not initialized');
}
if (this.paiTrialConfig === undefined) {
throw new Error('PAI trial config is not initialized');
}
// Step 1. Prepare PAI job configuration
environment.runnerWorkingFolder = `${this.paiTrialConfig.containerNFSMountPath}/${this.experimentId}/envs/${environment.id}`;
environment.command = `cd ${environment.runnerWorkingFolder} && ${environment.command}`
environment.trackingUrl = `${this.protocol}://${this.paiClusterConfig.host}/job-detail.html?username=${this.paiClusterConfig.userName}&jobName=${environment.jobId}`
// Step 2. Generate Job Configuration in yaml format
const paiJobConfig = this.generateJobConfigInYamlFormat(environment);
this.log.debug(`generated paiJobConfig: ${paiJobConfig}`);
// Step 3. Submit PAI job via Rest call
const submitJobRequest: request.Options = {
uri: `${this.protocol}://${this.paiClusterConfig.host}/rest-server/api/v2/jobs`,
method: 'POST',
body: paiJobConfig,
headers: {
'Content-Type': 'text/yaml',
Authorization: `Bearer ${this.paiToken}`
}
};
request(submitJobRequest, (error, response, body) => {
if ((error !== undefined && error !== null) || response.statusCode >= 400) {
const errorMessage: string = (error !== undefined && error !== null) ? error.message :
`start environment ${environment.jobId} failed, http code:${response.statusCode}, http body: ${body}`;
this.log.error(errorMessage);
environment.status = 'FAILED';
}
deferred.resolve();
});
return deferred.promise;
}
public async stopEnvironment(environment: EnvironmentInformation): Promise<void> {
const deferred: Deferred<void> = new Deferred<void>();
if (this.paiClusterConfig === undefined) {
return Promise.reject(new Error('PAI Cluster config is not initialized'));
}
if (this.paiToken === undefined) {
return Promise.reject(Error('PAI token is not initialized'));
}
const stopJobRequest: request.Options = {
uri: `${this.protocol}://${this.paiClusterConfig.host}/rest-server/api/v2/jobs/${this.paiClusterConfig.userName}~${environment.jobId}/executionType`,
method: 'PUT',
json: true,
body: { value: 'STOP' },
time: true,
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${this.paiToken}`
}
};
this.log.debug(`stopping OpenPAI environment ${environment.jobId}, ${stopJobRequest.uri}`);
try {
request(stopJobRequest, (error, response, _body) => {
try {
if ((error !== undefined && error !== null) || (response && response.statusCode >= 400)) {
this.log.error(`OpenPAI: stop job ${environment.jobId} failed with ${response.statusCode}\n${error}`);
deferred.reject((error !== undefined && error !== null) ? error :
`Stop trial failed, http code: ${response.statusCode}`);
} else {
this.log.info(`OpenPAI job ${environment.jobId} stopped.`);
}
deferred.resolve();
} catch (error) {
this.log.error(`OpenPAI: error in the stop-environment callback: ${error}`);
deferred.reject(error);
}
});
} catch (error) {
this.log.error(`OpenPAI error when stopping environment ${error}`);
return Promise.reject(error);
}
return deferred.promise;
}
private async refreshPlatform(): Promise<void> {
if (this.paiClusterConfig && this.paiClusterConfig.passWord) {
try {
await this.updatePaiToken();
} catch (error) {
this.log.error(`${error}`);
if (this.paiToken === undefined) {
throw new Error(error);
}
}
}
}
private generateJobConfigInYamlFormat(environment: EnvironmentInformation): any {
if (this.paiTrialConfig === undefined) {
throw new Error('trial config is not initialized');
}
const jobName = environment.jobId;
let nniJobConfig: any = undefined;
if (this.paiTrialConfig.paiConfigPath) {
nniJobConfig = JSON.parse(JSON.stringify(this.paiJobConfig)); //Trick for deep clone in Typescript
nniJobConfig.name = jobName;
if (nniJobConfig.taskRoles) {
environment.nodeCount = 0;
// count instance
for (const taskRoleName in nniJobConfig.taskRoles) {
const taskRole = nniJobConfig.taskRoles[taskRoleName];
let instanceCount = 1;
if (taskRole.instances) {
instanceCount = taskRole.instances;
}
environment.nodeCount += instanceCount;
}
// Each taskRole will generate new command in NNI's command format
// Each command will be formatted to NNI style
for (const taskRoleName in nniJobConfig.taskRoles) {
const taskRole = nniJobConfig.taskRoles[taskRoleName];
// replace ' to '\''
const joinedCommand = taskRole.commands.join(" && ").replace("'", "'\\''").trim();
const nniTrialCommand = `${environment.command} --node_count ${environment.nodeCount} --trial_command '${joinedCommand}'`;
this.log.debug(`replace command ${taskRole.commands} to ${[nniTrialCommand]}`);
taskRole.commands = [nniTrialCommand];
}
}
} else {
nniJobConfig = {
protocolVersion: 2,
name: jobName,
type: 'job',
jobRetryCount: 0,
prerequisites: [
{
type: 'dockerimage',
uri: this.paiTrialConfig.image,
name: 'docker_image_0'
}
],
taskRoles: {
taskrole: {
instances: 1,
completion: {
minFailedInstances: 1,
minSucceededInstances: -1
},
taskRetryCount: 0,
dockerImage: 'docker_image_0',
resourcePerInstance: {
gpu: this.paiTrialConfig.gpuNum,
cpu: this.paiTrialConfig.cpuNum,
memoryMB: this.paiTrialConfig.memoryMB
},
commands: [
environment.command
]
}
},
extras: {
'storages': [
{
name: this.paiTrialConfig.paiStorageConfigName
}
],
submitFrom: 'submit-job-v2'
}
}
if (this.paiTrialConfig.virtualCluster) {
nniJobConfig.defaults = {
virtualCluster: this.paiTrialConfig.virtualCluster
}
}
}
return yaml.safeDump(nniJobConfig);
}
protected formatPAIHost(host: string): string {
// If the user's host starts with 'http://' or 'https://', record the protocol and strip the prefix;
// otherwise return the host unchanged (the default protocol is 'http').
if (host.startsWith('http://')) {
this.protocol = 'http';
return host.replace('http://', '');
} else if (host.startsWith('https://')) {
this.protocol = 'https';
return host.replace('https://', '');
} else {
return host;
}
}
/**
* Update the PAI token when the update interval has elapsed, or initialize it.
*/
protected async updatePaiToken(): Promise<void> {
const deferred: Deferred<void> = new Deferred<void>();
const currentTime: number = new Date().getTime();
// If the PAI token is initialized and the update interval has not elapsed, do not update it
if (this.paiTokenUpdateTime !== undefined && (currentTime - this.paiTokenUpdateTime) < this.paiTokenUpdateInterval) {
return Promise.resolve();
}
if (this.paiClusterConfig === undefined) {
const paiClusterConfigError: string = `pai cluster config not initialized!`;
this.log.error(`${paiClusterConfigError}`);
throw Error(`${paiClusterConfigError}`);
}
const authenticationReq: request.Options = {
uri: `${this.protocol}://${this.paiClusterConfig.host}/rest-server/api/v1/token`,
method: 'POST',
json: true,
body: {
username: this.paiClusterConfig.userName,
password: this.paiClusterConfig.passWord
}
};
request(authenticationReq, (error: any, response: request.Response, body: any) => {
if (error !== undefined && error !== null) {
this.log.error(`Get PAI token failed: ${error.message}, authenticationReq: ${authenticationReq}`);
deferred.reject(new Error(`Get PAI token failed: ${error.message}`));
} else {
if (response.statusCode !== 200) {
this.log.error(`Get PAI token failed: get PAI Rest return code ${response.statusCode}, authenticationReq: ${authenticationReq}`);
deferred.reject(new Error(`Get PAI token failed code: ${response.statusCode}, body: ${response.body}, authenticationReq: ${authenticationReq}, please check paiConfig username or password`));
} else {
this.paiToken = body.token;
this.paiTokenUpdateTime = new Date().getTime();
deferred.resolve();
}
}
});
let timeoutId: NodeJS.Timer;
const timeoutDelay: Promise<void> = new Promise<void>((_resolve: Function, reject: Function): void => {
// Set timeout and reject the promise once reach timeout (5 seconds)
timeoutId = setTimeout(
() => reject(new Error('Get PAI token timeout. Please check your PAI cluster.')),
5000);
});
return Promise.race([timeoutDelay, deferred.promise])
.finally(() => { clearTimeout(timeoutId); });
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import { Container, Scope } from 'typescript-ioc';
import * as component from '../../common/component';
import { getLogger, Logger } from '../../common/log';
import { TrainingService, TrialJobApplicationForm, TrialJobDetail, TrialJobMetric } from '../../common/trainingService';
import { delay } from '../../common/utils';
import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey';
import { PAIClusterConfig } from '../pai/paiConfig';
import { PAIK8STrainingService } from '../pai/paiK8S/paiK8STrainingService';
import { EnvironmentService } from './environment';
import { OpenPaiEnvironmentService } from './environments/openPaiEnvironmentService';
import { MountedStorageService } from './storages/mountedStorageService';
import { StorageService } from './storageService';
import { TrialDispatcher } from './trialDispatcher';
/**
* It's an intermediate implementation to support reusable training services.
* The final goal is to support reusable training jobs at a higher level than the training service.
*/
@component.Singleton
class RouterTrainingService implements TrainingService {
protected readonly log!: Logger;
private internalTrainingService: TrainingService | undefined;
private metaDataCache: Map<string, string> = new Map<string, string>();
constructor() {
this.log = getLogger();
}
public async listTrialJobs(): Promise<TrialJobDetail[]> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return await this.internalTrainingService.listTrialJobs();
}
public async getTrialJob(trialJobId: string): Promise<TrialJobDetail> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return await this.internalTrainingService.getTrialJob(trialJobId);
}
public addTrialJobMetricListener(listener: (metric: TrialJobMetric) => void): void {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
this.internalTrainingService.addTrialJobMetricListener(listener);
}
public removeTrialJobMetricListener(listener: (metric: TrialJobMetric) => void): void {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
this.internalTrainingService.removeTrialJobMetricListener(listener);
}
public async submitTrialJob(form: TrialJobApplicationForm): Promise<TrialJobDetail> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return await this.internalTrainingService.submitTrialJob(form);
}
public async updateTrialJob(trialJobId: string, form: TrialJobApplicationForm): Promise<TrialJobDetail> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return await this.internalTrainingService.updateTrialJob(trialJobId, form);
}
public get isMultiPhaseJobSupported(): boolean {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return this.internalTrainingService.isMultiPhaseJobSupported;
}
public async cancelTrialJob(trialJobId: string, isEarlyStopped?: boolean | undefined): Promise<void> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
await this.internalTrainingService.cancelTrialJob(trialJobId, isEarlyStopped);
}
public async setClusterMetadata(key: string, value: string): Promise<void> {
if (this.internalTrainingService === undefined) {
if (key === TrialConfigMetadataKey.PAI_CLUSTER_CONFIG) {
const config = <PAIClusterConfig>JSON.parse(value);
if (config.reuse === true) {
this.log.info(`reuse flag enabled, using TrialDispatcher.`);
this.internalTrainingService = component.get(TrialDispatcher);
// TODO: support other services later.
Container.bind(EnvironmentService)
.to(OpenPaiEnvironmentService)
.scope(Scope.Singleton);
// TODO: support other storages later.
Container.bind(StorageService)
.to(MountedStorageService)
.scope(Scope.Singleton);
} else {
this.log.debug('reuse flag disabled, using the original PAIK8STrainingService.');
this.internalTrainingService = component.get(PAIK8STrainingService);
}
for (const [key, value] of this.metaDataCache) {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
await this.internalTrainingService.setClusterMetadata(key, value);
}
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
await this.internalTrainingService.setClusterMetadata(key, value);
this.metaDataCache.clear();
} else {
this.log.debug(`caching metadata key: ${key}, value: ${value}, as the training service is not determined yet.`);
this.metaDataCache.set(key, value);
}
} else {
await this.internalTrainingService.setClusterMetadata(key, value);
}
}
public async getClusterMetadata(key: string): Promise<string> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
return await this.internalTrainingService.getClusterMetadata(key);
}
public async cleanUp(): Promise<void> {
if (this.internalTrainingService === undefined) {
throw new Error("TrainingService is not assigned!");
}
await this.internalTrainingService.cleanUp();
}
public async run(): Promise<void> {
// wait until the internal training service is assigned.
// It is assigned after the PAI config metadata is set.
while (this.internalTrainingService === undefined) {
await delay(100);
}
return await this.internalTrainingService.run();
}
}
export { RouterTrainingService };
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { getLogger, Logger } from '../../common/log';
import { uniqueString } from '../../common/utils';
import { tarAdd } from '../common/util';
export abstract class StorageService {
protected localRoot: string = "";
protected remoteRoot: string = "";
protected logger: Logger;
protected abstract internalConfig(key: string, value: string): void;
protected abstract internalRemove(remotePath: string, isDirectory: boolean, isRecursive: boolean): Promise<void>;
protected abstract internalRename(remotePath: string, newName: string): Promise<void>;
protected abstract internalMkdir(remotePath: string): Promise<void>;
protected abstract internalCopy(sourcePath: string, targetPath: string, isDirectory: boolean, isFromRemote: boolean, isToRemote: boolean): Promise<string>;
protected abstract internalExists(remotePath: string): Promise<boolean>;
protected abstract internalRead(remotePath: string, offset: number, length: number): Promise<string>;
protected abstract internalList(remotePath: string): Promise<string[]>;
protected abstract internalAttach(remotePath: string, content: string): Promise<boolean>;
protected abstract internalIsRelativePath(path: string): boolean;
protected abstract internalJoin(...paths: string[]): string;
protected abstract internalDirname(...paths: string[]): string;
protected abstract internalBasename(...paths: string[]): string;
constructor() {
this.logger = getLogger();
}
public initialize(localRoot: string, remoteRoot: string): void {
this.logger.debug(`Initializing storage to local: ${localRoot} remote: ${remoteRoot}`);
this.localRoot = localRoot;
this.remoteRoot = remoteRoot;
}
public async rename(remotePath: string, newName: string): Promise<void> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`rename remotePath: ${remotePath} to: ${newName}`);
await this.internalRename(remotePath, newName);
}
public async createDirectory(remotePath: string): Promise<void> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`create remotePath: ${remotePath}`);
await this.internalMkdir(remotePath);
}
public async copyDirectory(localPath: string, remotePath: string, asGzip: boolean = false): Promise<string> {
localPath = this.expandPath(false, localPath);
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`copy localPath: ${localPath} to remotePath: ${remotePath}, asGzip ${asGzip}`);
if (!await this.internalExists(remotePath)) {
await this.internalMkdir(remotePath);
}
if (asGzip) {
const localPathBaseName = path.basename(localPath);
const tempTarFileName = `nni_tmp_${localPathBaseName}_${uniqueString(5)}.tar.gz`;
const tarFileName = `${localPathBaseName}.tar.gz`;
const localTarPath: string = path.join(os.tmpdir(), tempTarFileName);
await tarAdd(localTarPath, localPath);
await this.internalCopy(localTarPath, remotePath, false, false, true);
const remoteFileName = this.internalJoin(remotePath, tempTarFileName);
await this.internalRename(remoteFileName, tarFileName);
await fs.promises.unlink(localTarPath);
remotePath = this.internalJoin(remotePath, tarFileName);
} else {
await this.internalCopy(localPath, remotePath, true, false, true);
remotePath = this.internalJoin(remotePath, path.basename(localPath));
}
return remotePath;
}
public async copyDirectoryBack(remotePath: string, localPath: string): Promise<string> {
localPath = this.expandPath(false, localPath);
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`copy remotePath: ${remotePath} to localPath: ${localPath}`);
return await this.internalCopy(localPath, remotePath, true, true, false);
}
public async removeDirectory(remotePath: string, isRecursive: boolean): Promise<void> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`remove remotePath: ${remotePath}`);
await this.internalRemove(remotePath, true, isRecursive);
}
public async readFileContent(remotePath: string, offset: number = -1, length: number = -1): Promise<string> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`read remote file: ${remotePath}, offset: ${offset}, length: ${length}`);
return this.internalRead(remotePath, offset, length);
}
public async listDirectory(remotePath: string): Promise<string[]> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`list remotePath: ${remotePath}`);
return await this.internalList(remotePath);
}
public async exists(remotePath: string): Promise<boolean> {
remotePath = this.expandPath(true, remotePath);
const exists = await this.internalExists(remotePath);
this.logger.debug(`exists remotePath: ${remotePath} is ${exists}`);
return exists
}
public async save(content: string, remotePath: string, isAttach: boolean = false): Promise<void> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`saving content to remotePath: ${remotePath}, length: ${content.length}, isAttach: ${isAttach}`);
const remoteDir = this.internalDirname(remotePath);
if (isAttach) {
if (await this.internalExists(remoteDir) === false) {
await this.internalMkdir(remoteDir);
}
const result = await this.internalAttach(remotePath, content);
if (false === result) {
throw new Error("internalAttach is not supported by this storage service");
}
} else {
const fileName = this.internalBasename(remotePath);
const tempFileName = `temp_${uniqueString(4)}_${fileName}`;
const localTempFileName = path.join(os.tmpdir(), tempFileName);
const remoteTempFile = this.internalJoin(remoteDir, tempFileName);
if (await this.internalExists(remotePath) === true) {
await this.internalRemove(remotePath, false, false);
}
await fs.promises.writeFile(localTempFileName, content);
await this.internalCopy(localTempFileName, remoteDir, false, false, true);
await this.rename(remoteTempFile, fileName);
await fs.promises.unlink(localTempFileName);
}
}
public async copyFile(localPath: string, remotePath: string): Promise<void> {
localPath = this.expandPath(false, localPath);
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`copying file localPath: ${localPath} to remotePath: ${remotePath}`);
await this.internalCopy(localPath, remotePath, false, false, true);
}
public async copyFileBack(remotePath: string, localPath: string): Promise<void> {
localPath = this.expandPath(false, localPath);
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`copy file remotePath: ${remotePath} to localPath: ${localPath}`);
await this.internalCopy(localPath, remotePath, false, true, false);
}
public async removeFile(remotePath: string): Promise<void> {
remotePath = this.expandPath(true, remotePath);
this.logger.debug(`remove file remotePath: ${remotePath}`);
await this.internalRemove(remotePath, false, false);
}
public joinPath(...paths: string[]): string {
let fullPath = this.internalJoin(...paths);
if (this.internalIsRelativePath(fullPath) === true && this.remoteRoot !== "") {
fullPath = this.internalJoin(this.remoteRoot, fullPath);
}
return fullPath;
}
private expandPath(isRemote: boolean, ...paths: string[]): string {
let normalizedPath: string;
if (isRemote) {
normalizedPath = this.joinPath(...paths);
} else {
normalizedPath = path.join(...paths);
if (!path.isAbsolute(normalizedPath) && this.localRoot !== "") {
normalizedPath = path.join(this.localRoot, normalizedPath);
}
}
return normalizedPath;
}
}
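For context before the MountedStorageService implementation that follows, here is a condensed usage sketch of the StorageService API above. All paths, file names, and contents are illustrative placeholders, and the helper function is an assumption; the TrialDispatcher below performs a similar sequence when staging code and settings.

// Usage sketch for the StorageService API above, with MountedStorageService (defined
// later in this change) as the concrete backend. All paths and contents are placeholders.
import { MountedStorageService } from './storages/mountedStorageService';

async function stageExperimentFiles(): Promise<void> {
    const storage = new MountedStorageService();
    // localRoot is where files are produced; remoteRoot is the mounted share (e.g. NFS).
    storage.initialize('/tmp/nni/local', '/mnt/nfs/nni/exp123');

    await storage.createDirectory('envs');
    // Compress a local code folder, upload it, then write and read back a settings file.
    const remoteTarball = await storage.copyDirectory('/tmp/nni/local/code', 'envs', true);
    await storage.save('{"experimentId": "exp123"}', storage.joinPath('envs', 'settings.json'));
    const settings = await storage.readFileContent(storage.joinPath('envs', 'settings.json'));
    console.log(`uploaded ${remoteTarball}, settings: ${settings}`);
}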
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import * as fs from 'fs';
import * as path from 'path';
import { Deferred } from "ts-deferred";
import { StorageService } from "../storageService";
export class MountedStorageService extends StorageService {
protected internalConfig(_key: string, _value: string): void {
// nothing to config
}
protected async internalRemove(remotePath: string, isDirectory: boolean, isRecursive: boolean): Promise<void> {
if (isDirectory) {
if (isRecursive) {
const children = await fs.promises.readdir(remotePath);
for (const file of children) {
// join the child name with its parent folder before checking and removing it
const childPath = path.join(remotePath, file);
const stat = await fs.promises.lstat(childPath);
await this.internalRemove(childPath, stat.isDirectory(), isRecursive);
}
}
await fs.promises.rmdir(remotePath);
} else {
await fs.promises.unlink(remotePath);
}
}
protected async internalRename(remotePath: string, newName: string): Promise<void> {
const dirName = path.dirname(remotePath);
newName = this.internalJoin(dirName, newName);
await fs.promises.rename(remotePath, newName);
}
protected async internalMkdir(remotePath: string): Promise<void> {
if (!fs.existsSync(remotePath)) {
await fs.promises.mkdir(remotePath, { recursive: true });
}
}
protected async internalCopy(sourcePath: string, targetPath: string, isDirectory: boolean, isFromRemote: boolean = false, isToRemote: boolean = true): Promise<string> {
if (sourcePath === targetPath) {
return targetPath;
}
this.logger.debug(`copying ${sourcePath} to ${targetPath}, dir ${isDirectory}, isFromRemote ${isFromRemote}, isToRemote: ${isToRemote}`);
if (isDirectory) {
const basename = isFromRemote ? this.internalBasename(sourcePath) : path.basename(sourcePath);
if (isToRemote) {
targetPath = this.internalJoin(targetPath, basename);
await this.internalMkdir(targetPath);
} else {
targetPath = path.join(targetPath, basename);
await fs.promises.mkdir(targetPath);
}
const children = await fs.promises.readdir(sourcePath);
for (const child of children) {
const childSourcePath = this.internalJoin(sourcePath, child);
const stat = await fs.promises.lstat(childSourcePath);
await this.internalCopy(childSourcePath, targetPath, stat.isDirectory(), isFromRemote, isToRemote);
}
return targetPath;
} else {
// This behavior may not be consistent across platforms, but it needs to be normalized to the same result
await this.internalMkdir(targetPath);
const targetFileName = path.join(targetPath, path.basename(sourcePath));
await fs.promises.copyFile(sourcePath, targetFileName);
return targetFileName;
}
}
protected async internalExists(remotePath: string): Promise<boolean> {
const deferred = new Deferred<boolean>();
fs.exists(remotePath, (exists) => {
deferred.resolve(exists);
});
return deferred.promise;
}
protected async internalRead(remotePath: string, offset?: number, length?: number): Promise<string> {
const deferred = new Deferred<string>();
// cap the read length at 1 MB for performance reasons.
const maxLength = 1024 * 1024;
if (offset === undefined) {
offset = -1;
}
const current: number = offset < 0 ? 0 : offset;
if (length === undefined) {
length = -1;
}
const readLength: number = length < 0 ? maxLength : length;
let result: string = "";
const stream = fs.createReadStream(remotePath,
{
encoding: "utf8",
start: current,
end: readLength + current,
}).on("data", (data) => {
result += data;
}).on("end", () => {
stream.close();
deferred.resolve(result);
}).on("error", (err) => {
deferred.reject(err);
});
return deferred.promise;
}
protected async internalList(remotePath: string): Promise<string[]> {
let results: string[] = [];
if (await this.internalExists(remotePath) === true) {
results = await fs.promises.readdir(remotePath);
}
return results;
}
protected async internalAttach(remotePath: string, content: string): Promise<boolean> {
await fs.promises.appendFile(remotePath, content, {
encoding: "utf8",
flag: "a",
});
return true;
}
protected internalIsRelativePath(remotePath: string): boolean {
return !path.isAbsolute(remotePath);
}
protected internalJoin(...paths: string[]): string {
return path.join(...paths);
}
protected internalDirname(remotePath: string): string {
return path.dirname(remotePath);
}
protected internalBasename(remotePath: string): string {
return path.basename(remotePath);
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import { TrialJobApplicationForm, TrialJobDetail, TrialJobStatus } from "../../common/trainingService";
import { GPUInfo } from "../../training_service/common/gpuData";
import { EnvironmentInformation, NodeInfomation } from "./environment";
export class TrialDetail implements TrialJobDetail {
public id: string;
public status: TrialJobStatus;
public submitTime: number;
public startTime?: number;
public endTime?: number;
public tags?: string[];
public url?: string;
public workingDirectory: string;
public form: TrialJobApplicationForm;
public isEarlyStopped?: boolean;
public environment?: EnvironmentInformation;
// initial settings of the trial
public settings = {};
// used to aggregate node status for multi-node trials
public nodes: Map<string, NodeInfomation>;
// GPUs assigned when scheduling multiple trials
public assignedGpus: GPUInfo[] = [];
public readonly TRIAL_METADATA_DIR = ".nni";
constructor(id: string, status: TrialJobStatus, submitTime: number,
workingDirectory: string, form: TrialJobApplicationForm) {
this.id = id;
this.status = status;
this.submitTime = submitTime;
this.workingDirectory = workingDirectory;
this.form = form;
this.tags = [];
this.nodes = new Map<string, NodeInfomation>();
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
'use strict';
import { EventEmitter } from 'events';
import * as fs from 'fs';
import * as path from 'path';
import { Writable } from 'stream';
import { String } from 'typescript-string-operations';
import * as component from '../../common/component';
import { getExperimentId, getPlatform, getBasePort } from '../../common/experimentStartupInfo';
import { getLogger, Logger } from '../../common/log';
import { NNIManagerIpConfig, TrainingService, TrialJobApplicationForm, TrialJobMetric, TrialJobStatus } from '../../common/trainingService';
import { delay, getExperimentRootDir, getLogLevel, getVersion, mkDirPSync, uniqueString } from '../../common/utils';
import { GPU_INFO, INITIALIZED, KILL_TRIAL_JOB, NEW_TRIAL_JOB, REPORT_METRIC_DATA, SEND_TRIAL_JOB_PARAMETER, STDOUT, TRIAL_END, VERSION_CHECK } from '../../core/commands';
import { GPUSummary } from '../../training_service/common/gpuData';
import { CONTAINER_INSTALL_NNI_SHELL_FORMAT } from '../common/containerJobData';
import { TrialConfig } from '../common/trialConfig';
import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey';
import { validateCodeDir } from '../common/util';
import { WebCommandChannel } from './channels/webCommandChannel';
import { Command, CommandChannel } from './commandChannel';
import { EnvironmentInformation, EnvironmentService, NodeInfomation, RunnerSettings } from './environment';
import { StorageService } from './storageService';
import { TrialDetail } from './trial';
/**
* It is used to manage jobs on training platforms
* and to expose trials as trial jobs to the upper level.
*/
@component.Singleton
class TrialDispatcher implements TrainingService {
private readonly NNI_METRICS_PATTERN: string = `NNISDK_MEb'(?<metrics>.*?)'`;
private readonly log: Logger;
private readonly isDeveloping: boolean = false;
private stopping: boolean = false;
private readonly metricsEmitter: EventEmitter;
private readonly experimentId: string;
private enableVersionCheck: boolean = true;
private trialConfig: TrialConfig | undefined;
private runnerSettings: RunnerSettings;
private commandEmitter: EventEmitter | undefined;
private commandChannel: CommandChannel | undefined;
private readonly trials: Map<string, TrialDetail>;
private readonly environments: Map<string, EnvironmentInformation>;
constructor() {
this.log = getLogger();
this.trials = new Map<string, TrialDetail>();
this.environments = new Map<string, EnvironmentInformation>();
this.metricsEmitter = new EventEmitter();
this.experimentId = getExperimentId();
this.runnerSettings = new RunnerSettings();
this.runnerSettings.experimentId = this.experimentId;
this.runnerSettings.platform = getPlatform();
const logLevel = getLogLevel();
this.log.debug(`current folder ${__dirname}`);
// different source folder in Linux and Windows
if (logLevel == "debug" && (fs.existsSync("../../../src/nni_manager") || __dirname.endsWith("src\\nni_manager\\dist\\training_service\\reusable"))) {
this.log.debug("log level is debug and the source code folder exists, so switching to developing mode.");
this.isDeveloping = true;
this.runnerSettings.enableGpuCollector = true;
}
}
public async listTrialJobs(): Promise<TrialDetail[]> {
const trials: TrialDetail[] = [];
for (const key of this.trials.keys()) {
trials.push(await this.getTrialJob(key));
}
return trials;
}
public async getTrialJob(trialJobId: string): Promise<TrialDetail> {
const trial: TrialDetail | undefined = this.trials.get(trialJobId);
if (trial === undefined) {
throw new Error(`trial job ${trialJobId} not found`);
}
return trial;
}
public async submitTrialJob(form: TrialJobApplicationForm): Promise<TrialDetail> {
if (this.trialConfig === undefined) {
throw new Error(`trialConfig not initialized!`);
}
const trialId: string = uniqueString(5);
const environmentService = component.get<EnvironmentService>(EnvironmentService);
let trialWorkingFolder: string = "";
if (environmentService.hasStorageService) {
const storageService = component.get<StorageService>(StorageService);
trialWorkingFolder = storageService.joinPath('trials', trialId);
}
const trialJobDetail: TrialDetail = new TrialDetail(trialId, "WAITING", Date.now(), trialWorkingFolder, form);
this.trials.set(trialId, trialJobDetail);
return trialJobDetail;
}
// to support multi phase
public async updateTrialJob(trialJobId: string, form: TrialJobApplicationForm): Promise<TrialDetail> {
const trialDetail = await this.getTrialJob(trialJobId);
const environment = trialDetail.environment;
if (environment === undefined) {
throw new Error(`TrialDispatcher: trial ${trialJobId}'s env shouldn't be undefined in updateTrialJob.`);
}
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in updateTrialJob.`);
}
const message = {
"trialId": trialJobId,
"parameters": form.hyperParameters,
}
await this.commandChannel.sendCommand(environment, SEND_TRIAL_JOB_PARAMETER, message);
return trialDetail;
}
public async cancelTrialJob(trialJobId: string, isEarlyStopped?: boolean | undefined): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in cancelTrialJob.`);
}
const trial = await this.getTrialJob(trialJobId);
switch (trial.status) {
case "RUNNING":
case "WAITING":
case "UNKNOWN":
{
const environment = trial.environment;
if (environment) {
await this.commandChannel.sendCommand(environment, KILL_TRIAL_JOB, trial.id);
trial.isEarlyStopped = isEarlyStopped;
trial.status = trial.isEarlyStopped === true ?
'EARLY_STOPPED' : 'USER_CANCELED';
this.releaseEnvironment(trial);
}
}
break;
}
}
public async run(): Promise<void> {
const environmentService = component.get<EnvironmentService>(EnvironmentService);
this.commandEmitter = new EventEmitter();
this.commandChannel = new WebCommandChannel(this.commandEmitter);
// TODO: the web channel is hard-coded here; this needs to be improved.
this.runnerSettings.nniManagerPort = getBasePort() + 1;
this.runnerSettings.commandChannel = this.commandChannel.channelName;
// used by the AML channel; other channels can ignore this.
this.commandChannel.config("MetricEmitter", this.metricsEmitter);
// start channel
this.commandEmitter.on("command", (command: Command): void => {
this.handleCommand(command).catch((err: Error) => {
this.log.error(`TrialDispatcher: error on handle env ${command.environment.id} command: ${command.command}, data: ${command.data}, error: ${err}`);
})
});
this.commandChannel.start();
this.log.info(`TrialDispatcher: started channel: ${this.commandChannel.constructor.name}`);
if (this.trialConfig === undefined) {
throw new Error(`trial config shouldn't be undefined in run()`);
}
if (environmentService.hasStorageService) {
this.log.info(`TrialDispatcher: copying code and settings.`);
const storageService = component.get<StorageService>(StorageService);
// Copy the compressed file to remoteDirectory and delete it
const codeDir = path.resolve(this.trialConfig.codeDir);
const envDir = storageService.joinPath("envs");
const codeFileName = await storageService.copyDirectory(codeDir, envDir, true);
await storageService.rename(codeFileName, "nni-code.tar.gz");
const installFileName = storageService.joinPath(envDir, 'install_nni.sh');
await storageService.save(CONTAINER_INSTALL_NNI_SHELL_FORMAT, installFileName);
const runnerSettings = storageService.joinPath(envDir, "settings.json");
await storageService.save(JSON.stringify(this.runnerSettings), runnerSettings);
if (this.isDeveloping) {
let trialToolsPath = path.join(__dirname, "../../../../../tools/nni_trial_tool");
if (false === fs.existsSync(trialToolsPath)) {
trialToolsPath = path.join(__dirname, "..\\..\\..\\..\\..\\tools\\nni_trial_tool");
}
await storageService.copyDirectory(trialToolsPath, envDir, true);
}
}
this.log.info(`TrialDispatcher: run loop started.`);
await Promise.all([
this.environmentMaintenanceLoop(),
this.trialManagementLoop(),
]);
}
public addTrialJobMetricListener(listener: (metric: TrialJobMetric) => void): void {
this.metricsEmitter.on('metric', listener);
}
public removeTrialJobMetricListener(listener: (metric: TrialJobMetric) => void): void {
this.metricsEmitter.off('metric', listener);
}
public get isMultiPhaseJobSupported(): boolean {
return true;
}
public async setClusterMetadata(key: string, value: string): Promise<void> {
switch (key) {
case TrialConfigMetadataKey.NNI_MANAGER_IP:
this.runnerSettings.nniManagerIP = (<NNIManagerIpConfig>JSON.parse(value)).nniManagerIp;
break;
case TrialConfigMetadataKey.VERSION_CHECK:
this.enableVersionCheck = (value === 'true' || value === 'True');
this.runnerSettings.nniManagerVersion = this.enableVersionCheck ? await getVersion() : '';
break;
case TrialConfigMetadataKey.LOG_COLLECTION:
this.runnerSettings.logCollection = value;
break;
case TrialConfigMetadataKey.TRIAL_CONFIG:
// TODO: support more storage types with better parameters.
this.trialConfig = <TrialConfig>JSON.parse(value);
this.runnerSettings.command = this.trialConfig.command;
// Validate to make sure codeDir doesn't have too many files
await validateCodeDir(this.trialConfig.codeDir);
break;
}
const environmentService = component.get<EnvironmentService>(EnvironmentService);
await environmentService.config(key, value);
}
public getClusterMetadata(_key: string): Promise<string> {
throw new Error('Not implemented!');
}
public async cleanUp(): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in cleanUp.`);
}
if (this.commandEmitter === undefined) {
throw new Error(`TrialDispatcher: commandEmitter shouldn't be undefined in cleanUp.`);
}
this.stopping = true;
const environmentService = component.get<EnvironmentService>(EnvironmentService);
const environments = [...this.environments.values()];
for (let index = 0; index < environments.length; index++) {
const environment = environments[index];
if (environment.isAlive === true) {
this.log.info(`stopping environment ${environment.id}...`);
await environmentService.stopEnvironment(environment);
await this.commandChannel.close(environment);
this.log.info(`stopped environment ${environment.id}.`);
}
}
this.commandEmitter.off("command", this.handleCommand);
this.commandChannel.stop();
}
private async environmentMaintenanceLoop(): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in environmentMaintenanceLoop.`);
}
const environmentService = component.get<EnvironmentService>(EnvironmentService);
while (!this.stopping) {
const environments: EnvironmentInformation[] = [];
for (const environment of this.environments.values()) {
if (environment.isAlive === true) {
environments.push(environment);
} else {
await this.commandChannel.close(environment);
}
}
await environmentService.refreshEnvironmentsStatus(environments);
environments.forEach((environment) => {
const oldIsAlive = environment.isAlive;
switch (environment.status) {
case 'WAITING':
case 'RUNNING':
case 'UNKNOWN':
environment.isAlive = true;
break;
default:
environment.isAlive = false;
break;
}
if (oldIsAlive !== environment.isAlive) {
this.log.debug(`set environment ${environment.id} isAlive from ${oldIsAlive} to ${environment.isAlive} due to status is ${environment.status}.`);
}
});
await delay(5000);
}
}
private async trialManagementLoop(): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in trialManagementLoop.`);
}
while (!this.stopping) {
await delay(2000);
const toRefreshedTrials: TrialDetail[] = [];
for (const trial of this.trials.values()) {
if (trial.status === "RUNNING" || trial.status === "WAITING" || trial.status === "UNKNOWN") {
toRefreshedTrials.push(trial);
}
}
if (toRefreshedTrials.length == 0) {
continue;
}
const waitingTrials: TrialDetail[] = [];
let liveTrialsCount = 0;
for (const trial of toRefreshedTrials) {
const currentStatus = trial.status;
switch (currentStatus) {
case "RUNNING":
{
const environment = trial.environment;
if (environment === undefined) {
this.log.error(`found running trial ${trial.id} has no environment, set trial to UNKNOWN.`);
trial.status = "UNKNOWN";
liveTrialsCount++;
continue;
}
const environmentStatus = environment.status;
// if any node exits, make sure the whole trial is stopped.
if (trial.nodes.size > 0) {
const completedCount = trial.nodes.size;
let finalStatus: TrialJobStatus = "SUCCEEDED";
let lastTimestamp: number | undefined;
this.log.debug(`found ${completedCount} completed trial node(s), nodeCount: ${environment.nodeCount}`);
// if some trial process doesn't exit, kill it so the environment is free for the next trial.
// for example, in Horovod the remaining process is just a sleep command and has no impact on the trial result.
if (environment.nodeCount > completedCount) {
this.log.info(`stop partial completed trial ${trial.id}`);
await this.commandChannel.sendCommand(environment, KILL_TRIAL_JOB, trial.id);
}
for (const node of trial.nodes.values()) {
if (node.status === "FAILED") {
finalStatus = "FAILED";
}
if (node.endTime !== undefined) {
if (lastTimestamp === undefined) {
lastTimestamp = node.endTime
} else {
lastTimestamp = Math.max(node.endTime, lastTimestamp);
}
}
}
trial.status = finalStatus;
if (lastTimestamp !== undefined) {
trial.endTime = lastTimestamp;
}
this.releaseEnvironment(trial);
} else if (environmentStatus !== "RUNNING") {
this.log.error(`found running trial ${trial.id} on '${environment.jobId}' with '${environmentStatus}', set trial to environment status.`);
this.releaseEnvironment(trial);
trial.status = environmentStatus;
} else {
liveTrialsCount++;
}
}
break;
case "WAITING":
case "UNKNOWN":
// handle it later, if there is a free environment.
waitingTrials.push(trial);
liveTrialsCount++;
break;
}
}
let liveEnvironmentsCount = 0;
const idleEnvironments: EnvironmentInformation[] = [];
this.environments.forEach((environment) => {
if (environment.isAlive === true) {
liveEnvironmentsCount++;
if (environment.status === "RUNNING" && environment.isIdle) {
idleEnvironments.push(environment);
}
}
});
while (idleEnvironments.length > 0 && waitingTrials.length > 0) {
const trial = waitingTrials.shift();
const idleEnvironment = idleEnvironments.shift();
if (trial !== undefined && idleEnvironment != undefined) {
await this.assignEnvironment(trial, idleEnvironment);
}
}
if (liveEnvironmentsCount < liveTrialsCount) {
this.log.info(`request new environment, since live trials ${liveTrialsCount} ` +
`is more than live environments ${liveEnvironmentsCount}`);
for (let index = 0; index < liveTrialsCount - liveEnvironmentsCount; index++) {
await this.requestEnvironment();
}
}
}
}
private async requestEnvironment(): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in requestEnvironment.`);
}
const environmentService = component.get<EnvironmentService>(EnvironmentService);
const envId = uniqueString(5);
const envName = `nni_exp_${this.experimentId}_env_${envId}`;
const environment = environmentService.createEnviornmentInfomation(envId, envName);
environment.command = `sh ../install_nni.sh && python3 -m nni_trial_tool.trial_runner`;
if (this.isDeveloping) {
environment.command = "[ -d \"nni_trial_tool\" ] && echo \"nni_trial_tool exists already\" || (mkdir ./nni_trial_tool && tar -xof ../nni_trial_tool.tar.gz -C ./nni_trial_tool) && pip3 install websockets && " + environment.command;
}
if (environmentService.hasStorageService) {
const storageService = component.get<StorageService>(StorageService);
environment.workingFolder = storageService.joinPath("envs", envId);
await storageService.createDirectory(environment.workingFolder);
}
this.environments.set(environment.id, environment);
await environmentService.startEnvironment(environment);
if (environment.status === "FAILED") {
environment.isIdle = false;
environment.isAlive = false;
throw new Error(`error on request environment ${environment.jobId}, please check log for more details.`);
} else {
environment.isIdle = true;
environment.isAlive = true;
}
await this.commandChannel.open(environment);
this.log.info(`requested environment ${environment.id} and job id is ${environment.jobId}.`);
}
private async assignEnvironment(trial: TrialDetail, environment: EnvironmentInformation): Promise<void> {
if (this.commandChannel === undefined) {
throw new Error(`TrialDispatcher: commandChannel shouldn't be undefined in assignEnvironment.`);
}
if (trial.environment) {
throw new Error(`trial ${trial.id} has already been assigned environment ${trial.environment.id}, and cannot be assigned to ${environment.id}!`);
}
if (environment.isIdle == false) {
throw new Error(`environment ${environment.id} is not idle, and cannot be assigned again!`);
}
this.log.info(`assigning environment ${environment.id} to trial ${trial.id}.`);
environment.isIdle = false;
trial.environment = environment;
trial.settings = {
trialId: trial.id,
sequenceId: trial.form.sequenceId,
parameter: trial.form.hyperParameters,
}
trial.startTime = Date.now();
trial.status = "RUNNING";
await this.commandChannel.sendCommand(trial.environment, NEW_TRIAL_JOB, trial.settings);
}
private releaseEnvironment(trial: TrialDetail): void {
if (!trial.environment) {
throw new Error(`environment is not assigned to trial ${trial.id}, and cannot be released!`);
}
if (trial.environment.isIdle) {
throw new Error(`environment ${trial.environment.id} is idle already!`);
}
trial.environment.isIdle = true;
trial.environment = undefined;
}
private async handleMetricData(trialId: string, data: any): Promise<void> {
if (Array.isArray(data)) {
for (const subItem of data) {
this.metricsEmitter.emit('metric', {
id: trialId,
data: subItem
});
}
} else {
this.metricsEmitter.emit('metric', {
id: trialId,
data: data
});
}
}
private async handleStdout(commandData: any): Promise<void> {
const trialLogDir: string = path.join(getExperimentRootDir(), 'trials', commandData["trial"]);
mkDirPSync(trialLogDir);
const trialLogPath: string = path.join(trialLogDir, 'stdout_log_collection.log');
try {
let skipLogging: boolean = false;
if (commandData["tag"] === 'trial' && commandData["msg"] !== undefined) {
const message = commandData["msg"];
const metricsContent: any = message.match(this.NNI_METRICS_PATTERN);
if (metricsContent && metricsContent.groups) {
const key: string = 'metrics';
const data = metricsContent.groups[key];
const metricData = JSON.parse('"' + data.split('"').join('\\"') + '"');
await this.handleMetricData(commandData["trial"], metricData);
skipLogging = true;
}
}
if (!skipLogging) {
// Construct write stream to write remote trial's log into local file
const writeStream: Writable = fs.createWriteStream(trialLogPath, {
flags: 'a+',
encoding: 'utf8',
autoClose: true
});
writeStream.write(String.Format('{0}\n', commandData["msg"]));
writeStream.end();
}
} catch (err) {
this.log.error(`TrialDispatcher: handleStdout error: ${err}`);
}
}
private async handleCommand(command: Command): Promise<void> {
this.log.debug(`TrialDispatcher: env ${command.environment.id} received command ${command.command}, data: ${command.data}`);
const environment = command.environment;
const data = command.data;
const nodeId = data["node"];
switch (command.command) {
case REPORT_METRIC_DATA:
this.log.error(`TrialDispatcher: TODO: not implement to handle direct REPORT_METRIC_DATA command yet.`);
break;
case STDOUT:
await this.handleStdout(data);
break;
case INITIALIZED:
{
const oldStatus = environment.status;
let isAllReady = true;
if (environment.nodeCount > 1) {
let node = environment.nodes.get(nodeId);
if (node === undefined) {
node = new NodeInfomation(nodeId);
environment.nodes.set(nodeId, node);
}
const oldNodeStatus = node.status;
if (oldNodeStatus === "UNKNOWN" || oldNodeStatus === "WAITING") {
node.status = "RUNNING";
}
if (environment.nodes.size === environment.nodeCount) {
for (const node of environment.nodes.values()) {
if (node.status !== "RUNNING") {
isAllReady = false;
break;
}
}
} else {
isAllReady = false;
}
}
// a single-node environment is always ready, so its status can be updated directly
if (isAllReady && oldStatus === "UNKNOWN") {
environment.status = "RUNNING";
this.log.info(`TrialDispatcher: env ${environment.id} received initialized message, old status: ${oldStatus}, new status: ${environment.status}.`);
}
}
break;
case VERSION_CHECK:
{
if (this.enableVersionCheck) {
const checkResultSuccess: boolean = data["tag"] === 'VCSuccess';
if (checkResultSuccess) {
this.log.info(`TrialDispatcher: Version check in trial runner succeeded!`);
} else {
const errorMessage = `TrialDispatcher: Version check error, ${data["msg"]}!`;
this.log.error(errorMessage);
}
}
}
break;
case GPU_INFO:
environment.gpuSummary.set(nodeId, <GPUSummary>(data));
break;
case TRIAL_END:
{
const trialId = data["trial"];
const trial = await this.getTrialJob(trialId);
const code = parseInt(data["code"], 10);
const timestamp = parseInt(data["time"], 10);
let exitStatus: TrialJobStatus = "SUCCEEDED";
if (code !== 0) {
exitStatus = "FAILED";
}
// record the node's exit status on the trial, so multi-node trials can aggregate per-node results
let node = trial.nodes.get(nodeId);
if (node === undefined) {
node = new NodeInfomation(nodeId);
trial.nodes.set(nodeId, node);
}
if (undefined === node) {
throw new Error("node cannot be undefined here (see the assignment above); this check only keeps eslint happy.");
}
node.status = exitStatus;
node.endTime = timestamp;
}
break;
}
}
}
export { TrialDispatcher };
......@@ -11,14 +11,12 @@
"@babel/code-frame@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e"
integrity sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==
dependencies:
"@babel/highlight" "^7.8.3"
"@babel/core@^7.7.5":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.9.0.tgz#ac977b538b77e132ff706f3b8a4dbad09c03c56e"
integrity sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/generator" "^7.9.0"
......@@ -40,7 +38,6 @@
"@babel/generator@^7.9.0":
version "7.9.4"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.9.4.tgz#12441e90c3b3c4159cdecf312075bf1a8ce2dbce"
integrity sha512-rjP8ahaDy/ouhrvCoU1E5mqaitWrxwuNGU+dy1EpaoK48jZay4MdkskKGIMHLZNewg8sAsqpGSREJwP0zH3YQA==
dependencies:
"@babel/types" "^7.9.0"
jsesc "^2.5.1"
......@@ -50,7 +47,6 @@
"@babel/helper-function-name@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca"
integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA==
dependencies:
"@babel/helper-get-function-arity" "^7.8.3"
"@babel/template" "^7.8.3"
......@@ -59,28 +55,24 @@
"@babel/helper-get-function-arity@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5"
integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-member-expression-to-functions@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz#659b710498ea6c1d9907e0c73f206eee7dadc24c"
integrity sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-module-imports@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498"
integrity sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-module-transforms@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.9.0.tgz#43b34dfe15961918707d247327431388e9fe96e5"
integrity sha512-0FvKyu0gpPfIQ8EkxlrAydOWROdHpBmiCiRwLkUiBGhCUPRRbVD2/tm3sFr/c/GWFrQ/ffutGUAnx7V0FzT2wA==
dependencies:
"@babel/helper-module-imports" "^7.8.3"
"@babel/helper-replace-supers" "^7.8.6"
......@@ -93,14 +85,12 @@
"@babel/helper-optimise-call-expression@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz#7ed071813d09c75298ef4f208956006b6111ecb9"
integrity sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-replace-supers@^7.8.6":
version "7.8.6"
resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz#5ada744fd5ad73203bf1d67459a27dcba67effc8"
integrity sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA==
dependencies:
"@babel/helper-member-expression-to-functions" "^7.8.3"
"@babel/helper-optimise-call-expression" "^7.8.3"
......@@ -110,7 +100,6 @@
"@babel/helper-simple-access@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz#7f8109928b4dab4654076986af575231deb639ae"
integrity sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw==
dependencies:
"@babel/template" "^7.8.3"
"@babel/types" "^7.8.3"
......@@ -118,19 +107,16 @@
"@babel/helper-split-export-declaration@^7.8.3":
version "7.8.3"
resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9"
integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==
dependencies:
"@babel/types" "^7.8.3"
"@babel/helper-validator-identifier@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.9.0.tgz#ad53562a7fc29b3b9a91bbf7d10397fd146346ed"
integrity sha512-6G8bQKjOh+of4PV/ThDm/rRqlU7+IGoJuofpagU5GlEl29Vv0RGqqt86ZGRV8ZuSOY3o+8yXl5y782SMcG7SHw==
"@babel/helpers@^7.9.0":
version "7.9.2"
resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.9.2.tgz#b42a81a811f1e7313b88cba8adc66b3d9ae6c09f"
integrity sha512-JwLvzlXVPjO8eU9c/wF9/zOIN7X6h8DYf7mG4CiFRZRvZNKEF5dQ3H3V+ASkHoIB3mWhatgl5ONhyqHRI6MppA==
dependencies:
"@babel/template" "^7.8.3"
"@babel/traverse" "^7.9.0"
......@@ -147,7 +133,6 @@
"@babel/highlight@^7.8.3":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.9.0.tgz#4e9b45ccb82b79607271b2979ad82c7b68163079"
integrity sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==
dependencies:
"@babel/helper-validator-identifier" "^7.9.0"
chalk "^2.0.0"
......@@ -156,12 +141,10 @@
"@babel/parser@^7.7.5", "@babel/parser@^7.8.6", "@babel/parser@^7.9.0":
version "7.9.4"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.9.4.tgz#68a35e6b0319bbc014465be43828300113f2f2e8"
integrity sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA==
"@babel/template@^7.7.4", "@babel/template@^7.8.3", "@babel/template@^7.8.6":
version "7.8.6"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.8.6.tgz#86b22af15f828dfb086474f964dcc3e39c43ce2b"
integrity sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/parser" "^7.8.6"
......@@ -170,7 +153,6 @@
"@babel/traverse@^7.7.4", "@babel/traverse@^7.8.6", "@babel/traverse@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.9.0.tgz#d3882c2830e513f4fe4cec9fe76ea1cc78747892"
integrity sha512-jAZQj0+kn4WTHO5dUZkZKhbFrqZE7K5LAQ5JysMnmvGij+wOdr+8lWqPeW0BcF4wFwrEXXtdGO7wcV6YPJcf3w==
dependencies:
"@babel/code-frame" "^7.8.3"
"@babel/generator" "^7.9.0"
......@@ -185,7 +167,6 @@
"@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.9.0":
version "7.9.0"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.9.0.tgz#00b064c3df83ad32b2dbf5ff07312b15c7f1efb5"
integrity sha512-BS9JKfXkzzJl8RluW4JGknzpiUV7ZrvTayM6yfqLTVBEnFtyowVIOu6rqxRd5cVO6yGoWf4T8u8dgK9oB+GCng==
dependencies:
"@babel/helper-validator-identifier" "^7.9.0"
lodash "^4.17.13"
......@@ -194,7 +175,6 @@
"@istanbuljs/load-nyc-config@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz#10602de5570baea82f8afbfa2630b24e7a8cfe5b"
integrity sha512-ZR0rq/f/E4f4XcgnDvtMWXCUJpi8eO0rssVhmztsZqLIEFA9UUP9zmpE0VxlM+kv/E1ul2I876Fwil2ayptDVg==
dependencies:
camelcase "^5.3.1"
find-up "^4.1.0"
......@@ -204,7 +184,6 @@
"@istanbuljs/schema@^0.1.2":
version "0.1.2"
resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.2.tgz#26520bf09abe4a5644cd5414e37125a8954241dd"
integrity sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw==
"@sindresorhus/is@^0.7.0":
version "0.7.0"
......@@ -234,7 +213,6 @@
"@types/color-name@^1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
"@types/connect@*":
version "3.4.32"
......@@ -465,6 +443,12 @@
version "2.3.3"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-2.3.3.tgz#7f226d67d654ec9070e755f46daebf014628e9d9"
"@types/ws@^7.2.5":
version "7.2.5"
resolved "https://registry.yarnpkg.com/@types/ws/-/ws-7.2.5.tgz#513f28b04a1ea1aa9dc2cad3f26e8e37c88aae49"
dependencies:
"@types/node" "*"
"@typescript-eslint/eslint-plugin@^2.10.0":
version "2.10.0"
resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.10.0.tgz#c4cb103275e555e8a7e9b3d14c5951eb6d431e70"
......@@ -529,7 +513,6 @@ acorn-jsx@^5.1.0:
acorn@>=7.1.1, acorn@^7.1.0:
version "7.1.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.1.1.tgz#e35668de0b402f359de515c5482a1ab9f89a69bf"
integrity sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==
agent-base@4, agent-base@^4.3.0:
version "4.3.0"
......@@ -559,7 +542,6 @@ aggregate-error@^1.0.0:
aggregate-error@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.0.1.tgz#db2fe7246e536f40d9b5442a39e117d7dd6a24e0"
integrity sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==
dependencies:
clean-stack "^2.0.0"
indent-string "^4.0.0"
......@@ -591,7 +573,6 @@ ansi-align@^2.0.0:
ansi-colors@3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==
ansi-escapes@^4.2.1:
version "4.3.0"
......@@ -624,7 +605,6 @@ ansi-styles@^3.2.0, ansi-styles@^3.2.1:
ansi-styles@^4.0.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359"
integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==
dependencies:
"@types/color-name" "^1.1.1"
color-convert "^2.0.1"
......@@ -640,7 +620,6 @@ ansistyles@~0.1.3:
anymatch@~3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
dependencies:
normalize-path "^3.0.0"
picomatch "^2.0.4"
......@@ -648,7 +627,6 @@ anymatch@~3.1.1:
append-transform@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12"
integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==
dependencies:
default-require-extensions "^3.0.0"
......@@ -779,7 +757,6 @@ bin-links@^1.1.2, bin-links@^1.1.6:
binary-extensions@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c"
integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==
bluebird@^3.5.1, bluebird@^3.5.3, bluebird@^3.5.5:
version "3.7.2"
......@@ -828,7 +805,6 @@ brace-expansion@^1.1.7:
braces@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
dependencies:
fill-range "^7.0.1"
......@@ -899,7 +875,6 @@ cacheable-request@^2.1.1:
caching-transform@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f"
integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==
dependencies:
hasha "^5.0.0"
make-dir "^3.0.0"
......@@ -925,7 +900,6 @@ camelcase@^4.0.0, camelcase@^4.1.0:
camelcase@^5.0.0, camelcase@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
capture-stack-trace@^1.0.0:
version "1.0.1"
......@@ -987,7 +961,6 @@ child-process-promise@^2.2.1:
chokidar@3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6"
integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==
dependencies:
anymatch "~3.1.1"
braces "~3.0.2"
......@@ -1033,7 +1006,6 @@ clean-stack@^1.0.0:
clean-stack@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
cli-boxes@^1.0.0:
version "1.0.0"
......@@ -1076,7 +1048,6 @@ cliui@^4.0.0:
cliui@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
dependencies:
string-width "^3.1.0"
strip-ansi "^5.2.0"
......@@ -1085,7 +1056,6 @@ cliui@^5.0.0:
cliui@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
dependencies:
string-width "^4.2.0"
strip-ansi "^6.0.0"
......@@ -1125,7 +1095,6 @@ color-convert@^1.9.0:
color-convert@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
dependencies:
color-name "~1.1.4"
......@@ -1136,7 +1105,6 @@ color-name@1.1.1:
color-name@~1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
colors@^1.1.2:
version "1.4.0"
......@@ -1211,7 +1179,6 @@ content-type@~1.0.4:
convert-source-map@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442"
integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==
dependencies:
safe-buffer "~5.1.1"
......@@ -1272,7 +1239,6 @@ cross-spawn@^6.0.0, cross-spawn@^6.0.5:
cross-spawn@^7.0.0:
version "7.0.1"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.1.tgz#0ab56286e0f7c24e153d04cc2aa027e43a9a5d14"
integrity sha512-u7v4o84SwFpD32Z8IIcPZ6z1/ie24O6RU3RbtL5Y316l3KuHVPx9ItBgWQ6VlfAFnRnTtMUrsQ9MUUTuEZjogg==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"
......@@ -1355,7 +1321,6 @@ deepmerge@^2.1.1:
default-require-extensions@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96"
integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==
dependencies:
strip-bom "^4.0.0"
......@@ -1766,7 +1731,6 @@ file-entry-cache@^5.0.1:
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
dependencies:
to-regex-range "^5.0.1"
......@@ -1785,7 +1749,6 @@ finalhandler@1.1.1:
find-cache-dir@^3.2.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880"
integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==
dependencies:
commondir "^1.0.1"
make-dir "^3.0.2"
......@@ -1810,7 +1773,6 @@ find-up@^2.1.0:
find-up@^4.0.0, find-up@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
dependencies:
locate-path "^5.0.0"
path-exists "^4.0.0"
......@@ -1826,7 +1788,6 @@ flat-cache@^2.0.1:
flat@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2"
integrity sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==
dependencies:
is-buffer "~2.0.3"
......@@ -1850,7 +1811,6 @@ flush-write-stream@^1.0.0:
foreground-child@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53"
integrity sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==
dependencies:
cross-spawn "^7.0.0"
signal-exit "^3.0.2"
......@@ -1892,7 +1852,6 @@ from2@^2.1.0, from2@^2.1.1:
fromentries@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.2.0.tgz#e6aa06f240d6267f913cea422075ef88b63e7897"
integrity sha512-33X7H/wdfO99GdRLLgkjUrD4geAFdq/Uv0kl3HD4da6HDixd2GUg8Mw7dahLCV9r/EARkmtYBB6Tch4EEokFTQ==
fs-minipass@^1.2.5:
version "1.2.5"
......@@ -1931,7 +1890,6 @@ fs.realpath@^1.0.0:
fsevents@~2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805"
integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==
function-bind@^1.1.1:
version "1.1.1"
......@@ -1961,7 +1919,6 @@ genfun@^5.0.0:
gensync@^1.0.0-beta.1:
version "1.0.0-beta.1"
resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269"
integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg==
gentle-fs@^2.3.0:
version "2.3.0"
......@@ -1986,7 +1943,6 @@ get-caller-file@^1.0.1:
get-caller-file@^2.0.1:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
get-func-name@^2.0.0:
version "2.0.0"
......@@ -2024,7 +1980,6 @@ glob-parent@^5.0.0:
glob-parent@~5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
dependencies:
is-glob "^4.0.1"
......@@ -2162,7 +2117,6 @@ has-flag@^3.0.0:
has-flag@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
has-symbol-support-x@^1.4.1:
version "1.4.2"
......@@ -2198,7 +2152,6 @@ hash-base@^3.0.0:
hasha@^5.0.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.2.0.tgz#33094d1f69c40a4a6ac7be53d5fe3ff95a269e0c"
integrity sha512-2W+jKdQbAdSIrggA8Q35Br8qKadTrqCTC8+XZvBWepKDK6m9XkX6Iz1a2yh2KP01kzAR/dpuMeUnocoLYDcskw==
dependencies:
is-stream "^2.0.0"
type-fest "^0.8.0"
......@@ -2206,7 +2159,6 @@ hasha@^5.0.0:
he@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
hoek@2.x.x, hoek@^4.2.1:
version "4.2.1"
......@@ -2223,7 +2175,6 @@ hosted-git-info@^2.7.1, hosted-git-info@^2.8.5:
html-escaper@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
http-cache-semantics@3.8.1, http-cache-semantics@^3.8.1:
version "3.8.1"
......@@ -2336,7 +2287,6 @@ indent-string@^3.0.0:
indent-string@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
infer-owner@^1.0.3, infer-owner@^1.0.4:
version "1.0.4"
......@@ -2418,14 +2368,12 @@ ipaddr.js@1.6.0:
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
dependencies:
binary-extensions "^2.0.0"
is-buffer@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.4.tgz#3e572f23c8411a5cfd9557c849e3665e0b290623"
integrity sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==
is-callable@^1.1.4:
version "1.1.4"
......@@ -2491,7 +2439,6 @@ is-npm@^1.0.0:
is-number@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
is-obj@^1.0.0:
version "1.0.1"
......@@ -2540,7 +2487,6 @@ is-stream@^1.0.0, is-stream@^1.1.0:
is-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
is-symbol@^1.0.2:
version "1.0.3"
......@@ -2555,7 +2501,6 @@ is-typedarray@^1.0.0, is-typedarray@~1.0.0:
is-windows@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
is@^3.2.1:
version "3.3.0"
......@@ -2584,19 +2529,16 @@ isstream@~0.1.2:
istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.0.0-alpha.1:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec"
integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==
istanbul-lib-hook@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6"
integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==
dependencies:
append-transform "^2.0.0"
istanbul-lib-instrument@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.1.tgz#61f13ac2c96cfefb076fe7131156cc05907874e6"
integrity sha512-imIchxnodll7pvQBYOqUu88EufLCU56LMeFPZZM/fJZ1irYcYdqroaV+ACK1Ila8ls09iEYArp+nqyC6lW1Vfg==
dependencies:
"@babel/core" "^7.7.5"
"@babel/parser" "^7.7.5"
......@@ -2609,7 +2551,6 @@ istanbul-lib-instrument@^4.0.0:
istanbul-lib-processinfo@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz#e1426514662244b2f25df728e8fd1ba35fe53b9c"
integrity sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==
dependencies:
archy "^1.0.0"
cross-spawn "^7.0.0"
......@@ -2622,7 +2563,6 @@ istanbul-lib-processinfo@^2.0.2:
istanbul-lib-report@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==
dependencies:
istanbul-lib-coverage "^3.0.0"
make-dir "^3.0.0"
......@@ -2631,7 +2571,6 @@ istanbul-lib-report@^3.0.0:
istanbul-lib-source-maps@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9"
integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==
dependencies:
debug "^4.1.1"
istanbul-lib-coverage "^3.0.0"
......@@ -2721,7 +2660,6 @@ json-stringify-safe@~5.0.1:
json5@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.2.tgz#43ef1f0af9835dd624751a6b7fa48874fb2d608e"
integrity sha512-MoUOQ4WdiN3yxhm7NEVJSJrieAo5hNSLQ5sj05OTRHPL9HOBy8u4Bu88jsC1jvqAdN+E1bJmsUcZH+1HQxliqQ==
dependencies:
minimist "^1.2.5"
......@@ -2924,7 +2862,6 @@ locate-path@^3.0.0:
locate-path@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
dependencies:
p-locate "^4.1.0"
......@@ -3023,7 +2960,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15:
log-symbols@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4"
integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==
dependencies:
chalk "^2.4.2"
......@@ -3068,7 +3004,6 @@ make-dir@^1.0.0:
make-dir@^3.0.0, make-dir@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.0.2.tgz#04a1acbf22221e1d6ef43559f43e05a90dbb4392"
integrity sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==
dependencies:
semver "^6.0.0"
......@@ -3170,7 +3105,6 @@ minimatch@3.0.4, minimatch@^3.0.4:
minimist@^1.2.0, minimist@^1.2.5:
version "1.2.5"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
integrity sha1-Z9ZgFLZqaoqqDAg8X9WN9OTpdgI=
minipass@^2.2.1, minipass@^2.3.3:
version "2.3.3"
......@@ -3231,7 +3165,6 @@ mississippi@^3.0.0:
mkdirp@0.5.3, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
version "0.5.3"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.3.tgz#5a514b7179259287952881e94410ec5465659f8c"
integrity sha512-P+2gwrFqx8lhew375MQHHeTlY8AuOJSrGf0R5ddkEndUkmwpgUob/vQuBD1V22/Cw1/lJr4x+EjllSezBThzBg==
dependencies:
minimist "^1.2.5"
......@@ -3243,7 +3176,6 @@ mkdirp@^1.0.3:
mocha@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/mocha/-/mocha-7.1.1.tgz#89fbb30d09429845b1bb893a830bf5771049a441"
integrity sha512-3qQsu3ijNS3GkWcccT5Zw0hf/rWvu1fTN9sPvEd81hlwsr30GX2GcDSSoBxo24IR8FelmrAydGC6/1J5QQP4WA==
dependencies:
ansi-colors "3.2.3"
browser-stdout "1.3.1"
......@@ -3328,7 +3260,6 @@ nice-try@^1.0.4:
node-environment-flags@1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
......@@ -3399,7 +3330,6 @@ node-pre-gyp@^0.10.3:
node-preload@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301"
integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==
dependencies:
process-on-spawn "^1.0.0"
......@@ -3439,7 +3369,6 @@ normalize-package-data@^2.0.0, normalize-package-data@^2.4.0, normalize-package-
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
normalize-url@2.0.1:
version "2.0.1"
......@@ -3875,7 +3804,6 @@ p-limit@^2.0.0:
p-limit@^2.2.0:
version "2.2.2"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e"
integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==
dependencies:
p-try "^2.0.0"
......@@ -3894,14 +3822,12 @@ p-locate@^3.0.0:
p-locate@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
dependencies:
p-limit "^2.2.0"
p-map@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d"
integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==
dependencies:
aggregate-error "^3.0.0"
......@@ -3928,7 +3854,6 @@ p-try@^2.0.0:
package-hash@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506"
integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==
dependencies:
graceful-fs "^4.1.15"
hasha "^5.0.0"
......@@ -4014,7 +3939,6 @@ path-exists@^3.0.0:
path-exists@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
path-is-absolute@^1.0.0:
version "1.0.1"
......@@ -4031,7 +3955,6 @@ path-key@^2.0.0, path-key@^2.0.1:
path-key@^3.1.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
path-parse@^1.0.6:
version "1.0.6"
......@@ -4052,7 +3975,6 @@ performance-now@^2.1.0:
picomatch@^2.0.4:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
pify@^2.0.0:
version "2.3.0"
......@@ -4075,7 +3997,6 @@ pinkie@^2.0.0:
pkg-dir@^4.1.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
dependencies:
find-up "^4.0.0"
......@@ -4102,7 +4023,6 @@ process-nextick-args@~2.0.0:
process-on-spawn@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.0.0.tgz#95b05a23073d30a17acfdc92a440efd2baefdc93"
integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==
dependencies:
fromentries "^1.2.0"
......@@ -4355,7 +4275,6 @@ readdir-scoped-modules@^1.0.0, readdir-scoped-modules@^1.1.0:
readdirp@~3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839"
integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==
dependencies:
picomatch "^2.0.4"
......@@ -4459,7 +4378,6 @@ require-main-filename@^1.0.1:
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
resolve-from@^4.0.0:
version "4.0.0"
......@@ -4468,7 +4386,6 @@ resolve-from@^4.0.0:
resolve-from@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
resolve@^1.10.0:
version "1.13.1"
......@@ -4479,7 +4396,6 @@ resolve@^1.10.0:
resolve@^1.3.2:
version "1.15.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.15.1.tgz#27bdcdeffeaf2d6244b95bb0f9f4b4653451f3e8"
integrity sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==
dependencies:
path-parse "^1.0.6"
......@@ -4525,7 +4441,6 @@ rimraf@^2.6.1, rimraf@^2.6.2:
rimraf@^3.0.0:
version "3.0.2"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
dependencies:
glob "^7.1.3"
......@@ -4657,7 +4572,6 @@ shebang-command@^1.2.0:
shebang-command@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
dependencies:
shebang-regex "^3.0.0"
......@@ -4668,7 +4582,6 @@ shebang-regex@^1.0.0:
shebang-regex@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
signal-exit@^3.0.0, signal-exit@^3.0.2:
version "3.0.2"
......@@ -4743,7 +4656,6 @@ source-map@^0.6.0, source-map@^0.6.1:
spawn-wrap@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e"
integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==
dependencies:
foreground-child "^2.0.0"
is-windows "^1.0.2"
......@@ -4959,7 +4871,6 @@ strip-ansi@^6.0.0:
strip-bom@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878"
integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==
strip-eof@^1.0.0:
version "1.0.0"
......@@ -4976,7 +4887,6 @@ strip-json-comments@^3.0.1:
supports-color@6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
dependencies:
has-flag "^3.0.0"
......@@ -4989,7 +4899,6 @@ supports-color@^5.3.0:
supports-color@^7.1.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
dependencies:
has-flag "^4.0.0"
......@@ -5051,7 +4960,6 @@ term-size@^1.2.0:
test-exclude@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e"
integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==
dependencies:
"@istanbuljs/schema" "^0.1.2"
glob "^7.1.4"
......@@ -5093,7 +5001,6 @@ to-fast-properties@^2.0.0:
to-regex-range@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
dependencies:
is-number "^7.0.0"
......@@ -5185,7 +5092,6 @@ type-is@~1.6.15, type-is@~1.6.16:
typedarray-to-buffer@^3.1.5:
version "3.1.5"
resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
dependencies:
is-typedarray "^1.0.0"
......@@ -5369,7 +5275,6 @@ which@1.3.1, which@^1.2.9, which@^1.3.0, which@^1.3.1:
which@^2.0.1:
version "2.0.2"
resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
dependencies:
isexe "^2.0.0"
......@@ -5405,7 +5310,6 @@ wrap-ansi@^2.0.0:
wrap-ansi@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
dependencies:
ansi-styles "^3.2.0"
string-width "^3.0.0"
......@@ -5414,7 +5318,6 @@ wrap-ansi@^5.1.0:
wrap-ansi@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
......@@ -5443,7 +5346,6 @@ write-file-atomic@^2.3.0, write-file-atomic@^2.4.3:
write-file-atomic@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
dependencies:
imurmurhash "^0.1.4"
is-typedarray "^1.0.0"
......@@ -5462,6 +5364,10 @@ ws@^6.0.0:
dependencies:
async-limiter "~1.0.0"
ws@^7.3.0:
version "7.3.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.0.tgz#4b2f7f219b3d3737bc1a2fbf145d825b94d38ffd"
xdg-basedir@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4"
......@@ -5508,7 +5414,6 @@ yallist@^4.0.0:
yargs-parser@13.1.2, yargs-parser@^13.1.2:
version "13.1.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38"
integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
......@@ -5516,7 +5421,6 @@ yargs-parser@13.1.2, yargs-parser@^13.1.2:
yargs-parser@^18.1.1:
version "18.1.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.2.tgz#2f482bea2136dbde0861683abea7756d30b504f1"
integrity sha512-hlIPNR3IzC1YuL1c2UwwDKpXlNFBqD1Fswwh1khz5+d8Cq/8yc/Mn0i+rQXduu8hcrFKvO7Eryk+09NecTQAAQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
......@@ -5530,7 +5434,6 @@ yargs-parser@^9.0.2:
yargs-unparser@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
dependencies:
flat "^4.1.0"
lodash "^4.17.15"
......@@ -5539,7 +5442,6 @@ yargs-unparser@1.6.0:
yargs@13.3.2, yargs@^13.3.0:
version "13.3.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==
dependencies:
cliui "^5.0.0"
find-up "^3.0.0"
......@@ -5572,7 +5474,6 @@ yargs@^11.0.0:
yargs@^15.0.2:
version "15.3.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
......
# core
json_tricks
websockets
# hyperopt tuner
numpy
......
......@@ -49,10 +49,17 @@ testCases:
maxTrialNum: 2
trialConcurrency: 1
- name: mnist-pytorch
- name: mnist-pytorch-local
configFile: test/config/examples/mnist-pytorch.yml
# download data first, to prevent concurrent issue.
launchCommand: python3 ../examples/trials/mnist-pytorch/mnist.py --epochs 1 --batch_num 0 --data_dir ../examples/trials/mnist-pytorch/data && nnictl create --config $configFile --debug
trainingService: local
- name: mnist-pytorch-non-local
configFile: test/config/examples/mnist-pytorch.yml
# download data first, to prevent concurrent issue.
launchCommand: nnictl create --config $configFile --debug
trainingService: remote pai kubeflow frameworkcontroller dlts
- name: mnist-annotation
configFile: test/config/examples/mnist-annotation.yml
......
......@@ -203,11 +203,13 @@ pai_config_schema = {
'paiConfig': Or({
'userName': setType('userName', str),
'passWord': setType('passWord', str),
'host': setType('host', str)
'host': setType('host', str),
Optional('reuse'): setType('reuse', bool)
}, {
'userName': setType('userName', str),
'token': setType('token', str),
'host': setType('host', str)
'host': setType('host', str),
Optional('reuse'): setType('reuse', bool)
})
}
......
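For reference, a minimal sketch of the parsed paiConfig dict that this updated schema accepts when token authentication is used; the user name, token, and host below are placeholders, and only the optional reuse key is new.

# Hypothetical parsed paiConfig section (the dict that pai_config_schema validates).
pai_config = {
    'userName': 'pai_user',
    'token': '<openpai-access-token>',
    'host': 'https://openpai.example.com',
    'reuse': True,  # optional boolean switch for the reusable-environment mode
}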
......@@ -51,7 +51,7 @@ def rest_delete(url, timeout, show_error=False):
def check_rest_server(rest_port):
'''Check if restful server is ready'''
retry_count = 5
retry_count = 20
for _ in range(retry_count):
response = rest_get(check_status_url(rest_port), REST_TIME_OUT)
if response:
......@@ -60,7 +60,7 @@ def check_rest_server(rest_port):
else:
return False, response
else:
time.sleep(3)
time.sleep(1)
return False, response
def check_rest_server_quick(rest_port):
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import threading
import time
from abc import ABC, abstractmethod
from queue import Empty, Queue
from .log_utils import LogType, nni_log
from .commands import CommandType
INTERVAL_SECONDS = 0.5
class BaseChannel(ABC):
def __init__(self, args):
self.is_keep_parsed = args.node_count > 1
self.args = args
self.node_id = self.args.node_id
@abstractmethod
def _inner_send(self, message):
pass
@abstractmethod
def _inner_receive(self):
return []
@abstractmethod
def _inner_open(self):
pass
@abstractmethod
def _inner_close(self):
pass
def open(self):
# initialize receive, send threads.
self.is_running = True
self.receive_queue = Queue()
self.receive_thread = threading.Thread(target=self._receive_loop)
self.receive_thread.start()
self.send_queue = Queue()
self.send_thread = threading.Thread(target=self._send_loop)
self.send_thread.start()
self._inner_open()
client_info = {
"isReady": True,
"runnerId": self.args.runner_id,
"expId": self.args.exp_id,
}
nni_log(LogType.Info, 'Channel: sending ready information %s' % client_info)
self.send(CommandType.Initialized, client_info)
def close(self):
self.is_running = False
self._inner_close()
def send(self, command, data):
"""Send a command to the Training Service.
command: CommandType object.
data: JSON-serializable payload (a dict in normal operation).
The message is enqueued here and written out by the background send loop.
"""
if isinstance(data, dict):
# tag dict payloads with the node id, so the receiver knows which node sent them
data["node"] = self.node_id
data = json.dumps(data)
data = data.encode('utf8')
# frame: 2-byte command type + 14-digit zero-padded payload length + JSON payload
message = b'%b%014d%b' % (command.value, len(data), data)
self.send_queue.put(message)
def sent(self):
return self.send_queue.qsize() == 0
def received(self):
return self.receive_queue.qsize() > 0
def receive(self):
"""Receive a command from Training Service.
Returns a tuple of command (CommandType) and payload (str)
"""
command = None
data = None
try:
command_content = self.receive_queue.get(False)
if command_content is not None:
if len(command_content) < 16:
# invalid header: every command starts with a 16-byte header (2-byte type + 14-digit length)
nni_log(LogType.Error, 'incorrect command is found, a command must be at least 16 bytes!')
return None, None
header = command_content[:16]
command = CommandType(header[:2])
length = int(header[2:])
if len(command_content) - 16 != length:
nni_log(LogType.Error, 'incorrect command length, length {}, actual data length is {}, header {}.'
.format(length, len(command_content)-16, header))
return None, None
data = command_content[16:16+length]
data = json.loads(data.decode('utf8'))
if self.node_id is None:
nni_log(LogType.Info, 'Received command, header: [%s], data: [%s]' % (header, data))
else:
nni_log(LogType.Info, 'Received command(%s), header: [%s], data: [%s]' % (self.node_id, header, data))
except Empty:
# do nothing, if no command received.
pass
except Exception as identifier:
nni_log(LogType.Error, 'unhandled exception in base_channel: %s' % identifier)
return command, data
def _fetch_message(self, buffer, has_new_line=False):
messages = []
while len(buffer) >= 16:
header = buffer[:16]
length = int(header[2:])
message_length = length+16
total_length = message_length
if has_new_line:
total_length += 1
# break, if buffer is too short.
if len(buffer) < total_length:
break
data = buffer[16:message_length]
if has_new_line and buffer[total_length-1] != 10:
# 10 is the byte value of '\n'
nni_log(LogType.Error, 'end of message should be \\n, but got {}'.format(buffer[total_length-1]))
buffer = buffer[total_length:]
messages.append(header + data)
return messages, buffer
def _receive_loop(self):
while (self.is_running):
messages = self._inner_receive()
if messages is not None:
for message in messages:
self.receive_queue.put(message)
time.sleep(INTERVAL_SECONDS)
def _send_loop(self):
while (self.is_running):
message = None
try:
# no extra sleep needed: this is a blocking get with an INTERVAL_SECONDS timeout
message = self.send_queue.get(True, INTERVAL_SECONDS)
except Empty:
# nothing to send in this interval
pass
if message is not None:
self._inner_send(message)
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from enum import Enum
class CommandType(Enum):
Initialize = b'IN'
RequestTrialJobs = b'GE'
ReportMetricData = b'ME'
ReportGpuInfo = b'GI'
UpdateSearchSpace = b'SS'
ImportData = b'FD'
AddCustomizedTrialJob = b'AD'
TrialEnd = b'EN'
Terminate = b'TE'
Ping = b'PI'
Initialized = b'ID'
NewTrialJob = b'TR'
SendTrialJobParameter = b'SP'
NoMoreTrialJobs = b'NO'
KillTrialJob = b'KI'
StdOut = b'SO'
VersionCheck = b'VC'
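To make the wire format concrete, here is a small standalone sketch (an illustration, not part of the change) that frames and parses one command with the same layout used by BaseChannel.send and BaseChannel.receive: a 2-byte command type, a 14-digit zero-padded payload length, then the JSON payload. It assumes the CommandType enum defined above; the payload value is arbitrary.

import json

def encode_command(command, payload):
    # same frame layout as BaseChannel.send: 2-byte type + 14-digit length + JSON body
    data = json.dumps(payload).encode('utf8')
    return b'%b%014d%b' % (command.value, len(data), data)

def decode_command(frame):
    # mirrors the parsing in BaseChannel.receive: 16-byte header, then the body
    header = frame[:16]
    command = CommandType(header[:2])
    length = int(header[2:])
    return command, json.loads(frame[16:16 + length].decode('utf8'))

frame = encode_command(CommandType.ReportGpuInfo, "command1")
assert frame == b'GI00000000000010"command1"'
print(decode_command(frame))  # (<CommandType.ReportGpuInfo: b'GI'>, 'command1')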
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from .base_channel import BaseChannel
command_path = "./commands"
runner_commands_file_name_prefix = "runner_commands"
manager_commands_file_name = "manager_commands.txt"
class FileChannel(BaseChannel):
def __init__(self, args):
self.node_id = args.node_id
self.out_file = None
self.in_file = None
self.in_offset = 0
self.in_cache = b""
super(FileChannel, self).__init__(args)
def _inner_open(self):
pass
def _inner_close(self):
if self.out_file is not None:
self.out_file.close()
self.out_file = None
if self.in_file is not None:
self.in_file.close()
self.in_file = None
def _inner_send(self, message):
if self.out_file is None:
if not os.path.exists(command_path):
os.makedirs(command_path, exist_ok=True)
if self.node_id is None:
file_name = os.path.join(command_path, "%s.txt" % runner_commands_file_name_prefix)
else:
file_name = os.path.join(command_path, "%s_%s.txt" % (
runner_commands_file_name_prefix, self.node_id))
self.out_file = open(file_name, "ab")
self.out_file.write(message)
self.out_file.write(b'\n')
self.out_file.flush()
def _open_manager_command(self):
full_name = os.path.join(command_path, manager_commands_file_name)
if self.in_file is not None and self.in_file.closed:
self.in_file = None
if self.in_file is None and os.path.exists(full_name):
self.in_file = open(full_name, "rb")
self.in_file.seek(self.in_offset)
def _inner_receive(self):
messages = []
if self.in_file is None:
self._open_manager_command()
if self.in_file is not None:
self.in_file.seek(0, os.SEEK_END)
new_offset = self.in_file.tell()
self.in_file.seek(self.in_offset, os.SEEK_SET)
count = new_offset - self.in_offset
if count > 0:
self.in_cache += self.in_file.read(count)
self.in_offset = new_offset
messages, self.in_cache = self._fetch_message(self.in_cache, True)
return messages
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import subprocess
import time
import traceback
from xml.dom import minidom
def collect_gpu_usage(node_id):
cmd = 'nvidia-smi -q -x'.split()
info = None
try:
smi_output = subprocess.check_output(cmd)
info = parse_nvidia_smi_result(smi_output)
except Exception:
traceback.print_exc()
info = gen_empty_gpu_metric()
return info
def parse_nvidia_smi_result(smi):
try:
output = {}
xmldoc = minidom.parseString(smi)
gpuList = xmldoc.getElementsByTagName('gpu')
output["Timestamp"] = time.asctime(time.localtime())
output["gpuCount"] = len(gpuList)
output["gpuInfos"] = []
for gpuIndex, gpu in enumerate(gpuList):
gpuInfo = {}
gpuInfo['index'] = gpuIndex
gpuInfo['gpuUtil'] = gpu.getElementsByTagName('utilization')[0]\
.getElementsByTagName('gpu_util')[0]\
.childNodes[0].data.replace("%", "").strip()
gpuInfo['gpuMemUtil'] = gpu.getElementsByTagName('utilization')[0]\
.getElementsByTagName('memory_util')[0]\
.childNodes[0].data.replace("%", "").strip()
processes = gpu.getElementsByTagName('processes')
runningProNumber = len(processes[0].getElementsByTagName('process_info'))
gpuInfo['activeProcessNum'] = runningProNumber
gpuInfo['gpuType'] = gpu.getElementsByTagName('product_name')[0]\
.childNodes[0].data
memUsage = gpu.getElementsByTagName('fb_memory_usage')[0]
gpuInfo['gpuMemTotal'] = memUsage.getElementsByTagName('total')[0]\
.childNodes[0].data.replace("MiB", "").strip()
gpuInfo['gpuMemUsed'] = memUsage.getElementsByTagName('used')[0]\
.childNodes[0].data.replace("MiB", "").strip()
gpuInfo['gpuMemFree'] = memUsage.getElementsByTagName('free')[0]\
.childNodes[0].data.replace("MiB", "").strip()
output["gpuInfos"].append(gpuInfo)
except Exception:
traceback.print_exc()
output = {}
return output
def gen_empty_gpu_metric():
try:
output = {}
output["Timestamp"] = time.asctime(time.localtime())
output["gpuCount"] = 0
output["gpuInfos"] = []
except Exception:
traceback.print_exc()
output = {}
return output
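For reference, the dict produced by parse_nvidia_smi_result has the shape sketched below; all values here are made up, and the utilization and memory fields are strings because the parser strips the '%' and 'MiB' suffixes without converting to numbers.

# Illustrative example only; these are not real measurements.
example_gpu_info = {
    "Timestamp": "Tue Jun  9 10:00:00 2020",
    "gpuCount": 1,
    "gpuInfos": [
        {
            "index": 0,
            "gpuUtil": "35",         # percent, '%' stripped
            "gpuMemUtil": "20",      # percent, '%' stripped
            "activeProcessNum": 1,
            "gpuType": "Tesla K80",
            "gpuMemTotal": "11441",  # MiB, 'MiB' stripped
            "gpuMemUsed": "2288",
            "gpuMemFree": "9153",
        }
    ],
}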
......@@ -18,6 +18,8 @@ from queue import Queue
from .rest_utils import rest_post
from .url_utils import gen_send_stdout_url
from .commands import CommandType
@unique
class LogType(Enum):
......@@ -28,23 +30,28 @@ class LogType(Enum):
Error = 'ERROR'
Fatal = 'FATAL'
@unique
class StdOutputType(Enum):
Stdout = 'stdout',
Stderr = 'stderr'
def nni_log(log_type, log_message):
'''Log message into stdout'''
dt = datetime.now()
print('[{0}] {1} {2}'.format(dt, log_type.value, log_message), flush=True)
class NNIRestLogHanlder(StreamHandler):
def __init__(self, host, port, tag, std_output_type=StdOutputType.Stdout):
def __init__(self, host, port, tag, trial_id, channel, std_output_type=StdOutputType.Stdout):
StreamHandler.__init__(self)
self.host = host
self.port = port
self.tag = tag
self.std_output_type = std_output_type
self.trial_id = trial_id
self.channel = channel
self.orig_stdout = sys.__stdout__
self.orig_stderr = sys.__stderr__
......@@ -55,24 +62,32 @@ class NNIRestLogHanlder(StreamHandler):
log_entry['msg'] = self.format(record)
try:
rest_post(gen_send_stdout_url(self.host, self.port), json.dumps(log_entry), 10, True)
if self.channel is None:
rest_post(gen_send_stdout_url(self.host, self.port), json.dumps(log_entry), 10, True)
else:
if self.trial_id is not None:
log_entry["trial"] = self.trial_id
self.channel.send(CommandType.StdOut, log_entry)
except Exception as e:
self.orig_stderr.write(str(e) + '\n')
self.orig_stderr.flush()
class RemoteLogger(object):
"""
NNI remote logger
"""
def __init__(self, syslog_host, syslog_port, tag, std_output_type, log_collection, log_level=logging.INFO):
def __init__(self, syslog_host, syslog_port, tag, std_output_type, log_collection, trial_id=None, channel=None, log_level=logging.INFO):
'''
constructor
'''
self.logger = logging.getLogger('nni_syslog_{}'.format(tag))
self.log_level = log_level
self.logger.setLevel(self.log_level)
handler = NNIRestLogHanlder(syslog_host, syslog_port, tag)
self.logger.addHandler(handler)
self.pipeReader = None
self.handler = NNIRestLogHanlder(syslog_host, syslog_port, tag, trial_id, channel)
self.logger.addHandler(self.handler)
if std_output_type == StdOutputType.Stdout:
self.orig_stdout = sys.__stdout__
else:
......@@ -83,7 +98,8 @@ class RemoteLogger(object):
'''
Get pipe for remote logger
'''
return PipeLogReader(self.logger, self.log_collection, logging.INFO)
self.pipeReader = PipeLogReader(self.logger, self.log_collection, logging.INFO)
return self.pipeReader
def flush(self):
'''
......@@ -104,10 +120,22 @@ class RemoteLogger(object):
except Exception:
pass
def close(self):
'''
Close handlers and resources
'''
if self.pipeReader is not None:
self.pipeReader.set_process_exit()
for handler in self.logger.handlers:
handler.close()
self.logger.removeHandler(handler)
class PipeLogReader(threading.Thread):
"""
The reader thread reads log data from pipe
"""
def __init__(self, logger, log_collection, log_level=logging.INFO):
"""Setup the object with a logger and a loglevel
and start the thread
......@@ -129,7 +157,7 @@ class PipeLogReader(threading.Thread):
'''
Collect lines from 'stream' and put them in 'queue'.
'''
time.sleep(5)
time.sleep(1)
while True:
cur_process_exit = self.process_exit
try:
......
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
os.environ['NNI_PLATFORM'] = 'unittest'
os.environ['NNI_TRIAL_JOB_ID'] = 'test_trial_job_id'
os.environ["NNI_OUTPUT_DIR"] = "./unittest"
os.environ["NNI_SYS_DIR"] = "./unittest"
os.environ["NNI_EXP_ID"] = "test_exp_id"
os.environ["MULTI_PHASE"] = "true"
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import os
import random
import shutil
import string
import sys
import time
import unittest
from argparse import Namespace
from datetime import datetime
from tools.nni_trial_tool.base_channel import CommandType
from tools.nni_trial_tool.file_channel import (FileChannel, command_path,
manager_commands_file_name)
sys.path.append("..")
runner_file_name = "commands/runner_commands.txt"
manager_file_name = "commands/manager_commands.txt"
class FileChannelTest(unittest.TestCase):
def setUp(self):
self.args = Namespace()
self.args.node_count = 1
self.args.node_id = None
if os.path.exists(command_path):
shutil.rmtree(command_path)
def test_send(self):
fc = None
try:
fc = FileChannel(self.args)
fc.send(CommandType.ReportGpuInfo, "command1")
fc.send(CommandType.ReportGpuInfo, "command2")
self.check_timeout(2, lambda: os.path.exists(runner_file_name))
self.assertTrue(os.path.exists(runner_file_name))
with open(runner_file_name, "rb") as runner:
lines = runner.readlines()
self.assertListEqual(lines, [b'GI00000000000010"command1"\n', b'GI00000000000010"command2"\n'])
finally:
if fc is not None:
fc.close()
def test_send_multi_node(self):
fc1 = None
fc2 = None
try:
runner1_file_name = "commands/runner_commands_1.txt"
self.args.node_id = 1
fc1 = FileChannel(self.args)
fc1.send(CommandType.ReportGpuInfo, "command1")
# give the command enough time to be written out before the channel is closed.
runner2_file_name = "commands/runner_commands_2.txt"
self.args.node_id = 2
fc2 = FileChannel(self.args)
fc2.send(CommandType.ReportGpuInfo, "command1")
self.check_timeout(2, lambda: os.path.exists(runner1_file_name) and os.path.exists(runner2_file_name))
self.assertTrue(os.path.exists(runner1_file_name))
with open(runner1_file_name, "rb") as runner:
lines1 = runner.readlines()
self.assertTrue(os.path.exists(runner2_file_name))
with open(runner2_file_name, "rb") as runner:
lines2 = runner.readlines()
self.assertListEqual(lines1, [b'GI00000000000010"command1"\n'])
self.assertListEqual(lines2, [b'GI00000000000010"command1"\n'])
finally:
if fc1 is not None:
fc1.close()
if fc2 is not None:
fc2.close()
def test_receive(self):
fc = None
manager_file = None
try:
fc = FileChannel(self.args)
message = fc.receive()
self.assertEqual(message, (None, None))
os.mkdir(command_path)
manager_file = open(manager_file_name, "wb")
manager_file.write(b'TR00000000000009"manager"\n')
manager_file.flush()
self.check_timeout(2, lambda: fc.received())
message = fc.receive()
self.assertEqual(message, (CommandType.NewTrialJob, "manager"))
manager_file.write(b'TR00000000000010"manager2"\n')
manager_file.flush()
self.check_timeout(2, lambda: fc.received())
message = fc.receive()
self.assertEqual(message, (CommandType.NewTrialJob, "manager2"))
finally:
if fc is not None:
fc.close()
if manager_file is not None:
manager_file.close()
def check_timeout(self, timeout, callback):
interval = 0.01
start = datetime.now().timestamp()
count = int(timeout / interval)
for x in range(count):
if callback():
break
time.sleep(interval)
print("checked {} times, {:3F} seconds".format(x, datetime.now().timestamp()-start))
if __name__ == '__main__':
unittest.main()
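# Note (not part of the PR): the expected byte strings asserted in the tests above
# imply the file channel's line framing: a 2-character command type (e.g. GI for
# ReportGpuInfo, TR for NewTrialJob), a 14-digit zero-padded length of the
# JSON-encoded payload, the payload itself, and a trailing newline. The helpers
# below are a minimal sketch of that framing inferred from those assertions;
# pack_command and unpack_command are hypothetical names, not NNI APIs.
import json

def pack_command(command_type: bytes, data) -> bytes:
    # serialize the payload as JSON and prefix it with type and zero-padded length
    payload = json.dumps(data).encode("utf8")
    return command_type + str(len(payload)).zfill(14).encode("utf8") + payload + b"\n"

def unpack_command(line: bytes):
    # split a framed line back into (command type, payload)
    command_type = line[:2]
    length = int(line[2:16])
    data = json.loads(line[16:16 + length])
    return command_type, data

assert pack_command(b"GI", "command1") == b'GI00000000000010"command1"\n'
assert unpack_command(b'TR00000000000009"manager"\n') == (b"TR", "manager")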
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import ctypes
import os
import shlex
import tarfile
import time
from datetime import datetime
from subprocess import Popen
import psutil
from .log_utils import LogType, RemoteLogger, StdOutputType, nni_log
from .commands import CommandType
trial_output_path_name = ".nni"
class Trial:
def __init__(self, args, data):
self.process = None
self.data = data
self.args = args
self.command_channel = args.command_channel
self.trial_syslogger_stdout = None
global NNI_TRIAL_JOB_ID
self.id = data["trialId"]
if self.id is None:
raise Exception("trial_id is not found in %s" % data)
os.environ['NNI_TRIAL_JOB_ID'] = self.id
NNI_TRIAL_JOB_ID = self.id
# node_id is set for multi-node trials; if it's None, this is a single-node trial.
self.node_id = args.node_id
if self.node_id is None:
self.name = self.id
else:
self.name = "%s_%s" % (self.id, self.node_id)
def run(self):
# redirect trial's stdout and stderr to syslog
self.trial_syslogger_stdout = RemoteLogger(self.args.nnimanager_ip, self.args.nnimanager_port, 'trial', StdOutputType.Stdout,
self.args.log_collection, self.id, self.args.command_channel)
nni_log(LogType.Info, "%s: start to run trial" % self.name)
trial_working_dir = os.path.realpath(os.path.join(os.curdir, "..", "..", "trials", self.id))
self.trial_output_dir = os.path.join(trial_working_dir, trial_output_path_name)
trial_code_dir = os.path.join(trial_working_dir, "code")
trial_nnioutput_dir = os.path.join(trial_working_dir, "nnioutput")
environ = os.environ.copy()
environ['NNI_TRIAL_SEQ_ID'] = str(self.data["sequenceId"])
environ['NNI_OUTPUT_DIR'] = os.path.join(trial_working_dir, "nnioutput")
environ['NNI_SYS_DIR'] = trial_working_dir
self.working_dir = trial_working_dir
# prepare code and parameters
prepared_flag_file_name = os.path.join(trial_working_dir, "trial_prepared")
if not os.path.exists(trial_working_dir):
os.makedirs(trial_working_dir, exist_ok=True)
os.makedirs(self.trial_output_dir, exist_ok=True)
os.makedirs(trial_nnioutput_dir, exist_ok=True)
# prepare code
os.makedirs(trial_code_dir, exist_ok=True)
with tarfile.open(os.path.join("..", "nni-code.tar.gz"), "r:gz") as tar:
tar.extractall(trial_code_dir)
# save parameters
nni_log(LogType.Info, '%s: saving parameter %s' % (self.name, self.data["parameter"]["value"]))
parameter_file_name = os.path.join(trial_working_dir, "parameter.cfg")
with open(parameter_file_name, "w") as parameter_file:
parameter_file.write(self.data["parameter"]["value"])
# ready flag
with open(prepared_flag_file_name, "w") as prepared_flag_file:
prepared_flag_file.write("%s" % (int(datetime.now().timestamp() * 1000)))
# make sure the code has been prepared, possibly by another node.
if self.node_id is not None:
while True:
if os.path.exists(prepared_flag_file_name):
break
time.sleep(0.1)
self.log_pipe_stdout = self.trial_syslogger_stdout.get_pipelog_reader()
self.process = Popen(self.args.trial_command, shell=True, stdout=self.log_pipe_stdout,
stderr=self.log_pipe_stdout, cwd=trial_code_dir, env=dict(environ))
nni_log(LogType.Info, '{0}: spawns a subprocess (pid {1}) to run command: {2}'.
format(self.name, self.process.pid, shlex.split(self.args.trial_command)))
def save_parameter_file(self, command_data):
parameters = command_data["parameters"]
file_index = int(parameters["index"])
if file_index == 0:
parameter_file_name = "parameter.cfg"
else:
parameter_file_name = "parameter_{}.cfg".format(file_index)
parameter_file_name = os.path.join(self.working_dir, parameter_file_name)
with open(parameter_file_name, "w") as parameter_file:
nni_log(LogType.Info, '%s: saving parameter %s' % (self.name, parameters["value"]))
parameter_file.write(parameters["value"])
def is_running(self):
if self.process is None:
return False
retCode = self.process.poll()
# child worker process exits and all stdout data is read
if retCode is not None and self.log_pipe_stdout.set_process_exit() and self.log_pipe_stdout.is_read_completed:
# On Windows, the retCode -1 is reported as 4294967295, which is larger than c_long and raises OverflowError,
# so convert it to a signed int32.
retCode = ctypes.c_long(retCode).value
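# e.g. on Windows, where c_long is 32-bit, a raw exit code of 4294967295 wraps back to the signed value -1 here.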
nni_log(LogType.Info, '{0}: subprocess terminated. Exit code is {1}.'.format(self.name, retCode))
end_time = int(datetime.now().timestamp() * 1000)
end_message = {
"code": retCode,
"time": end_time,
"trial": self.id,
}
self.command_channel.send(CommandType.TrialEnd, end_message)
self.cleanup()
return False
else:
return True
def kill(self, trial_id=None):
if trial_id == self.id or trial_id is None:
if self.process is not None:
nni_log(LogType.Info, "%s: killing trial" % self.name)
for child in psutil.Process(self.process.pid).children(True):
child.kill()
self.process.kill()
self.cleanup()
def cleanup(self):
nni_log(LogType.Info, "%s: clean up trial" % self.name)
self.process = None
if self.log_pipe_stdout is not None:
self.log_pipe_stdout.set_process_exit()
self.log_pipe_stdout = None
if self.trial_syslogger_stdout is not None:
self.trial_syslogger_stdout.close()
self.trial_syslogger_stdout = None