"stubs/git@developer.sourcefind.cn:OpenDAS/fairscale.git" did not exist on "35d4129f81523c279fac193cffc909bb8214acec"
Unverified Commit 966041b2 authored by SparkSnail, committed by GitHub

PAI quota management (#1021)

parent 4ac1c3c5
...@@ -21,6 +21,7 @@ nnictl support commands:
* [nnictl tensorboard](#tensorboard)
* [nnictl package](#package)
* [nnictl --version](#version)
* [nnictl hdfs](#hdfs)
### Manage an experiment
...@@ -650,3 +651,37 @@ Debug mode will disable version check function in Trialkeeper.
```bash
nnictl --version
```
<a name="hdfs"></a>
![](https://placehold.it/15/1589F0/000000?text=+) `Manage hdfs`
* __nnictl hdfs set__
* Description
Set the host and user name of the HDFS cluster.
* Usage
```bash
nnictl hdfs set [OPTIONS]
```
* Options
|Name, shorthand|Required|Default|Description|
|------|------|------|------|
|--host| True| |The host IP of HDFS in the format xx.xx.xx.xx, for example, 10.10.10.10|
|--user_name| True| |The user name of the HDFS account|
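For example, `nnictl hdfs set --host 10.10.10.10 --user_name example_user` (the IP and user name here are placeholders) stores the account locally so that a later `nnictl hdfs clean` can reach the cluster.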
* __nnictl hdfs clean__
* Description
Clean up the code files that NNI automatically copied to HDFS. This command deletes all such files under the configured user name, that is, everything under `/{user_name}/nni/experiments`.
* Usage
```bash
nnictl hdfs clean
```
...@@ -23,6 +23,7 @@ import os
import json
import shutil
from .constants import NNICTL_HOME_DIR
from .common_utils import print_error
class Config:
    '''a util class to load and save config'''
...@@ -119,4 +120,26 @@ class Experiments:
                    return json.load(file)
            except ValueError:
                return {}
        return {}
\ No newline at end of file
class HDFSConfig:
    '''manage hdfs configuration'''
    def __init__(self):
        os.makedirs(NNICTL_HOME_DIR, exist_ok=True)
        self.hdfs_config_file = os.path.join(NNICTL_HOME_DIR, '.hdfs')

    def get_config(self):
        '''load hdfs config from the local file, return None if it is missing or unreadable'''
        if os.path.exists(self.hdfs_config_file):
            try:
                with open(self.hdfs_config_file, 'r') as file:
                    return json.load(file)
            except Exception as exception:
                print_error(exception)
                return None
        else:
            return None

    def set_config(self, host, user_name):
        '''save hdfs host and user name to the local file'''
        with open(self.hdfs_config_file, 'w') as file:
            json.dump({'host': host, 'userName': user_name}, file)
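A minimal usage sketch of the class above (illustrative, not part of the commit): `set_config` persists the account as JSON in the `.hdfs` file under `NNICTL_HOME_DIR`, and `get_config` returns it as a dict, or `None` when the file is absent or unreadable.

```python
# Illustrative round trip through HDFSConfig (values are placeholders);
# assumes the HDFSConfig class defined above is importable.
config = HDFSConfig()
config.set_config('10.10.10.10', 'example_user')
assert config.get_config() == {'host': '10.10.10.10', 'userName': 'example_user'}
```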
...@@ -194,6 +194,15 @@ def parse_args():
                            'the unit is second')
    parser_top.set_defaults(func=monitor_experiment)
    # parse hdfs subcommands
    parser_hdfs = subparsers.add_parser('hdfs', help='manage hdfs files')
    parser_hdfs_subparsers = parser_hdfs.add_subparsers()
    parser_hdfs_set = parser_hdfs_subparsers.add_parser('set', help='set the host and user name of hdfs')
    parser_hdfs_set.add_argument('--host', required=True, dest='host', help='the host of hdfs')
    parser_hdfs_set.add_argument('--user_name', required=True, dest='user_name', help='the user name of hdfs')
    parser_hdfs_set.set_defaults(func=hdfs_set)
    parser_hdfs_clean = parser_hdfs_subparsers.add_parser('clean', help='clean hdfs files')
    parser_hdfs_clean.set_defaults(func=hdfs_clean)
    args = parser.parse_args()
    args.func(args)
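The block above uses argparse's nested-subparser dispatch: each leaf parser binds its handler through `set_defaults(func=...)`, so the single `args.func(args)` call routes any subcommand to the right function. A minimal, self-contained sketch of the pattern (names and the hard-coded argv are illustrative, not from nnictl):

```python
import argparse

def demo_set(args):
    print('set host={0} user={1}'.format(args.host, args.user_name))

def demo_clean(args):
    print('clean requested')

parser = argparse.ArgumentParser(prog='demo')
subparsers = parser.add_subparsers()
# top-level 'hdfs' command with its own sub-subparsers, as in parse_args() above
parser_hdfs = subparsers.add_parser('hdfs', help='manage hdfs files')
hdfs_subparsers = parser_hdfs.add_subparsers()
parser_set = hdfs_subparsers.add_parser('set')
parser_set.add_argument('--host', required=True)
parser_set.add_argument('--user_name', required=True)
parser_set.set_defaults(func=demo_set)
parser_clean = hdfs_subparsers.add_parser('clean')
parser_clean.set_defaults(func=demo_clean)

args = parser.parse_args(['hdfs', 'set', '--host', '10.10.10.10', '--user_name', 'alice'])
args.func(args)  # prints: set host=10.10.10.10 user=alice
```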
...
...@@ -26,7 +26,8 @@ import datetime
import time
from subprocess import call, check_output
from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response
from pyhdfs import HdfsClient, HdfsFileNotFoundException
from .config_utils import Config, Experiments, HDFSConfig
from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url
from .constants import NNICTL_HOME_DIR, EXPERIMENT_INFORMATION_FORMAT, EXPERIMENT_DETAIL_FORMAT, \
    EXPERIMENT_MONITOR_INFO, TRIAL_MONITOR_HEAD, TRIAL_MONITOR_CONTENT, TRIAL_MONITOR_TAIL, REST_TIME_OUT
...@@ -505,3 +506,35 @@ def export_trials_data(args):
            print_error('Export failed...')
    else:
        print_error('Restful server is not Running')
def hdfs_set(args):
    '''store hdfs host and user name for later commands'''
    hdfs_config = HDFSConfig()
    hdfs_config.set_config(args.host, args.user_name)
    print_normal('HDFS account update success!')

def hdfs_clean(args):
    '''clean up the code files that nni copied to hdfs'''
    hdfs_config = HDFSConfig()
    config = hdfs_config.get_config()
    if not config:
        print_error('Please use \'nnictl hdfs set\' command to set hdfs account first!')
        exit(1)
    host = config.get('host')
    user_name = config.get('userName')
    hdfs_client = HdfsClient(hosts='{0}:80'.format(host), user_name=user_name, webhdfs_path='/webhdfs/api/v1', timeout=5)
    root_path = os.path.join('/', user_name, 'nni', 'experiments')
    # ask for confirmation before deleting anything
    while True:
        inputs = input('INFO: clean up all files in {0}, do you want to continue? [Y/N]: '.format(root_path))
        if inputs.lower() not in ['y', 'n', 'yes', 'no']:
            print_warning('Please input Y or N!')
        elif inputs.lower() in ['n', 'no']:
            exit(0)
        else:
            break
    path_list = hdfs_client.listdir(root_path)
    for path in path_list:
        full_path = os.path.join(root_path, path)
        print_normal('deleting {0}'.format(full_path))
        if hdfs_client.delete(full_path, recursive=True):
            print_normal('delete success!')
        else:
            print_error('delete failed!')
    print_normal('DONE')
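For reference, a rough sketch of the raw WebHDFS request that `hdfs_client.delete(full_path, recursive=True)` issues, assuming the same host, port 80, and `webhdfs_path` used above; `requests` stands in for pyhdfs here, and the helper name is hypothetical:

```python
import requests

# Hypothetical equivalent of hdfs_client.delete(full_path, recursive=True):
# WebHDFS deletes via HTTP DELETE with op=DELETE, and the JSON reply carries
# a 'boolean' field indicating whether the deletion succeeded.
def webhdfs_delete(host, user_name, path, recursive=True):
    url = 'http://{0}:80/webhdfs/api/v1{1}'.format(host, path)
    params = {'op': 'DELETE', 'recursive': str(recursive).lower(), 'user.name': user_name}
    response = requests.delete(url, params=params, timeout=5)
    response.raise_for_status()
    return response.json().get('boolean', False)
```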