Unverified commit 7d3a664c authored by SparkSnail, committed by GitHub

revert nnictl hdfs command (#1120)

parent 252d35e0
@@ -21,7 +21,6 @@ nnictl support commands:
* [nnictl tensorboard](#tensorboard)
* [nnictl package](#package)
* [nnictl --version](#version)
* [nnictl hdfs](#hdfs)
### Manage an experiment
@@ -125,21 +124,21 @@ Debug mode will disable version check function in Trialkeeper.
nnictl stop
```
2. If there is an id specified, and the id matches the running experiment, nnictl will stop the corresponding experiment; otherwise it will print an error message.
```bash
nnictl stop [experiment_id]
```
3. Users can use 'nnictl stop all' to stop all experiments.
```bash
nnictl stop all
```
4. If the id ends with *, nnictl will stop all experiments whose ids match the prefix before the *.
5. If the id does not exist but matches the prefix of exactly one experiment id, nnictl will stop the matched experiment.
6. If the id does not exist but matches the prefix of multiple experiment ids, nnictl will print the matching id information so the user can disambiguate (see the sketch after this list).
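The prefix-matching rules in items 4-6 can be summarized with a small sketch. This is only an illustration of the behavior described above, not nnictl's actual implementation; the function name and the experiment ids are made up.

```python
def resolve_stop_targets(requested_id, running_ids):
    """Illustrative only: which experiments `nnictl stop <id>` would target."""
    # Item 4: a trailing '*' stops every experiment whose id starts with the prefix.
    if requested_id.endswith('*'):
        prefix = requested_id[:-1]
        return [exp_id for exp_id in running_ids if exp_id.startswith(prefix)]
    # An exact id stops just that experiment.
    if requested_id in running_ids:
        return [requested_id]
    # Items 5 and 6: treat the given id as a prefix of existing experiment ids.
    matches = [exp_id for exp_id in running_ids if exp_id.startswith(requested_id)]
    if len(matches) == 1:
        return matches                                # item 5: the single match is stopped
    print('matching ids: {0}'.format(matches))        # item 6: only report the candidates
    return []

print(resolve_stop_targets('abc*', ['abc123', 'abcde', 'xyz987']))
```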
<a name="update"></a> <a name="update"></a>
@@ -651,37 +650,4 @@ Debug mode will disable version check function in Trialkeeper.
```bash
nnictl --version
```
<a name="hdfs"></a> \ No newline at end of file
![](https://placehold.it/15/1589F0/000000?text=+) `Manage hdfs`
* __nnictl hdfs set__
* Description
Set the host and user name of the HDFS account.
* Usage
```bash
nnictl hdfs set [OPTIONS]
```
* Options
|Name, shorthand|Required|Default|Description|
|------|------|------|------|
|--host| True| |The host IP of HDFS, in the format xx.xx.xx.xx, for example 10.10.10.10|
|--user_name| True| |The user name of the HDFS account|
* __nnictl hdfs clean__
* Description
Clean up the code files that NNI automatically copied to HDFS. This command deletes all such files under the given user_name.
* Usage
```bash
nnictl hdfs clean
```
@@ -23,7 +23,6 @@ import os
import json
import shutil
from .constants import NNICTL_HOME_DIR
from .common_utils import print_error
class Config:
    '''a util class to load and save config'''
@@ -121,25 +120,3 @@ class Experiments:
            except ValueError:
                return {}
        return {}
class HDFSConfig:
    '''manage hdfs configuration'''

    def __init__(self):
        os.makedirs(NNICTL_HOME_DIR, exist_ok=True)
        self.hdfs_config_file = os.path.join(NNICTL_HOME_DIR, '.hdfs')

    def get_config(self):
        if os.path.exists(self.hdfs_config_file):
            try:
                with open(self.hdfs_config_file, 'r') as file:
                    return json.load(file)
            except Exception as exception:
                print_error(exception)
                return None
        else:
            return None

    def set_config(self, host, user_name):
        with open(self.hdfs_config_file, 'w') as file:
            json.dump({'host': host, 'userName': user_name}, file)
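For reference, a minimal round-trip of the reverted HDFSConfig class above. It assumes the class (together with its NNICTL_HOME_DIR and print_error dependencies) is importable or pasted into the current module; the host and user name values are placeholders.

```python
# Persist the HDFS account once; set_config writes {'host': ..., 'userName': ...}
# to the .hdfs file under NNICTL_HOME_DIR.
hdfs_config = HDFSConfig()
hdfs_config.set_config(host='10.10.10.10', user_name='alice')

# Read it back later; get_config() returns None if nothing has been saved yet.
config = hdfs_config.get_config()
if config is not None:
    print(config['host'], config['userName'])
```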
@@ -194,15 +194,6 @@ def parse_args():
        'the unit is second')
    parser_top.set_defaults(func=monitor_experiment)
    parser_hdfs = subparsers.add_parser('hdfs', help='monitor hdfs files')
    parser_hdfs_subparsers = parser_hdfs.add_subparsers()
    parser_hdfs_set = parser_hdfs_subparsers.add_parser('set', help='set the host and userName of hdfs')
    parser_hdfs_set.add_argument('--host', required=True, dest='host', help='the host of hdfs')
    parser_hdfs_set.add_argument('--user_name', required=True, dest='user_name', help='the userName of hdfs')
    parser_hdfs_set.set_defaults(func=hdfs_set)
    parser_hdfs_list = parser_hdfs_subparsers.add_parser('clean', help='clean hdfs files')
    parser_hdfs_list.set_defaults(func=hdfs_clean)
    args = parser.parse_args()
    args.func(args)
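The reverted subcommand registration above follows the same argparse pattern nnictl uses elsewhere: each subparser binds its handler with set_defaults(func=...), and after parsing, args.func(args) dispatches to it. A minimal self-contained sketch of that pattern (the 'demo' program and 'greet' subcommand are invented for illustration):

```python
import argparse

def greet(args):
    print('hello, {0}!'.format(args.name))

def parse_args():
    parser = argparse.ArgumentParser(prog='demo')
    subparsers = parser.add_subparsers()
    # Each subcommand registers its own handler through set_defaults(func=...).
    parser_greet = subparsers.add_parser('greet', help='print a greeting')
    parser_greet.add_argument('--name', required=True, dest='name', help='who to greet')
    parser_greet.set_defaults(func=greet)
    args = parser.parse_args()
    # Dispatch to whichever handler the chosen subcommand registered.
    args.func(args)

if __name__ == '__main__':
    parse_args()
```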
@@ -27,8 +27,7 @@ import time
from subprocess import call, check_output
from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response
from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url, export_data_url
from pyhdfs import HdfsClient, HdfsFileNotFoundException
from .config_utils import Config, Experiments, HDFSConfig
from .constants import NNICTL_HOME_DIR, EXPERIMENT_INFORMATION_FORMAT, EXPERIMENT_DETAIL_FORMAT, \
    EXPERIMENT_MONITOR_INFO, TRIAL_MONITOR_HEAD, TRIAL_MONITOR_CONTENT, TRIAL_MONITOR_TAIL, REST_TIME_OUT
from .common_utils import print_normal, print_error, print_warning, detect_process
@@ -486,36 +485,4 @@ def export_trials_data(args):
        else:
            print_error('Export failed...')
    else:
        print_error('Restful server is not Running')
def hdfs_set(args):
    hdfsConfig = HDFSConfig()
    hdfsConfig.set_config(args.host, args.user_name)
    print_normal('HDFS account update success!')

def hdfs_clean(args):
    hdfsConfig = HDFSConfig()
    if not hdfsConfig.get_config():
        print_error('Please use \'nnictl hdfs set\' command to set hdfs account first!')
        exit(1)
    host = hdfsConfig.get_config().get('host')
    user_name = hdfsConfig.get_config().get('userName')
    hdfs_client = HdfsClient(hosts='{0}:80'.format(host), user_name=user_name, webhdfs_path='/webhdfs/api/v1', timeout=5)
    root_path = os.path.join('/', user_name, 'nni', 'experiments')
    while True:
        inputs = input('INFO: clean up all files in {0}, do you want to continue?[Y/N]:'.format(root_path))
        if inputs.lower() not in ['y', 'n', 'yes', 'no']:
            print_warning('please input Y or N!')
        elif inputs.lower() in ['n', 'no']:
            exit(0)
        else:
            break
    path_list = hdfs_client.listdir(root_path)
    for path in path_list:
        full_path = os.path.join(root_path, path)
        print_normal('deleting {0}'.format(full_path))
        if hdfs_client.delete(full_path, recursive=True):
            print_normal('delete success!')
        else:
            print_normal('delete failed!')
    print_normal('DONE')
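For context, a dry-run counterpart to hdfs_clean above: it lists what would be deleted without removing anything, reusing the same pyhdfs client setup and path layout as the reverted code. The host and user name literals are placeholders for the values that HDFSConfig would normally provide.

```python
import os
from pyhdfs import HdfsClient

# Placeholders; the reverted command read these from the saved HDFSConfig instead.
host = '10.10.10.10'
user_name = 'alice'

# Same client construction and root path as hdfs_clean above.
hdfs_client = HdfsClient(hosts='{0}:80'.format(host), user_name=user_name,
                         webhdfs_path='/webhdfs/api/v1', timeout=5)
root_path = os.path.join('/', user_name, 'nni', 'experiments')

# Only list the entries that `nnictl hdfs clean` would have deleted.
for path in hdfs_client.listdir(root_path):
    print('would delete {0}'.format(os.path.join(root_path, path)))
```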