Unverified Commit d48ad027 authored by SparkSnail, committed by GitHub

Merge pull request #184 from microsoft/master

merge master
parents 9352cc88 22993e5d
@@ -36,8 +36,8 @@ def process_install(package_name):
def package_install(args):
    '''install packages'''
    process_install(args.name)

def package_show(args):
    '''show all packages'''
    print(' '.join(PACKAGE_REQUIREMENTS.keys()))
@@ -112,7 +112,7 @@ def update_concurrency(args):
        print_error('Update %s failed!' % 'concurrency')

def update_duration(args):
    #parse time, change time unit to seconds
    args.value = parse_time(args.value)
    args.port = get_experiment_port(args)
    if args.port is not None:
...
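The comment in update_duration says parse_time converts the duration value into seconds. The real helper lives elsewhere in nni_cmd and is not part of this diff; a minimal stand-in, assuming the usual s/m/h/d suffixes, might look like this:

```python
def parse_time(value):
    '''Hypothetical sketch: convert a duration string such as '30m' or '2h' into seconds.
    The assumed suffixes (s, m, h, d) are an illustration, not the actual nni_cmd implementation.'''
    units = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}
    if value and value[-1] in units:
        return int(value[:-1]) * units[value[-1]]
    return int(value)  # bare numbers are treated as seconds
```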
@@ -40,16 +40,16 @@ def copyHdfsDirectoryToLocal(hdfsDirectory, localDirectory, hdfsClient):
            copyHdfsDirectoryToLocal(subHdfsDirectory, subLocalDirectory, hdfsClient)
        elif f.type == 'FILE':
            hdfsFilePath = posixpath.join(hdfsDirectory, f.pathSuffix)
            localFilePath = os.path.join(localDirectory, f.pathSuffix)
            copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient)
        else:
            raise AssertionError('unexpected type {}'.format(f.type))

def copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient, override=True):
    '''Copy file from HDFS to local'''
    if not hdfsClient.exists(hdfsFilePath):
        raise Exception('HDFS file {} does not exist!'.format(hdfsFilePath))
    try:
        file_status = hdfsClient.get_file_status(hdfsFilePath)
        if file_status.type != 'FILE':
            raise Exception('HDFS file path {} is not a file'.format(hdfsFilePath))
...
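For context, here is a minimal usage sketch of the two helpers above. It assumes they are importable from the hdfsClientUtility module exercised by the tests further down and that HdfsClient comes from pyhdfs, as in those tests; the host, user, and paths are hypothetical.

```python
from pyhdfs import HdfsClient
from hdfsClientUtility import copyHdfsFileToLocal, copyHdfsDirectoryToLocal  # assumed module name

# Hypothetical namenode host and user, mirroring the test setup below.
hdfs_client = HdfsClient(hosts='namenode.example.com:50070', user_name='hadoop')

# Copy one file; copyHdfsFileToLocal raises if the HDFS path is missing or is not a FILE.
copyHdfsFileToLocal('/hadoop/output/result.txt', './result.txt', hdfs_client)

# Copy a directory tree; DIRECTORY entries recurse, FILE entries go through copyHdfsFileToLocal.
copyHdfsDirectoryToLocal('/hadoop/output', './output', hdfs_client)
```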
@@ -142,7 +142,7 @@ class PipeLogReader(threading.Thread):
        '''
        time.sleep(5)
        while True:
            cur_process_exit = self.process_exit
            try:
                line = self.queue.get(True, 5)
                try:
@@ -150,7 +150,7 @@ class PipeLogReader(threading.Thread):
                except Exception as e:
                    pass
            except Exception as e:
                if cur_process_exit == True:
                    self._is_read_completed = True
                    break
@@ -177,7 +177,7 @@ class PipeLogReader(threading.Thread):
            if not self.log_pattern.match(line):
                continue
            self.queue.put(line)
        self.pipeReader.close()

    def close(self):
@@ -190,7 +190,7 @@ class PipeLogReader(threading.Thread):
        """Return if read is completed
        """
        return self._is_read_completed

    def set_process_exit(self):
        self.process_exit = True
        return self.process_exit
\ No newline at end of file
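The hunks above show the reader side of PipeLogReader: it snapshots process_exit before blocking on queue.get with a five-second timeout, and only marks the read complete once the queue has drained after the process exited. A condensed, standalone sketch of that polling pattern (not the class itself) follows; the names here are illustrative.

```python
import queue
import threading

log_queue = queue.Queue()
process_exit = False  # flipped to True by something like set_process_exit()

def drain_logs():
    '''Consume lines until the queue stays empty after the producer has exited.'''
    while True:
        exit_seen = process_exit            # snapshot before blocking, as in the diff
        try:
            line = log_queue.get(True, 5)   # block up to 5 seconds for the next line
            print(line, end='')
        except queue.Empty:                 # the class catches Exception; Empty is what get() raises on timeout
            if exit_seen:                   # queue drained and producer is gone
                break

reader = threading.Thread(target=drain_logs, daemon=True)
reader.start()
```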
@@ -39,9 +39,9 @@ class HDFSClientUtilityTest(unittest.TestCase):
                self.hdfs_config = json.load(file)
        except Exception as exception:
            print(exception)

        self.hdfs_client = HdfsClient(hosts='{0}:{1}'.format(self.hdfs_config['host'], '50070'), user_name=self.hdfs_config['userName'])

    def get_random_name(self, length):
        return ''.join(random.sample(string.ascii_letters + string.digits, length))
@@ -49,20 +49,20 @@ class HDFSClientUtilityTest(unittest.TestCase):
        '''test copyFileToHdfs'''
        file_name = self.get_random_name(8)
        file_content = 'hello world!'

        with open('./{}'.format(file_name), 'w') as file:
            file.write(file_content)

        result = copyFileToHdfs('./{}'.format(file_name), '/{0}/{1}'.format(self.hdfs_config['userName'], file_name), self.hdfs_client)
        self.assertTrue(result)
        file_list = self.hdfs_client.listdir('/{0}'.format(self.hdfs_config['userName']))
        self.assertIn(file_name, file_list)

        hdfs_file_name = self.get_random_name(8)
        self.hdfs_client.copy_to_local('/{0}/{1}'.format(self.hdfs_config['userName'], file_name), './{}'.format(hdfs_file_name))
        self.assertTrue(os.path.exists('./{}'.format(hdfs_file_name)))

        with open('./{}'.format(hdfs_file_name), 'r') as file:
            content = file.readline()
            self.assertEqual(file_content, content)
@@ -70,21 +70,21 @@ class HDFSClientUtilityTest(unittest.TestCase):
        os.remove('./{}'.format(file_name))
        os.remove('./{}'.format(hdfs_file_name))
        self.hdfs_client.delete('/{0}/{1}'.format(self.hdfs_config['userName'], file_name))

    def test_copy_directory_run(self):
        '''test copyDirectoryToHdfs'''
        directory_name = self.get_random_name(8)
        file_name_list = [self.get_random_name(8), self.get_random_name(8)]
        file_content = 'hello world!'

        os.makedirs('./{}'.format(directory_name))
        for file_name in file_name_list:
            with open('./{0}/{1}'.format(directory_name, file_name), 'w') as file:
                file.write(file_content)

        result = copyDirectoryToHdfs('./{}'.format(directory_name), '/{0}/{1}'.format(self.hdfs_config['userName'], directory_name), self.hdfs_client)
        self.assertTrue(result)
        directory_list = self.hdfs_client.listdir('/{0}'.format(self.hdfs_config['userName']))
        self.assertIn(directory_name, directory_list)
@@ -94,7 +94,7 @@ class HDFSClientUtilityTest(unittest.TestCase):
            #clean up
            self.hdfs_client.delete('/{0}/{1}/{2}'.format(self.hdfs_config['userName'], directory_name, file_name))
        self.hdfs_client.delete('/{0}/{1}'.format(self.hdfs_config['userName'], directory_name))
        shutil.rmtree('./{}'.format(directory_name))

if __name__ == '__main__':
...
@@ -42,10 +42,10 @@ regular = re.compile('v?(?P<version>[0-9](\.[0-9]){0,1}).*')
def main_loop(args):
    '''main loop logic for trial keeper'''

    if not os.path.exists(LOG_DIR):
        os.makedirs(LOG_DIR)

    stdout_file = open(STDOUT_FULL_PATH, 'a+')
    stderr_file = open(STDERR_FULL_PATH, 'a+')
    trial_keeper_syslogger = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial_keeper', StdOutputType.Stdout, args.log_collection)
...
@@ -15,7 +15,7 @@ else{
$PIP_UNINSTALL = """$NNI_PYTHON3\python"" -m pip uninstall -y "
$NNI_NODE_FOLDER = $NNI_DEPENDENCY_FOLDER+"\nni-node"
$NNI_YARN_FOLDER = $NNI_DEPENDENCY_FOLDER+"\nni-yarn"

# uninstall
Remove-Item $NNI_PKG_FOLDER -Recurse -Force
cmd /C $PIP_UNINSTALL "nni"
...