Unverified Commit d48ad027 authored by SparkSnail's avatar SparkSnail Committed by GitHub
Browse files

Merge pull request #184 from microsoft/master

merge master
parents 9352cc88 22993e5d
......@@ -36,8 +36,8 @@ def process_install(package_name):
def package_install(args):
    '''Install the package named on the command line (args.name).'''
    # Delegate the real work to the shared install helper.
    requested = args.name
    process_install(requested)
def package_show(args):
    '''Print the names of all known packages, separated by spaces.'''
    names = PACKAGE_REQUIREMENTS.keys()
    print(' '.join(names))
......@@ -112,7 +112,7 @@ def update_concurrency(args):
print_error('Update %s failed!' % 'concurrency')
def update_duration(args):
#parse time, change time unit to seconds
#parse time, change time unit to seconds
args.value = parse_time(args.value)
args.port = get_experiment_port(args)
if args.port is not None:
......
......@@ -40,16 +40,16 @@ def copyHdfsDirectoryToLocal(hdfsDirectory, localDirectory, hdfsClient):
copyHdfsDirectoryToLocal(subHdfsDirectory, subLocalDirectory, hdfsClient)
elif f.type == 'FILE':
hdfsFilePath = posixpath.join(hdfsDirectory, f.pathSuffix)
localFilePath = os.path.join(localDirectory, f.pathSuffix)
localFilePath = os.path.join(localDirectory, f.pathSuffix)
copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient)
else:
else:
raise AssertionError('unexpected type {}'.format(f.type))
def copyHdfsFileToLocal(hdfsFilePath, localFilePath, hdfsClient, override=True):
'''Copy file from HDFS to local'''
if not hdfsClient.exists(hdfsFilePath):
raise Exception('HDFS file {} does not exist!'.format(hdfsFilePath))
try:
try:
file_status = hdfsClient.get_file_status(hdfsFilePath)
if file_status.type != 'FILE':
raise Exception('HDFS file path {} is not a file'.format(hdfsFilePath))
......
......@@ -142,7 +142,7 @@ class PipeLogReader(threading.Thread):
'''
time.sleep(5)
while True:
cur_process_exit = self.process_exit
cur_process_exit = self.process_exit
try:
line = self.queue.get(True, 5)
try:
......@@ -150,7 +150,7 @@ class PipeLogReader(threading.Thread):
except Exception as e:
pass
except Exception as e:
if cur_process_exit == True:
if cur_process_exit == True:
self._is_read_completed = True
break
......@@ -177,7 +177,7 @@ class PipeLogReader(threading.Thread):
if not self.log_pattern.match(line):
continue
self.queue.put(line)
self.pipeReader.close()
def close(self):
......@@ -190,7 +190,7 @@ class PipeLogReader(threading.Thread):
"""Return if read is completed
"""
return self._is_read_completed
def set_process_exit(self):
    '''Flag the monitored process as exited; returns the flag (always True).'''
    self.process_exit = True
    return True
\ No newline at end of file
......@@ -39,9 +39,9 @@ class HDFSClientUtilityTest(unittest.TestCase):
self.hdfs_config = json.load(file)
except Exception as exception:
print(exception)
self.hdfs_client = HdfsClient(hosts='{0}:{1}'.format(self.hdfs_config['host'], '50070'), user_name=self.hdfs_config['userName'])
def get_random_name(self, length):
    '''Return a random alphanumeric name of the given length.

    Characters are drawn without replacement (random.sample), so the
    result has no repeated characters and length must not exceed 62.
    '''
    alphabet = string.ascii_letters + string.digits
    picked = random.sample(alphabet, length)
    return ''.join(picked)
......@@ -49,20 +49,20 @@ class HDFSClientUtilityTest(unittest.TestCase):
'''test copyFileToHdfs'''
file_name = self.get_random_name(8)
file_content = 'hello world!'
with open('./{}'.format(file_name), 'w') as file:
file.write(file_content)
file.write(file_content)
result = copyFileToHdfs('./{}'.format(file_name), '/{0}/{1}'.format(self.hdfs_config['userName'], file_name), self.hdfs_client)
self.assertTrue(result)
file_list = self.hdfs_client.listdir('/{0}'.format(self.hdfs_config['userName']))
self.assertIn(file_name, file_list)
hdfs_file_name = self.get_random_name(8)
self.hdfs_client.copy_to_local('/{0}/{1}'.format(self.hdfs_config['userName'], file_name), './{}'.format(hdfs_file_name))
self.assertTrue(os.path.exists('./{}'.format(hdfs_file_name)))
with open('./{}'.format(hdfs_file_name), 'r') as file:
content = file.readline()
self.assertEqual(file_content, content)
......@@ -70,21 +70,21 @@ class HDFSClientUtilityTest(unittest.TestCase):
os.remove('./{}'.format(file_name))
os.remove('./{}'.format(hdfs_file_name))
self.hdfs_client.delete('/{0}/{1}'.format(self.hdfs_config['userName'], file_name))
def test_copy_directory_run(self):
'''test copyDirectoryToHdfs'''
directory_name = self.get_random_name(8)
file_name_list = [self.get_random_name(8), self.get_random_name(8)]
file_content = 'hello world!'
os.makedirs('./{}'.format(directory_name))
for file_name in file_name_list:
with open('./{0}/{1}'.format(directory_name, file_name), 'w') as file:
file.write(file_content)
result = copyDirectoryToHdfs('./{}'.format(directory_name), '/{0}/{1}'.format(self.hdfs_config['userName'], directory_name), self.hdfs_client)
self.assertTrue(result)
directory_list = self.hdfs_client.listdir('/{0}'.format(self.hdfs_config['userName']))
self.assertIn(directory_name, directory_list)
......@@ -94,7 +94,7 @@ class HDFSClientUtilityTest(unittest.TestCase):
#clean up
self.hdfs_client.delete('/{0}/{1}/{2}'.format(self.hdfs_config['userName'], directory_name, file_name))
self.hdfs_client.delete('/{0}/{1}'.format(self.hdfs_config['userName'], directory_name))
shutil.rmtree('./{}'.format(directory_name))
if __name__ == '__main__':
......
......@@ -42,10 +42,10 @@ regular = re.compile('v?(?P<version>[0-9](\.[0-9]){0,1}).*')
def main_loop(args):
'''main loop logic for trial keeper'''
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
stdout_file = open(STDOUT_FULL_PATH, 'a+')
stderr_file = open(STDERR_FULL_PATH, 'a+')
trial_keeper_syslogger = RemoteLogger(args.nnimanager_ip, args.nnimanager_port, 'trial_keeper', StdOutputType.Stdout, args.log_collection)
......
......@@ -15,7 +15,7 @@ else{
$PIP_UNINSTALL = """$NNI_PYTHON3\python"" -m pip uninstall -y "
$NNI_NODE_FOLDER = $NNI_DEPENDENCY_FOLDER+"\nni-node"
$NNI_YARN_FOLDER = $NNI_DEPENDENCY_FOLDER+"\nni-yarn"
# uninstall
Remove-Item $NNI_PKG_FOLDER -Recurse -Force
cmd /C $PIP_UNINSTALL "nni"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment