Unverified commit 7514b9f8 authored by Mingyi, committed by GitHub

[CI] Fix CI (#1217)

parent 158e8f1e
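
In short: run_unittest_files in sglang/test/test_utils.py now runs each test file as a python3 subprocess and, on timeout, kills the entire child-process tree via the newly imported kill_child_process, instead of terminating a multiprocessing.Process worker. The commit also re-enables models/test_generation_models.py in the minimal suite, drops the mp.set_start_method("spawn") setup in run_suite.py (no longer needed once each file runs in its own subprocess), and switches several unittest.main(warnings="ignore") calls to plain unittest.main().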
@@ -2,12 +2,10 @@
 import argparse
 import asyncio
-import multiprocessing
 import os
 import subprocess
 import threading
 import time
 import unittest
 from functools import partial
 from typing import Callable, List, Optional
@@ -19,6 +17,7 @@ import torch.nn.functional as F
 from sglang.global_config import global_config
 from sglang.lang.backend.openai import OpenAI
 from sglang.lang.backend.runtime_endpoint import RuntimeEndpoint
+from sglang.srt.utils import kill_child_process
 from sglang.utils import get_exception_traceback

 DEFAULT_MODEL_NAME_FOR_TEST = "meta-llama/Meta-Llama-3.1-8B-Instruct"
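
The body of kill_child_process is not shown in this diff. For context, a minimal sketch of such a helper, assuming a psutil-based implementation and a hypothetical signature (the real sglang.srt.utils version may differ):

import os

import psutil  # assumed dependency


def kill_child_process(pid=None, including_parent=True):
    # Kill `pid` and, recursively, every child it spawned (assumption:
    # this mirrors what sglang.srt.utils.kill_child_process does).
    if pid is None:
        pid = os.getpid()
    try:
        parent = psutil.Process(pid)
    except psutil.NoSuchProcess:
        return
    for child in parent.children(recursive=True):
        try:
            child.kill()
        except psutil.NoSuchProcess:
            pass
    if including_parent:
        try:
            parent.kill()
        except psutil.NoSuchProcess:
            pass

The new except TimeoutError: branch below relies on a helper like this: terminating only the worker, as the old code did, would leave grandchild processes (e.g., a launched server) running.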
@@ -457,35 +456,35 @@ def run_with_timeout(
     return ret_value[0]


-def run_one_file(filename, out_queue):
-    print(f"\n\nRun {filename}\n\n")
-    ret = unittest.main(module=None, argv=["", "-vb"] + [filename])


 def run_unittest_files(files: List[str], timeout_per_file: float):
     tic = time.time()
     success = True
     for filename in files:
-        out_queue = multiprocessing.Queue()
-        p = multiprocessing.Process(target=run_one_file, args=(filename, out_queue))
+        global process

-        def run_process():
-            p.start()
-            p.join()
+        def run_one_file(filename):
+            filename = os.path.join(os.getcwd(), filename)
+            print(f"\n\nRun {filename}\n\n")
+            process = subprocess.Popen(
+                ["python3", filename], stdout=None, stderr=None, env=os.environ
+            )
+            process.wait()
+            return process.returncode

         try:
-            run_with_timeout(run_process, timeout=timeout_per_file)
-            if p.exitcode != 0:
-                success = False
-                break
+            ret_code = run_with_timeout(
+                run_one_file, args=(filename,), timeout=timeout_per_file
+            )
+            assert ret_code == 0
         except TimeoutError:
-            p.terminate()
+            kill_child_process(process.pid)
             time.sleep(5)
             print(
                 f"\nTimeout after {timeout_per_file} seconds when running {filename}\n"
             )
-            return False
+            success = False
+            break

     if success:
         print(f"Success. Time elapsed: {time.time() - tic:.2f}s")
@@ -77,4 +77,4 @@ if __name__ == "__main__":
     except RuntimeError:
         pass

-    unittest.main(warnings="ignore")
+    unittest.main()
@@ -136,4 +136,4 @@ if __name__ == "__main__":
     except RuntimeError:
         pass

-    unittest.main(warnings="ignore")
+    unittest.main()
@@ -7,7 +7,7 @@ from sglang.test.test_utils import run_unittest_files
 suites = {
     "minimal": [
         "models/test_embedding_models.py",
-        # "models/test_generation_models.py",
+        "models/test_generation_models.py",
         "sampling/penaltylib",
         "test_chunked_prefill.py",
         "test_embedding_openai_server.py",
@@ -33,6 +33,7 @@ for target_suite_name, target_tests in suites.items():
         tests.remove(target_suite_name)
         tests.extend(target_tests)
+
 if __name__ == "__main__":
     arg_parser = argparse.ArgumentParser()
     arg_parser.add_argument(
@@ -55,10 +56,5 @@ if __name__ == "__main__":
     else:
         files = suites[args.suite]

-    try:
-        mp.set_start_method("spawn")
-    except RuntimeError:
-        pass
-
     exit_code = run_unittest_files(files, args.timeout_per_file)
     exit(exit_code)
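
Removing mp.set_start_method("spawn") here matches the test_utils.py change above: run_unittest_files no longer creates multiprocessing workers, so there is no start method to configure; every test file now begins in a fresh python3 interpreter, which provides the same isolation the spawn start method was giving.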
@@ -112,4 +112,4 @@ class TestBatchPenalizerE2E(unittest.TestCase):
 if __name__ == "__main__":
-    unittest.main(warnings="ignore")
+    unittest.main()