"git@developer.sourcefind.cn:Wenxuan/LightX2V.git" did not exist on "fb686a901397c3dc8069e94457ff408b3e042c8a"
Unverified commit 7514b9f8, authored by Mingyi, committed by GitHub

[CI] Fix CI (#1217)

parent 158e8f1e
@@ -2,12 +2,10 @@
 import argparse
 import asyncio
-import multiprocessing
 import os
 import subprocess
 import threading
 import time
-import unittest
 from functools import partial
 from typing import Callable, List, Optional
@@ -19,6 +17,7 @@ import torch.nn.functional as F
 from sglang.global_config import global_config
 from sglang.lang.backend.openai import OpenAI
 from sglang.lang.backend.runtime_endpoint import RuntimeEndpoint
+from sglang.srt.utils import kill_child_process
 from sglang.utils import get_exception_traceback

 DEFAULT_MODEL_NAME_FOR_TEST = "meta-llama/Meta-Llama-3.1-8B-Instruct"
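
The only new dependency in this file is kill_child_process, used below to tear down a timed-out test run. Its body is not part of this diff; as an assumption for illustration, a helper like this is typically built on psutil so the whole process tree is killed, roughly:

```python
import psutil


def kill_child_process(pid, including_parent=True):
    # Assumed sketch, not the code from this commit: kill a process and
    # every child it spawned, so a timed-out test file cannot leave
    # orphaned server processes behind.
    try:
        parent = psutil.Process(pid)
    except psutil.NoSuchProcess:
        return
    for child in parent.children(recursive=True):
        child.kill()
    if including_parent:
        parent.kill()
```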
@@ -457,35 +456,35 @@ def run_with_timeout(
     return ret_value[0]


-def run_one_file(filename, out_queue):
-    print(f"\n\nRun {filename}\n\n")
-    ret = unittest.main(module=None, argv=["", "-vb"] + [filename])


 def run_unittest_files(files: List[str], timeout_per_file: float):
     tic = time.time()
     success = True

     for filename in files:
-        out_queue = multiprocessing.Queue()
-        p = multiprocessing.Process(target=run_one_file, args=(filename, out_queue))
+        global process

-        def run_process():
-            p.start()
-            p.join()
+        def run_one_file(filename):
+            filename = os.path.join(os.getcwd(), filename)
+            print(f"\n\nRun {filename}\n\n")
+            process = subprocess.Popen(
+                ["python3", filename], stdout=None, stderr=None, env=os.environ
+            )
+            process.wait()
+            return process.returncode

         try:
-            run_with_timeout(run_process, timeout=timeout_per_file)
-            if p.exitcode != 0:
-                success = False
-                break
+            ret_code = run_with_timeout(
+                run_one_file, args=(filename,), timeout=timeout_per_file
+            )
+            assert ret_code == 0
         except TimeoutError:
-            p.terminate()
+            kill_child_process(process.pid)
             time.sleep(5)
             print(
                 f"\nTimeout after {timeout_per_file} seconds when running {filename}\n"
             )
-            return False
+            success = False
+            break

     if success:
         print(f"Success. Time elapsed: {time.time() - tic:.2f}s")
...
@@ -77,4 +77,4 @@ if __name__ == "__main__":
     except RuntimeError:
         pass

-    unittest.main(warnings="ignore")
+    unittest.main()
@@ -136,4 +136,4 @@ if __name__ == "__main__":
     except RuntimeError:
         pass

-    unittest.main(warnings="ignore")
+    unittest.main()
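
Two test entry points drop the warnings="ignore" argument in the same way. For reference, this is standard unittest behavior rather than anything specific to this repo:

```python
import unittest

if __name__ == "__main__":
    # unittest.main(warnings="ignore") suppressed all Python warnings for
    # the test run; plain unittest.main() keeps the default warning
    # filters, so deprecation and resource warnings show up in CI logs.
    unittest.main()
```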
@@ -7,7 +7,7 @@ from sglang.test.test_utils import run_unittest_files
 suites = {
     "minimal": [
         "models/test_embedding_models.py",
-        # "models/test_generation_models.py",
+        "models/test_generation_models.py",
         "sampling/penaltylib",
         "test_chunked_prefill.py",
         "test_embedding_openai_server.py",
@@ -33,6 +33,7 @@ for target_suite_name, target_tests in suites.items():
         tests.remove(target_suite_name)
         tests.extend(target_tests)

+
 if __name__ == "__main__":
     arg_parser = argparse.ArgumentParser()
     arg_parser.add_argument(
@@ -55,10 +56,5 @@ if __name__ == "__main__":
     else:
         files = suites[args.suite]

-    try:
-        mp.set_start_method("spawn")
-    except RuntimeError:
-        pass
-
     exit_code = run_unittest_files(files, args.timeout_per_file)
     exit(exit_code)
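
With the runner now based on subprocess rather than multiprocessing, the suite script no longer needs to force the "spawn" start method before launching tests, and test_generation_models.py is re-enabled in the minimal suite. A hypothetical direct invocation after this change (the file list and timeout below are made up for illustration):

```python
from sglang.test.test_utils import run_unittest_files

# Hypothetical example: run two suite files with a 30-minute cap on each.
files = ["models/test_generation_models.py", "test_chunked_prefill.py"]
exit_code = run_unittest_files(files, timeout_per_file=1800)
exit(exit_code)
```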
@@ -112,4 +112,4 @@ class TestBatchPenalizerE2E(unittest.TestCase):

 if __name__ == "__main__":
-    unittest.main(warnings="ignore")
+    unittest.main()