"""
python3 -m unittest test_json_constrained.TestJSONConstrainedOutlinesBackend.test_json_generate
python3 -m unittest test_json_constrained.TestJSONConstrainedXGrammarBackend.test_json_generate
"""

import json
import unittest
from concurrent.futures import ThreadPoolExecutor

import openai
import requests

from sglang.srt.utils import kill_process_tree
from sglang.test.test_utils import (
    DEFAULT_SMALL_MODEL_NAME_FOR_TEST,
    DEFAULT_TIMEOUT_FOR_SERVER_LAUNCH,
    DEFAULT_URL_FOR_TEST,
    popen_launch_server,
)


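# Shared setup helper: launch a local sglang server with the requested grammar
# backend and store the model name, base URL, and a small JSON schema on the
# test class.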
def setup_class(cls, backend: str, disable_overlap: bool):
    cls.model = DEFAULT_SMALL_MODEL_NAME_FOR_TEST
    cls.base_url = DEFAULT_URL_FOR_TEST
    cls.json_schema = json.dumps(
        {
            "type": "object",
            "properties": {
                "name": {"type": "string", "pattern": "^[\\w]+$"},
                "population": {"type": "integer"},
            },
            "required": ["name", "population"],
        }
    )

    other_args = [
        "--max-running-requests",
        "10",
        "--grammar-backend",
        backend,
    ]

    if disable_overlap:
        other_args += ["--disable-overlap-schedule"]

    cls.process = popen_launch_server(
        cls.model,
        cls.base_url,
        timeout=DEFAULT_TIMEOUT_FOR_SERVER_LAUNCH,
        other_args=other_args,
    )


class TestJSONConstrainedOutlinesBackend(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        setup_class(cls, backend="outlines", disable_overlap=False)
        cls.check_jump_forward = False

    @classmethod
    def tearDownClass(cls):
        kill_process_tree(cls.process.pid)

    def run_decode(self, json_schema, return_logprob=False, top_logprobs_num=0, n=1):
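        # Hit the native /generate endpoint; the JSON schema is passed through
        # sampling_params so the grammar backend constrains decoding.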
        response = requests.post(
            self.base_url + "/generate",
            json={
                "text": "The capital of France is",
                "sampling_params": {
                    "temperature": 0 if n == 1 else 0.5,
                    "max_new_tokens": 128,
                    "n": n,
                    "stop_token_ids": [119690],
                    "json_schema": json_schema,
                },
                "stream": False,
                "return_logprob": return_logprob,
                "top_logprobs_num": top_logprobs_num,
                "logprob_start_len": 0,
            },
        )
        ret = response.json()
        print(json.dumps(ret))
        print("=" * 100)

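        # Unconstrained requests (json_schema is None) only need to complete without error.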
        if not json_schema:
            return

        # Make sure the json output is valid
        try:
            js_obj = json.loads(ret["text"])
        except (TypeError, json.decoder.JSONDecodeError):
            print("JSONDecodeError", ret["text"])
            raise

        self.assertIsInstance(js_obj["name"], str)
        self.assertIsInstance(js_obj["population"], int)

    def test_json_generate(self):
        self.run_decode(json_schema=self.json_schema)

    def test_json_openai(self):
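        # Exercise the same constraint through the OpenAI-compatible API using
        # response_format with type "json_schema".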
        client = openai.Client(api_key="EMPTY", base_url=f"{self.base_url}/v1")

        response = client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": "You are a helpful AI assistant"},
                {"role": "user", "content": "Introduce the capital of France."},
            ],
            temperature=0,
            max_tokens=128,
            response_format={
                "type": "json_schema",
                "json_schema": {"name": "foo", "schema": json.loads(self.json_schema)},
            },
        )
        text = response.choices[0].message.content

        try:
            js_obj = json.loads(text)
        except (TypeError, json.decoder.JSONDecodeError):
            print("JSONDecodeError", text)
            raise

        self.assertIsInstance(js_obj["name"], str)
        self.assertIsInstance(js_obj["population"], int)

    def test_mix_json_and_other(self):
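        # Interleave unconstrained and constrained requests and send them
        # concurrently to check that the backend handles mixed batches.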
        json_schemas = [None, None, self.json_schema, self.json_schema] * 10

        with ThreadPoolExecutor(len(json_schemas)) as executor:
            list(executor.map(self.run_decode, json_schemas))


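# Re-run the same tests with the overlap scheduler disabled, which lets the
# outlines backend use jump-forward decoding.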
class TestJumpForwardOutlinesBackend(TestJSONConstrainedOutlinesBackend):
    @classmethod
    def setUpClass(cls):
        setup_class(cls, backend="outlines", disable_overlap=True)
        cls.check_jump_forward = True


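# Re-run the same tests with the xgrammar grammar backend.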
class TestJSONConstrainedXGrammarBackend(TestJSONConstrainedOutlinesBackend):
    @classmethod
    def setUpClass(cls):
        setup_class(cls, backend="xgrammar", disable_overlap=False)
        cls.check_jump_forward = False


if __name__ == "__main__":
    unittest.main()