"""
python3 -m unittest test_json_constrained.TestJSONConstrained.test_json_generate
"""

import json
import unittest
from concurrent.futures import ThreadPoolExecutor

import openai
import requests

from sglang.srt.utils import kill_child_process
from sglang.test.test_utils import (
    DEFAULT_MODEL_NAME_FOR_TEST,
    DEFAULT_URL_FOR_TEST,
    popen_launch_server,
)


class TestJSONConstrained(unittest.TestCase):
    """End-to-end tests for JSON-schema-constrained generation.

    A model server is launched once for the whole class; each test sends
    generation requests and verifies that, when a schema is supplied, the
    generated text parses into a JSON object conforming to it.
    """

    @classmethod
    def setUpClass(cls):
        cls.model = DEFAULT_MODEL_NAME_FOR_TEST
        cls.base_url = DEFAULT_URL_FOR_TEST
        # Schema: an object with a word-character-only "name" and an
        # integer "population", both required.
        cls.json_schema = json.dumps(
            {
                "type": "object",
                "properties": {
                    "name": {"type": "string", "pattern": "^[\\w]+$"},
                    "population": {"type": "integer"},
                },
                "required": ["name", "population"],
            }
        )
        # Cap server concurrency so test_mix_json_and_other actually
        # exercises batching/queueing of mixed request kinds.
        cls.process = popen_launch_server(
            cls.model,
            cls.base_url,
            timeout=300,
            other_args=["--max-running-requests", "10"],
        )

    @classmethod
    def tearDownClass(cls):
        # Tear down the launched server process tree.
        kill_child_process(cls.process.pid)

    def run_decode(self, json_schema, return_logprob=False, top_logprobs_num=0, n=1):
        """Send one /generate request and validate the constrained output.

        Args:
            json_schema: JSON schema string to constrain decoding, or
                None for unconstrained generation (no validation then).
            return_logprob: whether to ask the server for logprobs.
            top_logprobs_num: number of top logprobs to request.
            n: number of samples; temperature is 0 only for n == 1 so
                that multiple samples can differ.
        """
        response = requests.post(
            self.base_url + "/generate",
            json={
                "text": "The capital of France is",
                "sampling_params": {
                    "temperature": 0 if n == 1 else 0.5,
                    "max_new_tokens": 128,
                    "n": n,
                    "stop_token_ids": [119690],
                    "json_schema": json_schema,
                },
                "stream": False,
                "return_logprob": return_logprob,
                "top_logprobs_num": top_logprobs_num,
                "logprob_start_len": 0,
            },
        )
        print(json.dumps(response.json()))
        print("=" * 100)

        # Unconstrained request: nothing further to validate.
        if not json_schema:
            return

        text = response.json()["text"]
        try:
            js_obj = json.loads(text)
        except (TypeError, json.decoder.JSONDecodeError):
            # Surface the offending text before re-raising, mirroring
            # the failure output of test_json_openai.
            print("JSONDecodeError", text)
            raise
        self.assertIsInstance(js_obj["name"], str)
        self.assertIsInstance(js_obj["population"], int)

    def test_json_generate(self):
        """Constrained generation through the native /generate endpoint."""
        self.run_decode(json_schema=self.json_schema)

    def test_json_openai(self):
        """Constrained generation through the OpenAI-compatible API."""
        client = openai.Client(api_key="EMPTY", base_url=f"{self.base_url}/v1")

        response = client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": "You are a helpful AI assistant"},
                {"role": "user", "content": "Introduce the capital of France."},
            ],
            temperature=0,
            max_tokens=128,
            response_format={
                "type": "json_schema",
                "json_schema": {"name": "foo", "schema": json.loads(self.json_schema)},
            },
        )
        text = response.choices[0].message.content

        try:
            js_obj = json.loads(text)
        except (TypeError, json.decoder.JSONDecodeError):
            print("JSONDecodeError", text)
            raise
        self.assertIsInstance(js_obj["name"], str)
        self.assertIsInstance(js_obj["population"], int)

    def test_mix_json_and_other(self):
        """Interleave constrained and unconstrained requests concurrently
        to verify both kinds can be served (and batched) together."""
        json_schemas = [None, None, self.json_schema, self.json_schema] * 10

        with ThreadPoolExecutor(len(json_schemas)) as executor:
            # list() forces all futures to complete and re-raises any
            # assertion failure from run_decode.
            list(executor.map(self.run_decode, json_schemas))


# Allow running this file directly (e.g. `python test_json_constrained.py`).
if __name__ == "__main__":
    unittest.main()