{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Sending Requests\n",
    "This notebook provides a quick-start guide to use SGLang in chat completions after installation.\n",
    "\n",
    "- For Vision Language Models, see [OpenAI APIs - Vision](openai_api_vision.ipynb).\n",
    "- For Embedding Models, see [OpenAI APIs - Embedding](openai_api_embeddings.ipynb) and [Encode (embedding model)](native_api.html#Encode-(embedding-model)).\n",
    "- For Reward Models, see [Classify (reward model)](native_api.html#Classify-(reward-model))."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Launch A Server"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sglang.test.doc_patch import launch_server_cmd\n",
    "from sglang.utils import wait_for_server, print_highlight, terminate_process\n",
    "\n",
    "# This is equivalent to running the following command in your terminal\n",
    "# python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0\n",
    "\n",
    "server_process, port = launch_server_cmd(\n",
    "    \"\"\"\n",
    "python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct \\\n",
    " --host 0.0.0.0\n",
    "\"\"\"\n",
    ")\n",
    "\n",
    "wait_for_server(f\"http://localhost:{port}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using cURL\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import subprocess, json\n",
    "\n",
    "curl_command = f\"\"\"\n",
    "curl -s http://localhost:{port}/v1/chat/completions \\\n",
    "  -H \"Content-Type: application/json\" \\\n",
    "  -d '{{\"model\": \"qwen/qwen2.5-0.5b-instruct\", \"messages\": [{{\"role\": \"user\", \"content\": \"What is the capital of France?\"}}]}}'\n",
    "\"\"\"\n",
    "\n",
    "response = json.loads(subprocess.check_output(curl_command, shell=True))\n",
    "print_highlight(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using Python Requests"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests\n",
    "\n",
    "url = f\"http://localhost:{port}/v1/chat/completions\"\n",
    "\n",
    "data = {\n",
    "    \"model\": \"qwen/qwen2.5-0.5b-instruct\",\n",
    "    \"messages\": [{\"role\": \"user\", \"content\": \"What is the capital of France?\"}],\n",
    "}\n",
    "\n",
    "response = requests.post(url, json=data)\n",
    "print_highlight(response.json())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using OpenAI Python Client"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import openai\n",
    "\n",
    "client = openai.Client(base_url=f\"http://127.0.0.1:{port}/v1\", api_key=\"None\")\n",
    "\n",
    "response = client.chat.completions.create(\n",
    "    model=\"qwen/qwen2.5-0.5b-instruct\",\n",
    "    messages=[\n",
    "        {\"role\": \"user\", \"content\": \"List 3 countries and their capitals.\"},\n",
    "    ],\n",
    "    temperature=0,\n",
    "    max_tokens=64,\n",
    ")\n",
    "print_highlight(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Streaming"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import openai\n",
    "\n",
    "client = openai.Client(base_url=f\"http://127.0.0.1:{port}/v1\", api_key=\"None\")\n",
    "\n",
    "# Use stream=True for streaming responses\n",
    "response = client.chat.completions.create(\n",
    "    model=\"qwen/qwen2.5-0.5b-instruct\",\n",
    "    messages=[\n",
    "        {\"role\": \"user\", \"content\": \"List 3 countries and their capitals.\"},\n",
    "    ],\n",
    "    temperature=0,\n",
    "    max_tokens=64,\n",
    "    stream=True,\n",
    ")\n",
    "\n",
    "# Handle the streaming output\n",
    "for chunk in response:\n",
    "    if chunk.choices[0].delta.content:\n",
    "        print(chunk.choices[0].delta.content, end=\"\", flush=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using Native Generation APIs\n",
    "\n",
    "You can also use the native `/generate` endpoint with requests, which provides more flexibility. An API reference is available at [Sampling Parameters](sampling_params.md)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests\n",
    "\n",
    "response = requests.post(\n",
    "    f\"http://localhost:{port}/generate\",\n",
    "    json={\n",
    "        \"text\": \"The capital of France is\",\n",
    "        \"sampling_params\": {\n",
    "            \"temperature\": 0,\n",
    "            \"max_new_tokens\": 32,\n",
    "        },\n",
    "    },\n",
    ")\n",
    "\n",
    "print_highlight(response.json())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Streaming"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests, json\n",
    "\n",
    "response = requests.post(\n",
    "    f\"http://localhost:{port}/generate\",\n",
    "    json={\n",
    "        \"text\": \"The capital of France is\",\n",
    "        \"sampling_params\": {\n",
    "            \"temperature\": 0,\n",
    "            \"max_new_tokens\": 32,\n",
    "        },\n",
    "        \"stream\": True,\n",
    "    },\n",
    "    stream=True,\n",
    ")\n",
    "\n",
    "prev = 0\n",
    "for chunk in response.iter_lines(decode_unicode=False):\n",
    "    chunk = chunk.decode(\"utf-8\")\n",
    "    if chunk and chunk.startswith(\"data:\"):\n",
    "        if chunk == \"data: [DONE]\":\n",
    "            break\n",
    "        data = json.loads(chunk[5:].strip(\"\\n\"))\n",
    "        output = data[\"text\"]\n",
    "        print(output[prev:], end=\"\", flush=True)\n",
    "        prev = len(output)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "terminate_process(server_process)"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}