{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Sending Requests\n",
    "This notebook provides a quick-start guide to using SGLang for chat completions after installation.\n",
    "\n",
    "- For Vision Language Models, see [OpenAI APIs - Vision](../backend/openai_api_vision.ipynb).\n",
    "- For Embedding Models, see [OpenAI APIs - Embedding](../backend/openai_api_embeddings.ipynb) and [Encode (embedding model)](../backend/native_api.html#Encode-(embedding-model)).\n",
    "- For Reward Models, see [Classify (reward model)](../backend/native_api.html#Classify-(reward-model))."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Launch A Server"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sglang.test.test_utils import is_in_ci\n",
    "from sglang.utils import wait_for_server, print_highlight, terminate_process\n",
    "\n",
    "if is_in_ci():\n",
    "    from patch import launch_server_cmd\n",
    "else:\n",
    "    from sglang.utils import launch_server_cmd\n",
    "\n",
    "# This is equivalent to running the following command in your terminal\n",
    "\n",
    "# python -m sglang.launch_server --model-path meta-llama/Meta-Llama-3.1-8B-Instruct --host 0.0.0.0\n",
    "\n",
    "server_process, port = launch_server_cmd(\n",
    "    \"\"\"\n",
    "python -m sglang.launch_server --model-path meta-llama/Meta-Llama-3.1-8B-Instruct \\\n",
    " --host 0.0.0.0\n",
    "\"\"\"\n",
    ")\n",
    "\n",
    "wait_for_server(f\"http://localhost:{port}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using cURL\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import subprocess, json\n",
    "\n",
    "curl_command = f\"\"\"\n",
    "curl -s http://localhost:{port}/v1/chat/completions \\\n",
    "  -H \"Content-Type: application/json\" \\\n",
    "  -d '{{\"model\": \"meta-llama/Meta-Llama-3.1-8B-Instruct\", \"messages\": [{{\"role\": \"user\", \"content\": \"What is the capital of France?\"}}]}}'\n",
    "\"\"\"\n",
    "\n",
    "response = json.loads(subprocess.check_output(curl_command, shell=True))\n",
    "print_highlight(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using Python Requests"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests\n",
    "\n",
    "url = f\"http://localhost:{port}/v1/chat/completions\"\n",
    "\n",
    "data = {\n",
    "    \"model\": \"meta-llama/Meta-Llama-3.1-8B-Instruct\",\n",
    "    \"messages\": [{\"role\": \"user\", \"content\": \"What is the capital of France?\"}],\n",
    "}\n",
    "\n",
    "response = requests.post(url, json=data)\n",
    "print_highlight(response.json())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using OpenAI Python Client"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import openai\n",
    "\n",
    "client = openai.Client(base_url=f\"http://127.0.0.1:{port}/v1\", api_key=\"None\")\n",
    "\n",
    "response = client.chat.completions.create(\n",
    "    model=\"meta-llama/Meta-Llama-3.1-8B-Instruct\",\n",
    "    messages=[\n",
    "        {\"role\": \"user\", \"content\": \"List 3 countries and their capitals.\"},\n",
    "    ],\n",
    "    temperature=0,\n",
    "    max_tokens=64,\n",
    ")\n",
    "print_highlight(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Streaming"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import openai\n",
    "\n",
    "client = openai.Client(base_url=f\"http://127.0.0.1:{port}/v1\", api_key=\"None\")\n",
    "\n",
    "# Use stream=True for streaming responses\n",
    "response = client.chat.completions.create(\n",
    "    model=\"meta-llama/Meta-Llama-3.1-8B-Instruct\",\n",
    "    messages=[\n",
    "        {\"role\": \"user\", \"content\": \"List 3 countries and their capitals.\"},\n",
    "    ],\n",
    "    temperature=0,\n",
    "    max_tokens=64,\n",
    "    stream=True,\n",
    ")\n",
    "\n",
    "# Handle the streaming output\n",
    "for chunk in response:\n",
    "    if chunk.choices[0].delta.content:\n",
    "        print(chunk.choices[0].delta.content, end=\"\", flush=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Using Native Generation APIs\n",
    "\n",
    "You can also use the native `/generate` endpoint with requests, which provides more flexibility. An API reference is available at [Sampling Parameters](../references/sampling_params.md)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests\n",
    "\n",
    "response = requests.post(\n",
    "    f\"http://localhost:{port}/generate\",\n",
    "    json={\n",
    "        \"text\": \"The capital of France is\",\n",
    "        \"sampling_params\": {\n",
    "            \"temperature\": 0,\n",
    "            \"max_new_tokens\": 32,\n",
    "        },\n",
    "    },\n",
    ")\n",
    "\n",
    "print_highlight(response.json())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Streaming"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests, json\n",
    "\n",
    "response = requests.post(\n",
    "    f\"http://localhost:{port}/generate\",\n",
    "    json={\n",
    "        \"text\": \"The capital of France is\",\n",
    "        \"sampling_params\": {\n",
    "            \"temperature\": 0,\n",
    "            \"max_new_tokens\": 32,\n",
    "        },\n",
    "        \"stream\": True,\n",
    "    },\n",
    "    stream=True,\n",
    ")\n",
    "\n",
    "prev = 0\n",
    "for chunk in response.iter_lines(decode_unicode=False):\n",
    "    chunk = chunk.decode(\"utf-8\")\n",
    "    if chunk and chunk.startswith(\"data:\"):\n",
    "        if chunk == \"data: [DONE]\":\n",
    "            break\n",
    "        data = json.loads(chunk[5:].strip(\"\\n\"))\n",
    "        output = data[\"text\"]\n",
    "        print(output[prev:], end=\"\", flush=True)\n",
    "        prev = len(output)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "terminate_process(server_process)"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}