"""
Usage:
export GCP_PROJECT_ID=******
python3 gemini_example_complete.py
"""
import sglang as sgl


@sgl.function
def few_shot_qa(s, question):
    """Build a few-shot QA prompt and generate the answer.

    Appends three capital-city example Q/A pairs, then the user's
    question, and generates the answer with ``sgl.gen``.

    Args:
        s: The sglang prompt state being built (supplied by the decorator).
        question: The question text to append after the few-shot examples.
    """
    s += (
"""The following are questions with answers.
Q: What is the capital of France?
A: Paris
Q: What is the capital of Germany?
A: Berlin
Q: What is the capital of Italy?
A: Rome
""")
    s += "Q: " + question + "\n"
    # Stop at the newline so only a single answer line is generated;
    # temperature=0 keeps the output deterministic.
    s += "A:" + sgl.gen("answer", stop="\n", temperature=0)


def single():
    """Run one QA request, sanity-check the answer, and print the full text."""
    state = few_shot_qa.run(question="What is the capital of the United States?")
    normalized = state["answer"].strip().lower()
    assert "washington" in normalized, f"answer: {state['answer']}"
    print(state.text())


def stream():
    """Stream the generated answer chunk-by-chunk to stdout."""
    state = few_shot_qa.run(
        question="What is the capital of the United States?", stream=True
    )
    for chunk in state.text_iter("answer"):
        print(chunk, end="", flush=True)
    print()


def batch():
    """Run two questions in a single batched call and print each answer."""
    questions = [
        "What is the capital of the United States?",
        "What is the capital of China?",
    ]
    states = few_shot_qa.run_batch([{"question": q} for q in questions])
    for state in states:
        print(state["answer"])


if __name__ == "__main__":
    # Requires GCP_PROJECT_ID in the environment (see module docstring).
    sgl.set_default_backend(sgl.VertexAI("gemini-pro"))

    # Run a single request
    print("\n========== single ==========\n")
    single()

    # Stream output
    print("\n========== stream ==========\n")
    stream()

    # Run a batch of requests
    print("\n========== batch ==========\n")
    batch()