Unverified Commit 17e998f1 authored by Byron Hsu, committed by GitHub

Test consistency for single and batch separately (#1590)

parent c98e84c2

@@ -43,16 +43,29 @@ class TestPyTorchSamplingBackend(unittest.TestCase):
         assert metrics["score"] >= 0.65
 
     def test_greedy(self):
-        response_single = requests.post(
-            self.base_url + "/generate",
-            json={
-                "text": "The capital of France is",
-                "sampling_params": {
-                    "temperature": 0,
-                    "max_new_tokens": 32,
-                },
-            },
-        ).json()
+
+        first_text = None
+
+        # ensure the answer is identical across single response
+        for _ in range(5):
+            response_single = requests.post(
+                self.base_url + "/generate",
+                json={
+                    "text": "The capital of France is",
+                    "sampling_params": {
+                        "temperature": 0,
+                        "max_new_tokens": 32,
+                    },
+                },
+            ).json()
+            text = response_single["text"]
+            if first_text is None:
+                first_text = text
+            assert text == first_text, f'"{text}" is not identical to "{first_text}"'
+
+        first_text = None
+
         response_batch = requests.post(
             self.base_url + "/generate",
             json={
@@ -63,10 +76,13 @@ class TestPyTorchSamplingBackend(unittest.TestCase):
                 },
             },
         ).json()
-        text = response_single["text"]
-        print(text)
+
+        # ensure the answer is identical among the batch
         for i in range(10):
-            assert response_batch[i]["text"] == text
+            text = response_batch[i]["text"]
+            if first_text is None:
+                first_text = text
+            assert text == first_text, f'"{text}" is not identical to "{first_text}"'
 
 
 if __name__ == "__main__":
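
The test relies on greedy decoding (temperature=0) being deterministic: repeated single requests, and every item of a batched request, must return the same text. Below is a minimal standalone sketch of the same check that can be run by hand against an already running server. The server address and the batched payload shape (a list of 10 identical prompts under "text") are assumptions, since that part of the request body is collapsed in the diff; the endpoint, prompt, and sampling parameters come from the test above.

# Minimal standalone sketch of the consistency checks in this commit.
# Assumptions: a server is already listening at BASE_URL (address is
# hypothetical), and a batched request is a list of prompts under "text"
# (that part of the request body is collapsed in the diff above).
import requests

BASE_URL = "http://127.0.0.1:30000"  # assumed host/port of a running server

sampling_params = {"temperature": 0, "max_new_tokens": 32}  # greedy decoding

# Repeated single requests with temperature=0 should all return the same text.
single_texts = [
    requests.post(
        BASE_URL + "/generate",
        json={"text": "The capital of France is", "sampling_params": sampling_params},
    ).json()["text"]
    for _ in range(5)
]
assert all(t == single_texts[0] for t in single_texts), single_texts

# Every item of a batched request with the same prompt should also match.
batch = requests.post(
    BASE_URL + "/generate",
    json={
        "text": ["The capital of France is"] * 10,
        "sampling_params": sampling_params,
    },
).json()
batch_texts = [item["text"] for item in batch]
assert all(t == batch_texts[0] for t in batch_texts), batch_texts

Checking the two cases in separate loops, as the commit title says, lets a failure point directly at whichever path (single request or batch) produced the divergent text.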