Commit 697bea69 authored by Daniel Hiltgen

Guard integration tests with a tag

This should help CI avoid running the integration test logic in a
container where it's not currently possible.
parent 10da41d6
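
For context, the guard relies on Go build constraints: a //go:build integration line at the top of a file excludes it from ordinary go build and go test runs, so the tagged tests only compile when -tags=integration is passed, which is what lets CI skip them in containers where they cannot run. A minimal sketch of the pattern (this file is illustrative, not part of the commit):

//go:build integration

package server

import "testing"

// TestFullStack only compiles when the build is invoked with
// -tags=integration, e.g. go test -tags=integration ./server.
// A plain go test ./... never sees this file.
func TestFullStack(t *testing.T) {
	t.Log("running full-stack integration test")
}
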
@@ -2,6 +2,9 @@
 # This script sets up integration tests which run the full stack to verify
 # inference locally
+#
+# To run the relevant tests use
+# go test -tags=integration ./server
 set -e
 set -o pipefail
@@ -29,7 +32,7 @@ for model in ${TEST_MODELS[@]}; do
     -o ${OLLAMA_MODELS}/blobs/${CFG_HASH} \
     ${REGISTRY_SCHEME}://${REGISTRY}/v2/${TEST_MODEL}/blobs/${CFG_HASH}
-  for LAYER in $(cat ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} | jq -r ".layers[].digest" ) ; do
+  for LAYER in $(cat ${OLLAMA_MODELS}/manifests/${REGISTRY}/${TEST_MODEL}/${TEST_MODEL_TAG} | jq -r ".layers[].digest"); do
     echo "Pulling blob ${LAYER}"
     curl -L -C - --header "${ACCEPT_HEADER}" \
       -o ${OLLAMA_MODELS}/blobs/${LAYER} \
...
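
The loop above reads the pulled manifest and downloads each layer blob; jq -r ".layers[].digest" extracts one digest per layer. The same extraction in Go might look like the sketch below (the manifest struct is an assumption inferred from the jq filter, not taken from the repo):

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"
)

// manifest mirrors only the slice of the registry manifest that the
// script reads; the real schema carries more fields.
type manifest struct {
	Layers []struct {
		Digest string `json:"digest"`
	} `json:"layers"`
}

func main() {
	if len(os.Args) < 2 {
		log.Fatal("usage: layers <manifest-file>")
	}
	data, err := os.ReadFile(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	var m manifest
	if err := json.Unmarshal(data, &m); err != nil {
		log.Fatal(err)
	}
	for _, layer := range m.Layers {
		fmt.Println(layer.Digest) // equivalent of jq -r ".layers[].digest"
	}
}
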
+//go:build integration
+
 package server
 
 import (
...
+//go:build integration
+
 package server
 
 import (
...
+//go:build integration
+
 package server
 
 import (
@@ -38,7 +40,7 @@ func PrepareModelForPrompts(t *testing.T, modelName string, opts api.Options) (*
 }
 
 func OneShotPromptResponse(t *testing.T, ctx context.Context, req api.GenerateRequest, model *Model, runner llm.LLM) string {
-	prompt, err := model.Prompt(PromptVars{
+	prompt, err := model.PreResponsePrompt(PromptVars{
 		System: req.System,
 		Prompt: req.Prompt,
 		First: len(req.Context) == 0,
@@ -54,6 +56,7 @@ func OneShotPromptResponse(t *testing.T, ctx context.Context, req api.GenerateRe
 			success <- true
 		}
 	}
 	predictReq := llm.PredictOpts{
 		Prompt: prompt,
 		Format: req.Format,
...
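
The last hunk sits inside the response callback of OneShotPromptResponse: when the runner reports a complete response, the helper signals success <- true and the test goes on to build predictReq. The channel-plus-context shape of that helper, reduced to a standalone sketch (predict and its callback are simplified stand-ins, not the actual llm package API):

package main

import (
	"context"
	"fmt"
	"time"
)

// predict stands in for a streaming predict call: it invokes the
// callback once the (simulated) response is complete.
func predict(ctx context.Context, onDone func()) {
	time.Sleep(10 * time.Millisecond) // pretend to generate tokens
	onDone()
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	success := make(chan bool, 1)
	go predict(ctx, func() {
		success <- true // the same signal the test helper sends
	})

	select {
	case <-success:
		fmt.Println("got a complete response")
	case <-ctx.Done():
		fmt.Println("timed out waiting for the model")
	}
}
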