"driver/olCompiling/include/md5.hpp" did not exist on "d2315b0dfcd6f31cca4328819eaf60d77e952dd6"
llama.go 4.28 KB
Newer Older
1
2
3
4
5
package llm

import (
	"bytes"
	"context"
6
	_ "embed"
7
8
9
10
	"errors"
	"fmt"
	"os"
	"os/exec"
11
	"sync"
12
13
14
	"time"

	"github.com/jmorganca/ollama/api"
Michael Yang's avatar
Michael Yang committed
15
	"github.com/jmorganca/ollama/format"
16
17
)

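// jsonGrammar is a GBNF grammar (the llama.cpp grammar format) that restricts
// sampling to syntactically valid JSON. The assumption, based on its presence
// here, is that it is forwarded to the llama runner when a request asks for
// JSON-formatted output (see PredictOpts.Format below).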
const jsonGrammar = `
root   ::= object
value  ::= object | array | string | number | ("true" | "false" | "null") ws

object ::=
  "{" ws (
            string ":" ws value
    ("," ws string ":" ws value)*
  )? "}" ws

array  ::=
  "[" ws (
            value
    ("," ws value)*
  )? "]" ws

string ::=
  "\"" (
    [^"\\] |
    "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes
  )* "\"" ws

number ::= ("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws

# Optional space: by convention, applied in this grammar after literal chars when allowed
ws ::= ([ \t\n] ws)?
`

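// llamaModel describes a model in the llama family in terms of the
// hyperparameters read from its model file.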
type llamaModel struct {
	hyperparameters llamaHyperparameters
}

func (llm *llamaModel) ModelFamily() string {
	return "llama"
}

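// llamaModelType maps a llama model's layer count to its conventional
// parameter-size label (e.g. 32 layers -> "7B").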
func llamaModelType(numLayer uint32) string {
	switch numLayer {
	case 26:
		return "3B"
	case 32:
		return "7B"
	case 40:
		return "13B"
	case 48:
		return "34B"
	case 60:
		return "30B"
	case 80:
		return "65B"
	default:
		return "unknown"
	}
}

func (llm *llamaModel) ModelType() string {
	return llamaModelType(llm.hyperparameters.NumLayer)
}

func (llm *llamaModel) FileType() string {
	return fileType(llm.hyperparameters.FileType)
}

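// NumLayers reports the model's layer count as an int64.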
func (llm *llamaModel) NumLayers() int64 {
	return int64(llm.hyperparameters.NumLayer)
}

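// llamaHyperparameters are the architecture fields parsed from a llama model
// file's header.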
type llamaHyperparameters struct {
	// NumVocab is the size of the model's vocabulary.
	NumVocab uint32

	// NumEmbd is the size of the model's embedding layer.
	NumEmbd uint32
	NumMult uint32
	NumHead uint32

	// NumLayer is the number of layers in the model.
	NumLayer uint32
	NumRot   uint32

	// FileType describes the quantization level of the model, e.g. Q4_0, Q5_K, etc.
	FileType uint32
}

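// Running tracks a launched llama runner subprocess: the local port it serves
// on, its command handle and cancel function, and the plumbing used to observe
// its exit status and captured error output.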
type Running struct {
	Port          int
	Cmd           *exec.Cmd
	Cancel        context.CancelFunc
	exitOnce      sync.Once
	exitCh        chan error // channel to receive the exit status of the subprocess
	*StatusWriter            // captures error messages from the llama runner process
}

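// ImageData is an image attached to a request, identified by ID; presumably
// the ID lets the prompt refer to a specific image (multimodal models).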
type ImageData struct {
	Data []byte `json:"data"`
	ID   int    `json:"id"`
}

var (
	errNvidiaSMI     = errors.New("warning: gpu support may not be enabled, check that you have installed GPU drivers: nvidia-smi command failed")
	errAvailableVRAM = errors.New("not enough VRAM available, falling back to CPU only")
	payloadMissing   = fmt.Errorf("expected dynamic library payloads not included in this build of ollama")
)

// StatusWriter is a writer that captures error messages from the llama runner process
type StatusWriter struct {
	ErrCh      chan error
	LastErrMsg string
}

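// NewStatusWriter returns a StatusWriter whose error channel is buffered with
// capacity one, so a single runner error can be reported without blocking.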
func NewStatusWriter() *StatusWriter {
	return &StatusWriter{
		ErrCh: make(chan error, 1),
	}
}

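// Write scans runner output for "error:" or "CUDA error" markers, records the
// most recent message, forwards it on ErrCh, and passes the raw bytes through
// to stderr.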
func (w *StatusWriter) Write(b []byte) (int, error) {
	var errMsg string
	if _, after, ok := bytes.Cut(b, []byte("error:")); ok {
		errMsg = string(bytes.TrimSpace(after))
	} else if _, after, ok := bytes.Cut(b, []byte("CUDA error")); ok {
		errMsg = string(bytes.TrimSpace(after))
	}

	if errMsg != "" {
		w.LastErrMsg = errMsg
		w.ErrCh <- fmt.Errorf("llama runner: %s", errMsg)
	}

	return os.Stderr.Write(b)
}
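
// A minimal usage sketch (not part of this file, and the real launch path may
// differ): attach a StatusWriter to the runner subprocess so error lines show
// up on ErrCh while still being echoed to this process's stderr.
//
//	sw := NewStatusWriter()
//	cmd := exec.CommandContext(ctx, runnerPath, args...) // runnerPath/args are placeholders
//	cmd.Stderr = sw
//	if err := cmd.Start(); err != nil {
//		return err
//	}
//	select {
//	case err := <-sw.ErrCh:
//		log.Printf("llama runner reported: %v", err)
//	default:
//	}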

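// prediction is one chunk of the streamed completion response returned by the
// llama runner, including the runner's token-timing statistics.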
type prediction struct {
	Content string `json:"content"`
	Model   string `json:"model"`
	Prompt  string `json:"prompt"`
	Stop    bool   `json:"stop"`

	Timings struct {
		PredictedN  int     `json:"predicted_n"`
		PredictedMS float64 `json:"predicted_ms"`
		PromptN     int     `json:"prompt_n"`
		PromptMS    float64 `json:"prompt_ms"`
	}
}

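// maxBufferSize bounds how much streamed runner output is buffered at once
// (presumably used to size the response scanner, which is not shown in this
// excerpt).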
const maxBufferSize = 512 * format.KiloByte
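
// maxRetries and retryDelay govern how many times, and how often, a failed
// request to the runner is retried (the retry loop itself is not shown here).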
const maxRetries = 3
const retryDelay = 1 * time.Second

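// PredictOpts carries the per-request inputs for a prediction. Setting Format
// to "json" is assumed to select the jsonGrammar defined above so the runner
// emits valid JSON.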
type PredictOpts struct {
	Prompt string
	Format string
	Images []api.ImageData
}

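// PredictResult is one streamed piece of generated content plus the prompt and
// generation counters/durations derived from the runner's timings.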
type PredictResult struct {
	Content            string
	Done               bool
	PromptEvalCount    int
	PromptEvalDuration time.Duration
	EvalCount          int
	EvalDuration       time.Duration
}
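
// A hedged sketch of how a runner prediction plausibly maps onto a
// PredictResult; the actual conversion happens in the predict loop, which is
// not part of this excerpt. The runner reports timings in milliseconds, so
// they are converted to time.Duration here.
//
//	func toResult(p prediction) PredictResult {
//		return PredictResult{
//			Content:            p.Content,
//			Done:               p.Stop,
//			PromptEvalCount:    p.Timings.PromptN,
//			PromptEvalDuration: time.Duration(p.Timings.PromptMS * float64(time.Millisecond)),
//			EvalCount:          p.Timings.PredictedN,
//			EvalDuration:       time.Duration(p.Timings.PredictedMS * float64(time.Millisecond)),
//		}
//	}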

type TokenizeRequest struct {
	Content string `json:"content"`
}

type TokenizeResponse struct {
	Tokens []int `json:"tokens"`
}

type DetokenizeRequest struct {
	Tokens []int `json:"tokens"`
}

type DetokenizeResponse struct {
	Content string `json:"content"`
}

type EmbeddingRequest struct {
	Content string `json:"content"`
}

type EmbeddingResponse struct {
	Embedding []float64 `json:"embedding"`
}
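
// A minimal sketch (assumptions noted inline) of how these request/response
// types could be exchanged with the running llama runner over HTTP; the real
// client code, endpoint paths, and error handling live elsewhere in this
// package and may differ.
//
//	body, _ := json.Marshal(TokenizeRequest{Content: "hello world"})
//	// assumed endpoint: llama.cpp's server exposes POST /tokenize
//	url := fmt.Sprintf("http://127.0.0.1:%d/tokenize", running.Port)
//	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
//	if err != nil {
//		return err
//	}
//	defer resp.Body.Close()
//	var tr TokenizeResponse
//	if err := json.NewDecoder(resp.Body).Decode(&tr); err != nil {
//		return err
//	}
//	// tr.Tokens now holds the token ids for the content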