ollama (OpenDAS) · Commit 58e3fff3

rename templates to template

Authored Jun 10, 2024 by Michael Yang
Parent: 3f0b309a
Changes: 27 files in total. Showing 20 changed files with 53 additions and 34 deletions (+53, -34).
server/images.go                          +11 -13
server/model.go                           +2  -2
server/prompt.go                          +7  -11
server/prompt_test.go                     +13 -2
server/routes.go                          +20 -6
template/alfred.gotmpl                    +0  -0
template/alpaca.gotmpl                    +0  -0
template/chatml.gotmpl                    +0  -0
template/chatqa.gotmpl                    +0  -0
template/codellama-70b-instruct.gotmpl    +0  -0
template/falcon-instruct.gotmpl           +0  -0
template/gemma-instruct.gotmpl            +0  -0
template/granite-instruct.gotmpl          +0  -0
template/index.json                       +0  -0
template/llama2-chat.gotmpl               +0  -0
template/llama3-instruct.gotmpl           +0  -0
template/magicoder.gotmpl                 +0  -0
template/mistral-instruct.gotmpl          +0  -0
template/openchat.gotmpl                  +0  -0
template/phi-3.gotmpl                     +0  -0
server/images.go

@@ -28,6 +28,7 @@ import (
     "github.com/ollama/ollama/format"
     "github.com/ollama/ollama/llm"
     "github.com/ollama/ollama/parser"
+    "github.com/ollama/ollama/template"
     "github.com/ollama/ollama/types/errtypes"
     "github.com/ollama/ollama/types/model"
     "github.com/ollama/ollama/version"

@@ -48,12 +49,13 @@ type Model struct {
     ParentModel    string
     AdapterPaths   []string
     ProjectorPaths []string
-    Template       string
     System         string
     License        []string
     Digest         string
     Options        map[string]interface{}
     Messages       []Message
+
+    Template *template.Template
 }

 func (m *Model) IsEmbedding() bool {

@@ -82,10 +84,10 @@ func (m *Model) String() string {
         })
     }

-    if m.Template != "" {
+    if m.Template != nil {
         modelfile.Commands = append(modelfile.Commands, parser.Command{
             Name: "template",
-            Args: m.Template,
+            Args: m.Template.String(),
         })
     }

@@ -191,8 +193,7 @@ func GetModel(name string) (*Model, error) {
         Name:      mp.GetFullTagname(),
         ShortName: mp.GetShortTagname(),
         Digest:    digest,
-        Template:  "{{ .Prompt }}",
         License:   []string{},
+        Template:  template.DefaultTemplate,
     }

     filename, err := GetBlobsPath(manifest.Config.Digest)

@@ -228,27 +229,24 @@ func GetModel(name string) (*Model, error) {
             model.AdapterPaths = append(model.AdapterPaths, filename)
         case "application/vnd.ollama.image.projector":
             model.ProjectorPaths = append(model.ProjectorPaths, filename)
-        case "application/vnd.ollama.image.template":
+        case "application/vnd.ollama.image.prompt", "application/vnd.ollama.image.template":
             bts, err := os.ReadFile(filename)
             if err != nil {
                 return nil, err
             }

-            model.Template = string(bts)
-        case "application/vnd.ollama.image.system":
-            bts, err := os.ReadFile(filename)
+            model.Template, err = template.Parse(string(bts))
             if err != nil {
                 return nil, err
             }

-            model.System = string(bts)
-        case "application/vnd.ollama.image.prompt":
+        case "application/vnd.ollama.image.system":
             bts, err := os.ReadFile(filename)
             if err != nil {
                 return nil, err
             }

-            model.Template = string(bts)
+            model.System = string(bts)
         case "application/vnd.ollama.image.params":
             params, err := os.Open(filename)
             if err != nil {
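A note on the server/images.go change above: Model now carries a parsed *template.Template rather than the raw template string, so parsing happens once in GetModel and (*Model).String() serializes it back with String(). The sketch below shows that round trip in isolation; the blob path is a made-up placeholder, while template.Parse, template.DefaultTemplate, and String() are the identifiers visible in the diff.

package main

import (
    "fmt"
    "os"

    "github.com/ollama/ollama/template"
)

func main() {
    // Placeholder path standing in for a blob with media type
    // "application/vnd.ollama.image.template"; not part of the commit.
    bts, readErr := os.ReadFile("/tmp/example-template-blob")

    // Mirror GetModel: fall back to template.DefaultTemplate when no
    // template layer is present.
    tmpl := template.DefaultTemplate
    if readErr == nil {
        var err error
        // template.Parse replaces the old raw-string assignment
        // (model.Template = string(bts)).
        tmpl, err = template.Parse(string(bts))
        if err != nil {
            fmt.Fprintln(os.Stderr, "parse template:", err)
            os.Exit(1)
        }
    }

    // String() round-trips the template text, which is how
    // (*Model).String() now emits the TEMPLATE command.
    fmt.Println(tmpl.String())
}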
server/model.go

@@ -16,7 +16,7 @@ import (
     "github.com/ollama/ollama/api"
     "github.com/ollama/ollama/convert"
     "github.com/ollama/ollama/llm"
-    "github.com/ollama/ollama/templates"
+    "github.com/ollama/ollama/template"
     "github.com/ollama/ollama/types/model"
 )

@@ -258,7 +258,7 @@ func parseFromFile(ctx context.Context, file *os.File, digest string, fn func(ap
 func detectChatTemplate(layers []*layerGGML) ([]*layerGGML, error) {
     for _, layer := range layers {
         if s := layer.GGML.KV().ChatTemplate(); s != "" {
-            if t, err := templates.NamedTemplate(s); err != nil {
+            if t, err := template.Named(s); err != nil {
                 slog.Debug("template detection", "error", err)
             } else {
                 tmpl, err := NewLayer(t.Reader(), "application/vnd.ollama.image.template")
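The server/model.go change above swaps templates.NamedTemplate for template.Named when detecting a chat template embedded in a GGUF's metadata. A minimal sketch of that lookup, assuming Named matches a raw chat-template string against the bundled templates and that Reader() returns an io.Reader (as its use with NewLayer suggests); the input string here is a stand-in, not a real chat template.

package main

import (
    "fmt"
    "io"
    "os"

    "github.com/ollama/ollama/template"
)

func main() {
    // Stand-in for the value of a GGUF's tokenizer.chat_template key,
    // which detectChatTemplate reads via layer.GGML.KV().ChatTemplate().
    s := "{% for message in messages %}...{% endfor %}"

    // template.Named (formerly templates.NamedTemplate) tries to match the
    // raw string against the templates bundled under template/.
    t, err := template.Named(s)
    if err != nil {
        fmt.Fprintln(os.Stderr, "no bundled template matched:", err)
        return
    }

    // The server wraps this reader in a new layer with media type
    // "application/vnd.ollama.image.template".
    if _, err := io.Copy(os.Stdout, t.Reader()); err != nil {
        fmt.Fprintln(os.Stderr, err)
    }
}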
server/prompt.go

@@ -4,10 +4,11 @@ import (
     "fmt"
     "log/slog"
     "strings"
-    "text/template"
     "text/template/parse"

     "github.com/ollama/ollama/api"
+    "github.com/ollama/ollama/template"
 )

 // isResponseNode checks if the node contains .Response

@@ -53,13 +54,8 @@ func formatTemplateForResponse(tmpl *template.Template, generate bool) {

 // Prompt renders a prompt from a template. If generate is set to true,
 // the response and parts of the template following it are not rendered
-func Prompt(tmpl, system, prompt, response string, generate bool) (string, error) {
-    parsed, err := template.New("").Option("missingkey=zero").Parse(tmpl)
-    if err != nil {
-        return "", err
-    }
-
-    formatTemplateForResponse(parsed, generate)
+func Prompt(tmpl *template.Template, system, prompt, response string, generate bool) (string, error) {
+    formatTemplateForResponse(tmpl, generate)

     vars := map[string]any{
         "System": system,

@@ -68,14 +64,14 @@ func Prompt(tmpl, system, prompt, response string, generate bool) (string, error
     }

     var sb strings.Builder
-    if err := parsed.Execute(&sb, vars); err != nil {
+    if err := tmpl.Execute(&sb, vars); err != nil {
         return "", err
     }

     return sb.String(), nil
 }

-func countTokens(tmpl string, system string, prompt string, response string, encode func(string) ([]int, error)) (int, error) {
+func countTokens(tmpl *template.Template, system string, prompt string, response string, encode func(string) ([]int, error)) (int, error) {
     rendered, err := Prompt(tmpl, system, prompt, response, false)
     if err != nil {
         return 0, err

@@ -91,7 +87,7 @@ func countTokens(tmpl string, system string, prompt string, response string, enc
 }

 // ChatPrompt builds up a prompt from a series of messages, truncating based on context window size
-func ChatPrompt(tmpl string, messages []api.Message, window int, encode func(string) ([]int, error)) (string, error) {
+func ChatPrompt(tmpl *template.Template, messages []api.Message, window int, encode func(string) ([]int, error)) (string, error) {
     type prompt struct {
         System string
         Prompt string
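With the server/prompt.go change above, Prompt, countTokens, and ChatPrompt no longer parse template text themselves; callers parse once and reuse the result. A minimal sketch of the new calling convention, written as if it lived in the server package; buildGeneratePrompt is a hypothetical helper, not part of the commit.

package server

import (
    "github.com/ollama/ollama/template"
)

// buildGeneratePrompt is a hypothetical helper illustrating the pattern:
// parse the template text once, then hand the *template.Template to Prompt.
func buildGeneratePrompt(raw, system, prompt string) (string, error) {
    // template.Parse replaces the template.New("").Option("missingkey=zero").Parse(tmpl)
    // call that Prompt performed internally before this commit.
    tmpl, err := template.Parse(raw)
    if err != nil {
        return "", err
    }

    // generate=true renders only up to the response section, as before.
    return Prompt(tmpl, system, prompt, "", true)
}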
server/prompt_test.go

@@ -5,6 +5,7 @@ import (
     "testing"

     "github.com/ollama/ollama/api"
+    "github.com/ollama/ollama/template"
 )

 func TestPrompt(t *testing.T) {

@@ -61,7 +62,12 @@ func TestPrompt(t *testing.T) {
     for _, tc := range tests {
         t.Run(tc.name, func(t *testing.T) {
-            got, err := Prompt(tc.template, tc.system, tc.prompt, tc.response, tc.generate)
+            tmpl, err := template.Parse(tc.template)
+            if err != nil {
+                t.Fatal(err)
+            }
+
+            got, err := Prompt(tmpl, tc.system, tc.prompt, tc.response, tc.generate)
             if err != nil {
                 t.Errorf("error = %v", err)
             }

@@ -192,7 +198,12 @@ func TestChatPrompt(t *testing.T) {
     for _, tc := range tests {
         t.Run(tc.name, func(t *testing.T) {
-            got, err := ChatPrompt(tc.template, tc.messages, tc.window, encode)
+            tmpl, err := template.Parse(tc.template)
+            if err != nil {
+                t.Fatal(err)
+            }
+
+            got, err := ChatPrompt(tmpl, tc.messages, tc.window, encode)
             if err != nil {
                 t.Errorf("error = %v", err)
             }
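The test changes above apply the same pattern per case: template.Parse first, t.Fatal on a parse failure, then call the updated function. A condensed, standalone sketch of that flow; the test name and template text are made up, and the only assertion is that something was rendered.

package server

import (
    "testing"

    "github.com/ollama/ollama/template"
)

// TestPromptParseOnce is a hypothetical test, not part of the commit.
func TestPromptParseOnce(t *testing.T) {
    tmpl, err := template.Parse("{{ .System }} {{ .Prompt }}")
    if err != nil {
        t.Fatal(err)
    }

    got, err := Prompt(tmpl, "system", "prompt", "", true)
    if err != nil {
        t.Errorf("error = %v", err)
    }

    if got == "" {
        t.Error("expected a rendered prompt, got an empty string")
    }
}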
server/routes.go

@@ -31,6 +31,7 @@ import (
     "github.com/ollama/ollama/llm"
     "github.com/ollama/ollama/openai"
     "github.com/ollama/ollama/parser"
+    "github.com/ollama/ollama/template"
     "github.com/ollama/ollama/types/errtypes"
     "github.com/ollama/ollama/types/model"
     "github.com/ollama/ollama/version"

@@ -161,6 +162,12 @@ func (s *Server) GenerateHandler(c *gin.Context) {
         return
     }

+    tmpl, err := template.Parse(req.Template)
+    if err != nil {
+        c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
+        return
+    }
+
     checkpointLoaded := time.Now()

     var prompt string

@@ -169,7 +176,11 @@ func (s *Server) GenerateHandler(c *gin.Context) {
         prompt = req.Prompt
     case req.Prompt != "":
         if req.Template == "" {
-            req.Template = model.Template
+            model.Template, err = template.Parse(req.Template)
+            if err != nil {
+                c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
+                return
+            }
         }

         if req.System == "" {

@@ -187,7 +198,7 @@ func (s *Server) GenerateHandler(c *gin.Context) {
         sb.WriteString(req.Prompt)

-        p, err := Prompt(req.Template, req.System, sb.String(), "", true)
+        p, err := Prompt(tmpl, req.System, sb.String(), "", true)
         if err != nil {
             c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
             return

@@ -242,7 +253,7 @@ func (s *Server) GenerateHandler(c *gin.Context) {
         resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)

         if !req.Raw {
-            p, err := Prompt(req.Template, req.System, req.Prompt, generated.String(), false)
+            p, err := Prompt(tmpl, req.System, req.Prompt, generated.String(), false)
             if err != nil {
                 c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
                 return

@@ -680,7 +691,10 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
     }

     if req.Template != "" {
-        m.Template = req.Template
+        m.Template, err = template.Parse(req.Template)
+        if err != nil {
+            return nil, err
+        }
     }

     msgs := make([]api.Message, 0)

@@ -701,7 +715,7 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
     resp := &api.ShowResponse{
         License:    strings.Join(m.License, "\n"),
         System:     m.System,
-        Template:   m.Template,
+        Template:   m.Template.String(),
         Details:    modelDetails,
         Messages:   msgs,
         ModifiedAt: manifest.fi.ModTime(),

@@ -1246,7 +1260,7 @@ func (s *Server) ProcessHandler(c *gin.Context) {
 }

 // ChatPrompt builds up a prompt from a series of messages for the currently `loaded` model
-func chatPrompt(ctx context.Context, runner *runnerRef, template string, messages []api.Message, numCtx int) (string, error) {
+func chatPrompt(ctx context.Context, runner *runnerRef, template *template.Template, messages []api.Message, numCtx int) (string, error) {
     encode := func(s string) ([]int, error) {
         return runner.llama.Tokenize(ctx, s)
     }
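In server/routes.go above, GenerateHandler now parses req.Template up front and passes the resulting *template.Template to Prompt, so parse errors surface once per request rather than inside every render. A minimal sketch of that resolution step, assuming the Model field from server/images.go; resolveTemplate is a hypothetical helper that simplifies the handler's switch, not code from the commit.

package server

import (
    "github.com/ollama/ollama/template"
)

// resolveTemplate prefers a template supplied on the request and falls back
// to the template already parsed and stored on the model.
func resolveTemplate(reqTemplate string, m *Model) (*template.Template, error) {
    if reqTemplate != "" {
        // Parse errors are reported here, at request time.
        return template.Parse(reqTemplate)
    }
    return m.Template, nil
}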
templates/alfred.gotmpl → template/alfred.gotmpl (File moved)
templates/alpaca.gotmpl → template/alpaca.gotmpl (File moved)
templates/chatml.gotmpl → template/chatml.gotmpl (File moved)
templates/chatqa.gotmpl → template/chatqa.gotmpl (File moved)
templates/codellama-70b-instruct.gotmpl → template/codellama-70b-instruct.gotmpl (File moved)
templates/falcon-instruct.gotmpl → template/falcon-instruct.gotmpl (File moved)
templates/gemma-instruct.gotmpl → template/gemma-instruct.gotmpl (File moved)
templates/granite-instruct.gotmpl → template/granite-instruct.gotmpl (File moved)
templates/index.json → template/index.json (File moved)
templates/llama2-chat.gotmpl → template/llama2-chat.gotmpl (File moved)
templates/llama3-instruct.gotmpl → template/llama3-instruct.gotmpl (File moved)
templates/magicoder.gotmpl → template/magicoder.gotmpl (File moved)
templates/mistral-instruct.gotmpl → template/mistral-instruct.gotmpl (File moved)
templates/openchat.gotmpl → template/openchat.gotmpl (File moved)
templates/phi-3.gotmpl → template/phi-3.gotmpl (File moved)