OpenDAS / ollama · commit 0e886595

Fix tests and drift from main

Authored Mar 07, 2025 by Jesse Gross
Committed Mar 11, 2025 by Michael Yang
Parent: c62861f4
Showing 3 changed files with 6 additions and 2 deletions (+6 -2):

  kvcache/causal_test.go             +4 -0
  model/models/gemma2/model.go       +1 -1
  model/models/mllama/model_text.go  +1 -1
kvcache/causal_test.go

@@ -499,6 +499,10 @@ func (t *testTensor) Contiguous(ctx ml.Context) ml.Tensor {
 	panic("not implemented")
 }
 
+func (t *testTensor) Set(ctx ml.Context, t2 ml.Tensor, offset int, strides ...int) ml.Tensor {
+	panic("not implemented")
+}
+
 func (t *testTensor) Pad(ctx ml.Context, shape ...int) ml.Tensor {
 	panic("not implemented")
 }
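For context, the added Set stub follows the usual Go pattern for test doubles: the test type has to implement every method of the interface it stands in for, so when a new method (here Set) lands on ml.Tensor, a panicking stub must be added to keep the test file compiling. A minimal, self-contained sketch of that pattern follows; the Tensor interface and stubTensor type are illustrative stand-ins, not ollama's actual ml API.

package main

import "fmt"

// Tensor is an illustrative stand-in for an interface like ml.Tensor; it is
// not ollama's actual API, just enough to show the pattern.
type Tensor interface {
	Contiguous() Tensor
	Set(t2 Tensor, offset int, strides ...int) Tensor
	Pad(shape ...int) Tensor
}

// stubTensor plays the role of testTensor: it satisfies the interface but
// panics on any method the tests never exercise.
type stubTensor struct{}

func (t *stubTensor) Contiguous() Tensor { panic("not implemented") }

// When a method such as Set is added to the interface, a matching panicking
// stub has to be added here or the assertion below stops compiling.
func (t *stubTensor) Set(t2 Tensor, offset int, strides ...int) Tensor {
	panic("not implemented")
}

func (t *stubTensor) Pad(shape ...int) Tensor { panic("not implemented") }

// Compile-time check that stubTensor still implements the full interface.
var _ Tensor = (*stubTensor)(nil)

func main() {
	fmt.Println("stubTensor satisfies Tensor")
}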
model/models/gemma2/model.go

@@ -179,7 +179,7 @@ func (m *Model) Forward(ctx ml.Context, opts input.Options) (ml.Tensor, error) {
 		return nil, err
 	}
 
-	outputs, err := ctx.FromIntSlice(opts.Outputs, len(opts.Outputs))
+	outputs, err := ctx.Output().FromIntSlice(opts.Outputs, len(opts.Outputs))
 	if err != nil {
 		return nil, err
 	}
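The one-line change here routes creation of the output-indices tensor through ctx.Output() instead of ctx directly, which reads as scoping that allocation to an output-specific sub-context. A minimal, self-contained sketch of that call shape; the Context and Tensor types below are stand-ins invented for the example, not ollama's ml package.

package main

import "fmt"

// Context and Tensor are illustrative only: they mimic the shape of the call
// in the diff (ctx.Output().FromIntSlice(...)), not ollama's real ml API.
type Tensor struct {
	data  []int32
	shape []int
}

type Context struct {
	name string
}

// Output returns a derived context; here it only tags allocations so the
// example can show which context a tensor was created on.
func (c *Context) Output() *Context {
	return &Context{name: c.name + "/output"}
}

// FromIntSlice builds a tensor from an int slice, mirroring the call site in
// Forward; it errors if the shape does not match the number of elements.
func (c *Context) FromIntSlice(s []int32, shape ...int) (*Tensor, error) {
	n := 1
	for _, d := range shape {
		n *= d
	}
	if n != len(s) {
		return nil, fmt.Errorf("shape %v does not match %d elements", shape, len(s))
	}
	fmt.Printf("allocating on context %q\n", c.name)
	return &Tensor{data: s, shape: shape}, nil
}

func main() {
	ctx := &Context{name: "forward"}
	outputs := []int32{0, 5, 9}

	// Before the fix: ctx.FromIntSlice(outputs, len(outputs))
	// After the fix:  ctx.Output().FromIntSlice(outputs, len(outputs))
	t, err := ctx.Output().FromIntSlice(outputs, len(outputs))
	if err != nil {
		panic(err)
	}
	fmt.Println("output-indices tensor shape:", t.shape)
}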
model/models/mllama/model_text.go

@@ -28,7 +28,7 @@ func (sa *TextSelfAttention) Forward(ctx ml.Context, hiddenState, positions, _ m
 	key := sa.Key.Forward(ctx, hiddenState)
 	key = key.Reshape(ctx, headDim, opts.numKVHeads, batchSize)
 	key = key.RoPE(ctx, positions, sa.RopeFactors, opts.ropeDim, ropeType, opts.ropeBase, opts.ropeScale)
 
 	value := sa.Value.Forward(ctx, hiddenState)
 	value = value.Reshape(ctx, headDim, opts.numKVHeads, batchSize)
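For orientation, the key path in this hunk is the usual attention preamble: project, reshape to (headDim, numKVHeads, batchSize), then apply rotary position embeddings. A generic sketch of what a call like key.RoPE(ctx, positions, sa.RopeFactors, opts.ropeDim, ropeType, opts.ropeBase, opts.ropeScale) computes for a single head vector; this is not ollama's kernel, and treating ropeScale as a simple linear position scale is an assumption made for the example.

package main

import (
	"fmt"
	"math"
)

// applyRoPE rotates consecutive dimension pairs of a single head vector by a
// position-dependent angle: theta_i = pos * base^(-i/ropeDim) for even i.
// Generic rotary-embedding sketch; parameter names mirror the diff, but this
// is not ollama's implementation, and ropeScale as a linear position scale is
// an assumption.
func applyRoPE(x []float64, pos int, ropeDim int, ropeBase, ropeScale float64) []float64 {
	out := make([]float64, len(x))
	copy(out, x)
	for i := 0; i+1 < ropeDim && i+1 < len(x); i += 2 {
		theta := float64(pos) * ropeScale * math.Pow(ropeBase, -float64(i)/float64(ropeDim))
		sin, cos := math.Sincos(theta)
		x0, x1 := x[i], x[i+1]
		out[i] = x0*cos - x1*sin
		out[i+1] = x0*sin + x1*cos
	}
	return out
}

func main() {
	key := []float64{1, 0, 1, 0, 0.5, 0.5}
	rotated := applyRoPE(key, 3, len(key), 10000.0, 1.0)
	fmt.Println(rotated) // same length, each dimension pair rotated by a position-dependent angle
}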