OpenDAS / ollama / Commits / d28b244d

Unverified commit d28b244d
Authored Jun 28, 2023 by Michael Yang
Committed by GitHub on Jun 28, 2023
Merge pull request #15 from jmorganca/batch
batch model
Parents: 8be9071e, e63ad693
Showing 2 changed files with 22 additions and 13 deletions.
ollama/cmd/cli.py  (+21, -12)
ollama/engine.py   (+1, -1)
ollama/cmd/cli.py
...
...
@@ -54,15 +54,18 @@ def list_models(*args, **kwargs):
 def generate(*args, **kwargs):
     if prompt := kwargs.get('prompt'):
         print('>>>', prompt, flush=True)
         print(flush=True)
         generate_oneshot(*args, **kwargs)
         print(flush=True)
         return

-    return generate_interactive(*args, **kwargs)
+    if sys.stdin.isatty():
+        return generate_interactive(*args, **kwargs)
+    return generate_batch(*args, **kwargs)


 def generate_oneshot(*args, **kwargs):
+    print(flush=True)
     for output in engine.generate(*args, **kwargs):
         output = json.loads(output)
         choices = output.get("choices", [])
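The rewritten generate() dispatches on whether standard input is a terminal: a TTY gets the interactive prompt loop, while anything piped or redirected goes to the new batch path. A minimal standalone illustration of that check (not part of this commit, just a sketch of how sys.stdin.isatty() behaves):

    import sys

    # isatty() is True when stdin is an interactive terminal and False when it
    # is a pipe or a redirected file, e.g.:
    #   $ python mode.py                                 -> "interactive"
    #   $ echo "why is the sky blue?" | python mode.py   -> "batch"
    print("interactive" if sys.stdin.isatty() else "batch")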
...
...
@@ -70,20 +73,26 @@ def generate_oneshot(*args, **kwargs):
             print(choices[0].get("text", ""), end="", flush=True)

     # end with a new line
-    print()
+    print(flush=True)
+    print(flush=True)


 def generate_interactive(*args, **kwargs):
-    print('>>> ', end='', flush=True)
-    for line in sys.stdin:
-        if not sys.stdin.isatty():
-            print(line, end='')
+    while True:
+        print('>>> ', end='', flush=True)
+        line = next(sys.stdin)
+        if not line:
+            return
+        print(flush=True)

-        kwargs.update({'prompt': line})
+        kwargs.update({"prompt": line})
         generate_oneshot(*args, **kwargs)
-        print(flush=True)
-        print('>>> ', end='', flush=True)
+
+
+def generate_batch(*args, **kwargs):
+    for line in sys.stdin:
+        print('>>> ', line, end='', flush=True)
+        kwargs.update({"prompt": line})
+        generate_oneshot(*args, **kwargs)


 def add(model, models_home):
...
...
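Taken together, the cli.py changes mean a piped invocation now reads one prompt per line and runs each through generate_oneshot. The following self-contained sketch exercises that batch loop with engine.generate stubbed out; the stub and the simulated stdin are assumptions for illustration, not code from this commit:

    import io
    import json
    import sys

    def fake_engine_generate(prompt=None, **kwargs):
        # Stand-in for engine.generate: yields JSON chunks carrying a "choices" list.
        yield json.dumps({"choices": [{"text": f"(response to: {prompt.strip()})"}]})

    def generate_batch(**kwargs):
        # Mirrors the generate_batch added above: one prompt per line of stdin.
        for line in sys.stdin:
            print('>>> ', line, end='', flush=True)
            kwargs.update({"prompt": line})
            for output in fake_engine_generate(**kwargs):
                choices = json.loads(output).get("choices", [])
                if choices:
                    print(choices[0].get("text", ""), flush=True)

    if __name__ == "__main__":
        # Simulates: printf 'first prompt\nsecond prompt\n' | <cli>
        sys.stdin = io.StringIO("first prompt\nsecond prompt\n")
        generate_batch()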
ollama/engine.py
...
...
@@ -45,7 +45,7 @@ def load(model, models_home=".", llms={}):
     if not model_path:
         # try loading this as a path to a model, rather than a model name
-        model_path = model
+        model_path = os.path.abspath(model)

     # suppress LLM's output
     with suppress_stderr():
...
...
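The engine.py hunk runs model loading under suppress_stderr(), whose definition sits outside this diff. A common way to write such a helper, offered here only as a sketch under that assumption (not necessarily how ollama/engine.py implements it), is to point file descriptor 2 at /dev/null for the duration of the block:

    import contextlib
    import os

    @contextlib.contextmanager
    def suppress_stderr():
        # Duplicate the real stderr fd, redirect fd 2 to /dev/null, and restore
        # it afterwards so native-library output (e.g. llama.cpp) stays hidden.
        stderr_fd = 2
        saved_fd = os.dup(stderr_fd)
        with open(os.devnull, "wb") as devnull:
            os.dup2(devnull.fileno(), stderr_fd)
            try:
                yield
            finally:
                os.dup2(saved_fd, stderr_fd)
                os.close(saved_fd)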