ModelZoo / llama3_pytorch

Commit f8832720, authored May 06, 2024 by Rayyyyy
Parent: e005d327

    modify chat

1 changed file with 7 additions and 5 deletions:
    llama3_chat.py (+7, -5)
llama3_chat.py @ f8832720

@@ -20,7 +20,7 @@ def main(
         max_seq_len=max_seq_len,
         max_batch_size=max_batch_size,
     )
-    dialogs: List[Dialog] = [[]]  # Start with an empty dialog
+    dialogs: List[Dialog] = []  # Start with an empty dialog
     try:
         # Continue util the user decides to stop
         while True:
@@ -28,19 +28,21 @@ def main(
             # Allow the user to quit the dialogue
             if user_input.lower() in ['stop', 'exit']:
                 break
-            dialogs[0].append({"role": "user", "content": user_input})
+            dialogs.append({"role": "user", "content": user_input})
             # Generate response based on the current dialog context
             results = generator.chat_completion(
                 [dialogs],
                 max_gen_len=max_gen_len,
                 temperature=temperature,
-                top_p=top_p,)[0]
-            response = results['generation']['content']
+                top_p=top_p,
+            )
+            response = results[0]['generation']['content']
             print(f"Assistant: {response}\n")
             # Append the generated response to the dialog
-            dialogs[0].append({"role": "assistant", "content": response})
+            dialogs.append({"role": "assistant", "content": response})
     except KeyboardInterrupt:
         print("Exiting dialogue.")

 if __name__ == "__main__":
     fire.Fire(main)
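
For context, below is a minimal sketch of how main() in llama3_chat.py reads once this commit is applied. Only the lines shown in the diff above come from the commit itself; the surrounding pieces are assumptions modeled on Meta's standard llama3 chat example and may differ from the actual file: the imports, the Llama.build call, the ckpt_dir/tokenizer_path parameters and the default values, and the input("User: ") prompt are all hypothetical.

# Sketch of llama3_chat.py after commit f8832720 (assumed context, see note above).
from typing import List, Optional

import fire
from llama import Dialog, Llama


def main(
    ckpt_dir: str,            # assumed parameter names/defaults, as in Meta's llama3 example
    tokenizer_path: str,
    temperature: float = 0.6,
    top_p: float = 0.9,
    max_seq_len: int = 512,
    max_batch_size: int = 4,
    max_gen_len: Optional[int] = None,
):
    # Assumed: the arguments shown in the diff context belong to Llama.build.
    generator = Llama.build(
        ckpt_dir=ckpt_dir,
        tokenizer_path=tokenizer_path,
        max_seq_len=max_seq_len,
        max_batch_size=max_batch_size,
    )

    dialogs: List[Dialog] = []  # Start with an empty dialog (one running conversation)
    try:
        # Continue until the user decides to stop
        while True:
            user_input = input("User: ")  # assumed prompt text, not shown in the diff
            # Allow the user to quit the dialogue
            if user_input.lower() in ['stop', 'exit']:
                break
            dialogs.append({"role": "user", "content": user_input})
            # Generate a response from the full conversation so far;
            # chat_completion takes a batch of dialogs, so wrap the single dialog in a list
            results = generator.chat_completion(
                [dialogs],
                max_gen_len=max_gen_len,
                temperature=temperature,
                top_p=top_p,
            )
            response = results[0]['generation']['content']
            print(f"Assistant: {response}\n")
            # Append the generated response so the next turn sees the full history
            dialogs.append({"role": "assistant", "content": response})
    except KeyboardInterrupt:
        print("Exiting dialogue.")


if __name__ == "__main__":
    fire.Fire(main)

The net effect of the commit is to treat dialogs as a single running conversation: user and assistant messages are appended directly to it, it is wrapped as a one-element batch when passed to chat_completion, and the first (only) result is unpacked via results[0] instead of indexing into a nested list.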