OpenDAS / ColossalAI / Commits

Commit b07a6f4e (unverified), authored Dec 11, 2023 by Michelle, committed via GitHub on Dec 11, 2023
Parent: 21aa5de0

[colossalqa] fix pangu api (#5170)

* fix pangu api
* add comment
Changes: 2 changed files with 9 additions and 5 deletions

    applications/ColossalQA/colossalqa/memory.py              +1 -1
    applications/ColossalQA/examples/webui_demo/server.py     +8 -4
applications/ColossalQA/colossalqa/memory.py

@@ -154,7 +154,7 @@ class ConversationBufferWithSummary(ConversationSummaryMemory):
         remain = self.max_tokens - prompt_length
         while self.get_conversation_length() > remain:
             if len(self.buffered_history.messages) <= 2:
-                raise RuntimeError("Exeeed max_tokens, trunck size of retrieved documents is too large")
+                raise RuntimeError("Exceed max_tokens, trunk size of retrieved documents is too large")
             temp = self.buffered_history.messages.pop(0)
             self.summarized_history_temp.messages.append(temp)
             temp = self.buffered_history.messages.pop(0)
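For context, the loop above moves the oldest buffered messages into the summarized history until the conversation fits back under the token budget; the corrected RuntimeError only fires when no removable messages remain. Below is a minimal, self-contained sketch of that trimming behaviour. It is not the ConversationBufferWithSummary class itself: the whitespace-based token count and the sample messages are illustrative stand-ins.

# Minimal sketch of the buffer-trimming loop behind the corrected error message.
# Not the library class: token counting is a naive whitespace split and the
# histories are plain Python lists, purely for illustration.
def trim_buffer(buffered, summarized, max_tokens, prompt_length):
    def conversation_length():
        return sum(len(message.split()) for message in buffered)

    remain = max_tokens - prompt_length
    while conversation_length() > remain:
        if len(buffered) <= 2:
            # Same condition and (corrected) message as in memory.py
            raise RuntimeError("Exceed max_tokens, trunk size of retrieved documents is too large")
        # Move the oldest human message and the oldest AI reply out of the live buffer
        summarized.append(buffered.pop(0))
        summarized.append(buffered.pop(0))

if __name__ == "__main__":
    history = [
        "Q: what does ColossalQA do?",
        "A: retrieval-augmented QA over your documents.",
        "Q: which backends are supported?",
        "A: local models and hosted APIs such as Pangu.",
    ]
    archive = []
    trim_buffer(history, archive, max_tokens=30, prompt_length=8)
    print(history)  # newest messages kept in the buffer
    print(archive)  # oldest messages handed off for summarization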
applications/ColossalQA/examples/webui_demo/server.py

@@ -77,12 +77,16 @@ if __name__ == "__main__":
         colossal_api = ColossalAPI(model_name, all_config["model"]["model_path"])
         llm = ColossalLLM(n=1, api=colossal_api)
     elif all_config["model"]["mode"] == "api":
-        all_config["chain"]["mem_llm_kwargs"] = None
-        all_config["chain"]["disambig_llm_kwargs"] = None
-        all_config["chain"]["gen_llm_kwargs"] = None
         if model_name == "pangu_api":
             from colossalqa.local.pangu_llm import Pangu
-            llm = Pangu(id=1)
+
+            gen_config = {
+                "user": "User",
+                "max_tokens": all_config["chain"]["disambig_llm_kwargs"]["max_new_tokens"],
+                "temperature": all_config["chain"]["disambig_llm_kwargs"]["temperature"],
+                "n": 1  # the number of responses generated
+            }
+            llm = Pangu(gen_config=gen_config)
             llm.set_auth_config()  # verify user's auth info here
         elif model_name == "chatgpt_api":
             from langchain.llms import OpenAI
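The api-mode branch now builds an explicit generation config from the chain settings instead of calling Pangu(id=1); since gen_config reads max_new_tokens and temperature from disambig_llm_kwargs, the branch no longer nulls those kwargs out first. A minimal sketch of the new flow is below: the all_config values are hypothetical stand-ins for the parsed webui_demo config, and the colossalqa import plus the set_auth_config() call require the ColossalQA application and valid Pangu credentials.

# Minimal sketch of the updated api-mode branch from server.py.
# The config values here are hypothetical; only the structure mirrors the commit.
all_config = {
    "model": {"mode": "api", "model_name": "pangu_api"},
    "chain": {"disambig_llm_kwargs": {"max_new_tokens": 30, "temperature": 0.7}},
}
model_name = all_config["model"]["model_name"]

if all_config["model"]["mode"] == "api" and model_name == "pangu_api":
    from colossalqa.local.pangu_llm import Pangu

    gen_config = {
        "user": "User",
        "max_tokens": all_config["chain"]["disambig_llm_kwargs"]["max_new_tokens"],
        "temperature": all_config["chain"]["disambig_llm_kwargs"]["temperature"],
        "n": 1,  # the number of responses generated
    }
    llm = Pangu(gen_config=gen_config)  # replaces the old Pangu(id=1) call
    llm.set_auth_config()  # verify user's auth info here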