Commit 741ed5dc authored by Timothy J. Baek

fix

parent b1b72441
@@ -495,6 +495,7 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b
             ids=[str(uuid.uuid1()) for _ in texts],
             metadatas=metadatas,
             embeddings=embeddings,
+            documents=texts,
         ):
             collection.add(*batch)
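The only change in this hunk is `documents=texts`: the raw chunk text is now stored in the collection next to the ids, metadata, and embeddings. A minimal sketch of the effect, assuming a ChromaDB-style collection API (the collection name, texts, and placeholder vectors below are illustrative, not taken from this commit):

```python
# Illustrative sketch only (assumed ChromaDB-style API, hypothetical data):
# passing `documents` alongside ids/metadatas/embeddings stores the chunk
# text itself, so later queries can return it.
import uuid

import chromadb

client = chromadb.Client()
collection = client.get_or_create_collection("example_docs")  # hypothetical name

texts = ["first chunk of text", "second chunk of text"]
metadatas = [{"source": "a.txt"}, {"source": "b.txt"}]
embeddings = [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]  # placeholder vectors

collection.add(
    ids=[str(uuid.uuid1()) for _ in texts],
    metadatas=metadatas,
    embeddings=embeddings,
    documents=texts,  # the line this commit adds; without it, no text is stored
)

result = collection.query(query_embeddings=[[0.1, 0.2, 0.3]], n_results=1)
print(result["documents"])  # now contains the original chunk text
```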
@@ -43,6 +43,8 @@ def query_embeddings_doc(collection_name: str, query_embeddings, k: int):
             query_embeddings=[query_embeddings],
             n_results=k,
         )
+
+        log.info(f"query_embeddings_doc:result {result}")
         return result
     except Exception as e:
         raise e
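The added `log.info` records the raw query result before it is returned. A hedged sketch of the surrounding function (simplified: it takes a collection object directly, whereas the real function looks the collection up by `collection_name`):

```python
# Sketch, not the project's code: shows where the added log.info lands and
# that the logged value is the raw dict returned by the vector DB query
# (keys such as "ids", "distances", "metadatas", "documents").
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)


def query_embeddings_doc(collection, query_embeddings, k: int):
    # Simplification: the real function receives a collection_name and
    # resolves the collection itself.
    try:
        result = collection.query(
            query_embeddings=[query_embeddings],
            n_results=k,
        )

        log.info(f"query_embeddings_doc:result {result}")
        return result
    except Exception as e:
        raise e
```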
@@ -155,7 +157,9 @@ def rag_messages(
     openai_key,
     openai_url,
 ):
-    log.debug(f"docs: {docs}")
+    log.debug(
+        f"docs: {docs} {messages} {embedding_engine} {embedding_model} {embedding_function} {openai_key} {openai_url}"
+    )
     last_user_message_idx = None
     for i in range(len(messages) - 1, -1, -1):