I'm trying to build a chatbot that chats with a website, using LangChain and the MistralAI API. However, I'm running into a problem with the MistralAI API: the same code works with the OpenAI API, but not with Mistral.
app.py:
import streamlit as st
from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.document_loaders import WebBaseLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_mistralai import ChatMistralAI, MistralAIEmbeddings
from dotenv import load_dotenv
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
load_dotenv()

def get_vectorstore_from_url(url):
    # get the text in document form
    loader = WebBaseLoader(url)
    document = loader.load()

    # split the document into chunks
    text_splitter = RecursiveCharacterTextSplitter()
    document_chunks = text_splitter.split_documents(document)

    # create a vectorstore from the chunks
    vector_store = Chroma.from_documents(document_chunks, MistralAIEmbeddings())

    return vector_store

def get_context_retriever_chain(vector_store):
    llm = ChatMistralAI()
    retriever = vector_store.as_retriever()

    # prompt that asks the LLM to turn the chat history + latest input into a standalone search query
    prompt = ChatPromptTemplate.from_messages(
        [
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
            (
                "user",
                "Given the above conversation, generate a search query to look up in order to get information relevant to the conversation",
            ),
        ]
    )

    retriever_chain = create_history_aware_retriever(llm, retriever, prompt)

    return retriever_chain

def get_conversational_rag_chain(retriever_chain):
    llm = ChatMistralAI()

    # prompt that answers the user's question from the retrieved context
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "Answer the user's questions based on the below context:\n\n{context}",
            ),
            MessagesPlaceholder(variable_name="chat_history"),
            ("user", "{input}"),
        ]
    )

    stuff_documents_chain = create_stuff_documents_chain(llm, prompt)

    return create_retrieval_chain(retriever_chain, stuff_documents_chain)

def get_response(user_input):
    retriever_chain = get_context_retriever_chain(st.session_state.vector_store)
    conversation_rag_chain = get_conversational_rag_chain(retriever_chain)

    response = conversation_rag_chain.invoke(
        {"chat_history": st.session_state.chat_history, "input": user_input}
    )

    return response["answer"]

# app config
st.set_page_config(page_title="Chat with websites", page_icon="🤖")
st.title("Chat with websites")
# sidebar
with st.sidebar:
    st.header("Settings")
    website_url = st.text_input("Website URL")

if website_url is None or website_url == "":
    st.info("Please enter a website URL")

else:
    # session state
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = [
            AIMessage(content="Hello, I am a bot. How can I help you?"),
        ]
    if "vector_store" not in st.session_state:
        st.session_state.vector_store = get_vectorstore_from_url(website_url)

    # user input
    user_query = st.chat_input("Type your message here...")
    if user_query is not None and user_query != "":
        response = get_response(user_query)
        st.session_state.chat_history.append(HumanMessage(content=user_query))
        st.session_state.chat_history.append(AIMessage(content=response))

    # conversation
    for message in st.session_state.chat_history:
        if isinstance(message, AIMessage):
            with st.chat_message("AI"):
                st.write(message.content)
        elif isinstance(message, HumanMessage):
            with st.chat_message("Human"):
                st.write(message.content)
Specifically, the Mistral embeddings do work; the chat LLM, on the other hand, doesn't accept what I send it and instead raises this error:
MistralAPIException: Cannot stream response. Status: 400
Traceback:
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 535, in _run_script
exec(code, module.__dict__)
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\app.py", line 106, in <module>
response = get_response(user_query)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\app.py", line 75, in get_response
response = conversation_rag_chain.invoke(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 4041, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 2053, in invoke
input = step.invoke(
^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\passthrough.py", line 415, in invoke
return self._call_with_config(self._invoke, input, config, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 1246, in _call_with_config
context.run(
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\config.py", line 326, in call_func_with_variable_args
return func(input, **kwargs) # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\passthrough.py", line 402, in _invoke
**self.mapper.invoke(
^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 2692, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 2692, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 456, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 401, in __get_result
raise self._exception
File "C:\Users\LENOVO\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 4041, in invoke
return self.bound.invoke(
^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\branch.py", line 211, in invoke
output = self.default.invoke(
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\runnables\base.py", line 2053, in invoke
input = step.invoke(
^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\language_models\chat_models.py", line 166, in invoke
self.generate_prompt(
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\language_models\chat_models.py", line 544, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\language_models\chat_models.py", line 408, in generate
raise e
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\language_models\chat_models.py", line 398, in generate
self._generate_with_cache(
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_core\language_models\chat_models.py", line 577, in _generate_with_cache
return self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_mistralai\chat_models.py", line 263, in _generate
response = self.completion_with_retry(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_mistralai\chat_models.py", line 207, in completion_with_retry
return _completion_with_retry(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\tenacity\__init__.py", line 289, in wrapped_f
return self(f, *args, **kw)
^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\tenacity\__init__.py", line 379, in __call__
do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\tenacity\__init__.py", line 325, in iter
raise retry_exc.reraise()
^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\tenacity\__init__.py", line 158, in reraise
raise self.last_attempt.result()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\AppData\Local\Programs\Python\Python311\Lib\concurrent\futures\_base.py", line 401, in __get_result
raise self._exception
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\tenacity\__init__.py", line 382, in __call__
result = fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\langchain_mistralai\chat_models.py", line 205, in _completion_with_retry
return self.client.chat(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\mistralai\client.py", line 160, in chat
for response in single_response:
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\mistralai\client.py", line 93, in _request
yield self._check_response(response)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\mistralai\client_base.py", line 92, in _check_response
self._check_response_status_codes(response)
File "C:\Users\LENOVO\Desktop\LangChain\chatbot_2\chatbot_with_mistralAI\Lib\site-packages\mistralai\client_base.py", line 79, in _check_response_status_codes
raise MistralAPIException.from_response(
I've tried changing the inputs and checking the documentation, but the problem persists. How should I modify the code to fix this error?
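In case it helps to reproduce this outside Streamlit, the failing call boils down to the following sketch; the URL and the question are just placeholders, everything else is the same code as in app.py above:

from langchain_core.messages import AIMessage

vector_store = get_vectorstore_from_url("https://example.com")  # placeholder URL
retriever_chain = get_context_retriever_chain(vector_store)
conversation_rag_chain = get_conversational_rag_chain(retriever_chain)

response = conversation_rag_chain.invoke(
    {
        # same initial history the app starts with
        "chat_history": [AIMessage(content="Hello, I am a bot. How can I help you?")],
        "input": "What is this website about?",  # placeholder question
    }
)
print(response["answer"])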
Could you show us the prompt that actually gets sent once chat_history and user_input are filled in? I.e., the entire prompt.
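For example, turning on LangChain's global debug logging before invoking the chain should print each runnable's inputs and outputs, including the final chat messages sent to Mistral (a minimal sketch; it only adds logging and doesn't change behavior):

from langchain.globals import set_debug

set_debug(True)  # log every runnable's inputs/outputs, including the messages passed to the model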