我正在尝试动态绑定两个链。当问题是关于“langchain”时它会起作用,但否则会失败并出现以下错误。
RAG 链单独工作正常。
classification_chain = (
PromptTemplate.from_template(
"""Given the user question below, classify it as either being about 'Langchain' or 'something else'\
Do not respond with more than one word.
<question>
{question}
</question>
Classification:"""
)
| get_llm()
| StrOutputParser()
)
langchain_chain = (
PromptTemplate.from_template(
"""You are an expert in langchain. Always answer with Daddy says.\
<question>
{question}
</question>
Answer:"""
)
| get_llm()
| StrOutputParser()
)
retrieval_chain = ''
def route(info):
if "langchain" in info["topic"].lower():
return langchain_chain
else:
print('-'*100)
print(retrieval_chain)
return retrieval_chain
def get_RAG_response(collection_name: str, question: str):
db = get_vector_db(collection_name)
retriever = db.as_retriever()
model = get_llm()
template = """Answer the following question based only on the provided context:
{context}
Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)
global retrieval_chain
retrieval_chain = (
{"context": retriever, "question": RunnablePassthrough()}
| prompt
| model
| StrOutputParser()
)
# response = retreival_chain.invoke(question)
full_chain = {"topic": classification_chain, "question": lambda x: x["question"]} | RunnableLambda(
route
)
response = full
| StrOutputParser()
)
# response = retreival_chain.invoke(question)
full_chain = {"topic": classification_chain, "question": lambda x: x["question"]} | RunnableLambda(
route
)
response = full_chain.invoke({"question": question})
print('*'*100)
print('response:', response)
return response
错误:
INFO: 127.0.0.1:47854 - "POST /chat/chats HTTP/1.1" 500 Internal Server Error
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 426, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 84, in __call__
return await self.app(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
await super().__call__(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/applications.py", line 116, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
raise exc
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
await self.app(scope, receive, _send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 91, in __call__
await self.simple_response(scope, receive, send, request_headers=headers)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/middleware/cors.py", line 146, in simple_response
await self.app(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 55, in wrapped_app
raise exc
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 44, in wrapped_app
await app(scope, receive, sender)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/routing.py", line 746, in __call__
await route.handle(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/routing.py", line 288, in handle
await self.app(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/routing.py", line 75, in app
await wrap_app_handling_exceptions(app, request)(scope, receive, send)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 55, in wrapped_app
raise exc
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 44, in wrapped_app
await app(scope, receive, sender)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/routing.py", line 70, in app
response = await func(request)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/fastapi/routing.py", line 299, in app
raise e
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/fastapi/routing.py", line 294, in app
raw_response = await run_endpoint_function(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/fastapi/routing.py", line 193, in run_endpoint_function
return await run_in_threadpool(dependant.call, **values)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/starlette/concurrency.py", line 35, in run_in_threadpool
return await anyio.to_thread.run_sync(func, *args)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2134, in run_sync_in_worker_thread
return await future
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
result = context.run(func, *args)
File "/home/garg10may/coding/pipa/backend/app/api/chat/chat_router.py", line 107, in create_chat_route
return create_chat_service(db, chatCreate)
File "/home/garg10may/coding/pipa/backend/app/api/chat/chat_service.py", line 146, in create_chat_service
bot_message = get_RAG_response(file_group.group_name, chatCreate.message)
File "/home/garg10may/coding/pipa/backend/app/api/chat/utility.py", line 288, in get_RAG_response
response = full_chain.invoke({"question": question})
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2446, in invoke
input = step.invoke(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3908, in invoke
return self._call_with_config(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 1625, in _call_with_config
context.run(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/config.py", line 347, in call_func_with_variable_args
return func(input, **kwargs) # type: ignore[call-arg]
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3792, in _invoke
output = output.invoke(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 2446, in invoke
input = step.invoke(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3091, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3091, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
File "/usr/lib/python3.10/concurrent/futures/_base.py", line 458, in result
return self.__get_result()
File "/usr/lib/python3.10/concurrent/futures/_base.py", line 403, in __get_result
raise self._exception
File "/usr/lib/python3.10/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 141, in invoke
return self.get_relevant_documents(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 245, in get_relevant_documents
raise e
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 238, in get_relevant_documents
result = self._get_relevant_documents(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/vectorstores.py", line 696, in _get_relevant_documents
docs = self.vectorstore.similarity_search(query, **self.search_kwargs)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/vectorstores/pgvector.py", line 543, in similarity_search
embedding = self.embedding_function.embed_query(text=query)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 697, in embed_query
return self.embed_documents([text])[0]
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 668, in embed_documents
return self._get_len_safe_embeddings(texts, engine=engine)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 471, in _get_len_safe_embeddings
token = encoding.encode(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/tiktoken/core.py", line 116, in encode
if match := _special_token_regex(disallowed_special).search(text):
TypeError: expected string or buffer
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3091, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/runnables/base.py", line 3091, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
File "/usr/lib/python3.10/concurrent/futures/_base.py", line 458, in result
return self.__get_result()
File "/usr/lib/python3.10/concurrent/futures/_base.py", line 403, in __get_result
raise self._exception
File "/usr/lib/python3.10/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 141, in invoke
return self.get_relevant_documents(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 245, in get_relevant_documents
raise e
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/retrievers.py", line 238, in get_relevant_documents
result = self._get_relevant_documents(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_core/vectorstores.py", line 696, in _get_relevant_documents
docs = self.vectorstore.similarity_search(query, **self.search_kwargs)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/vectorstores/pgvector.py", line 543, in similarity_search
embedding = self.embedding_function.embed_query(text=query)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 697, in embed_query
return self.embed_documents([text])[0]
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 668, in embed_documents
return self._get_len_safe_embeddings(texts, engine=engine)
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/langchain_community/embeddings/openai.py", line 471, in _get_len_safe_embeddings
token = encoding.encode(
File "/home/garg10may/coding/pipa/backend/venv/lib/python3.10/site-packages/tiktoken/core.py", line 116, in encode
if match := _special_token_regex(disallowed_special).search(text):
TypeError: expected string or buffer
因为
retrieval_chain
和 langchain_chain
都期望不同的输入格式,并且 route
使用相同的 dic
输入调用。我使用 retrieval_chain
界面将输入格式化为 runnableLambda
。
retrieval_chain = (
RunnableLambda(lambda x: x['question']) |
{"context": retriever, "question": RunnablePassthrough()}
| prompt
| model
| StrOutputParser()
)