@router.post("/webhook")
async def webhook_events(request: Request):
    """Handle an incoming WhatsApp webhook event from Meta.

    Parses the webhook payload, restores the sender's Redis-backed chat
    history, runs the text message through the LangChain agent, and sends
    the agent's reply back through ``messenger``.

    NOTE(review): Meta re-delivers a webhook event when it does not get a
    fast 2xx response, so a slow agent run here is the usual cause of
    duplicate POSTs for a single message — consider acknowledging
    immediately and running the agent in a background task.
    """
    payload = await request.json()

    # Hoist the deeply nested envelope once instead of repeating the full
    # payload["entry"][0]["changes"][0]["value"] chain on every access.
    # Status/read-receipt notifications don't carry this shape; in that
    # case just return 200 so Meta does not retry the delivery.
    try:
        value = payload["entry"][0]["changes"][0]["value"]
    except (KeyError, IndexError):
        return

    if "messages" not in value:
        # Nothing to answer (e.g. delivery/read receipts).
        return

    recipient = value["messages"][0]["from"]

    # Derive a stable per-user key for the Redis chat history. The original
    # read user_id/user_name unconditionally and raised NameError whenever
    # the "contacts" block was absent; fall back to the sender id instead.
    if "contacts" in value:
        contact = value["contacts"][0]
        user_name = contact["profile"]["name"]
        wa_id = contact["wa_id"]
        user_id = user_name.split(" ")[0] + "@" + wa_id
    else:
        user_name = recipient
        user_id = recipient

    chat_history = RedisChatMessageHistory(user_id)
    memory = ConversationBufferMemory(
        chat_memory=chat_history,
        memory_key="chat_history",
        return_messages=True,
    )

    # First contact: seed the conversation so the agent knows the user's
    # name, and set a TTL on the freshly created Redis history.
    if not chat_history.messages:
        chat_history.add_ai_message("Hi! What's your name? I'm MobSecBot, designed to help you with your mobile and security questions.")
        chat_history.add_user_message(f"Hi! My name is {user_name}.")
        expire_chat_history(r, f'message_store:{user_id}')

    agent_chain = agent_handler.init_chain(agent, memory)

    if "text" in value["messages"][0]:
        message = value["messages"][0]["text"]["body"]
        print(f"{recipient}: {message}")
        try:
            response = agent_chain.run(message)
        except Exception as e:
            # LangChain wraps unparseable LLM output in this prefix; strip
            # it so the user gets the model's text rather than a raw
            # exception string (no-op when the prefix is absent).
            response = str(e).removeprefix("Could not parse LLM output: `").removesuffix("`")
        print(f"MobSecBot: {response}")
    else:
        response = "Sorry! I can only respond to text messages for now."

    await messenger.send_message(response, recipient)
我正在使用 langchain、FastAPI 和 Meta Webhooks 开发 WhatsApp 聊天机器人。针对 AgentExecutor 链中提出的单个问题，我却收到了多个 POST 请求。我在下面附上了图片供参考。
'一些 LLM 提供流式响应。这意味着您不必等待完整的响应返回，而是可以在响应可用时立即开始处理。如果您想在响应生成的同时将其展示给用户，或者想边生成边处理响应，这会非常有用。'