import os

from langchain_core.utils.function_calling import convert_to_openai_function
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.memory import ConversationBufferWindowMemory
from langchain.schema.runnable import RunnablePassthrough
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.agents import AgentExecutor
from langchain_groq import ChatGroq

from functions import book_slot, check_slots, suggest_specialization, reschedule_event, delete_event


def create_agent(general_prompt_template):
    # Read the Groq API key from the environment rather than hardcoding a secret in source.
    api_key = os.getenv("GROQ_API_KEY")

    # Tools the agent may call, converted to the OpenAI function-calling schema.
    tools = [book_slot, delete_event, check_slots, suggest_specialization, reschedule_event]
    functions = [convert_to_openai_function(f) for f in tools]

    # Groq-hosted Llama 3.1 70B model with the tool schemas bound to it.
    llm = ChatGroq(
        model="llama-3.1-70b-versatile",
        temperature=0,
        max_tokens=None,
        timeout=None,
        max_retries=2,
        api_key=api_key,
    ).bind_functions(functions=functions)

    # System instructions, rolling chat history, current user input, and the agent scratchpad.
    prompt = ChatPromptTemplate.from_messages([
        ("system", general_prompt_template),
        MessagesPlaceholder(variable_name="chat_history"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ])

    # Keep only the last 5 exchanges in conversational memory.
    memory = ConversationBufferWindowMemory(memory_key="chat_history", return_messages=True, k=5)

    # Agent chain: format intermediate steps -> prompt -> LLM -> parse function calls.
    chain = (
        RunnablePassthrough.assign(
            agent_scratchpad=lambda x: format_to_openai_functions(x["intermediate_steps"])
        )
        | prompt
        | llm
        | OpenAIFunctionsAgentOutputParser()
    )

    agent_executor = AgentExecutor(agent=chain, tools=tools, memory=memory, verbose=True)
    return agent_executor


# Example queries:
# "give me available slots at 17 september 2024"
# "book schedule at 10:30 AM 17 september 2024"
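
# Minimal usage sketch (not part of the original module): shows how create_agent
# might be wired up and invoked with one of the example queries above. The prompt
# text below is an illustrative assumption; the real general_prompt_template is
# supplied by the caller.
if __name__ == "__main__":
    general_prompt_template = (
        "You are a scheduling assistant. Use the available tools to check, book, "
        "reschedule, or cancel appointment slots."
    )
    agent_executor = create_agent(general_prompt_template)

    # AgentExecutor expects the "{input}" variable declared in the prompt above.
    result = agent_executor.invoke({"input": "give me available slots at 17 september 2024"})
    print(result["output"])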