Advertisement
ketan17

Using ChatGroq with tool calling via a Llama model

Sep 19th, 2024
38
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.12 KB | Source Code | 0 0
  1. # want to see if formatting the tools is what will make the tool call work.
  2.  
  3. from pprint import pprint
  4. from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
  5. from dotenv import load_dotenv
  6. from langchain_groq import ChatGroq
  7. from IPython.display import Image, display
  8. from langgraph.graph import StateGraph, START, END
  9. from langgraph.graph import MessagesState
  10. from langgraph.prebuilt import ToolNode, tools_condition
  11. import json
  12.  
  13.  
# Load environment variables from .env (presumably supplies GROQ_API_KEY — confirm).
load_dotenv()


# Groq-hosted model used for the tool-calling experiment.
MODEL = "llama3-8b-8192"
# MODEL = "gemma2-9b-it"
# temperature=0 to make the model's output (and tool-call choices) deterministic.
llm = ChatGroq(model=MODEL, temperature=0)
  20.  
  21.  
  22. def multiply(a: int, b: int) -> int:
  23.     """Multiply two integers.
  24.  
  25.    Args:
  26.        a: an integer
  27.        b: an integer
  28.  
  29.    Returns:
  30.        the product of a and b
  31.    """
  32.     return a * b
  33.  
  34. def add(a: int, b: int) -> int:
  35.     """Adds a and b.
  36.  
  37.    Args:
  38.        a: first int
  39.        b: second int
  40.    """
  41.     return a + b
  42.  
  43. def subtract(a: int, b: int) -> int:
  44.     """Subtracts a and b.
  45.  
  46.    Args:
  47.        a: first int
  48.        b: second int
  49.    """
  50.     return a - b
  51.  
  52.  
# The plain Python functions exposed to the model as callable tools.
tools = [multiply, add, subtract]

# LLM wrapper with the tools bound so the model can emit tool calls for them.
llm_with_tools = llm.bind_tools(tools)
  56.  
  57. # Node
  58. def tool_calling_llm(state: MessagesState):
  59.     return {"messages": [llm_with_tools.invoke(state["messages"])]}
  60.  
# Build graph's nodes
builder = StateGraph(MessagesState)
builder.add_node("tool_calling_llm", tool_calling_llm)
builder.add_node("tools", ToolNode(tools)) # for the tools

# Build graph's edges
builder.add_edge(START, "tool_calling_llm")
# tools_condition routes to the "tools" node when the last AI message contains
# tool calls, otherwise to END.
builder.add_conditional_edges(
    "tool_calling_llm",
    tools_condition,
)
# After a tool executes, hand its result back to the LLM node for the next step.
builder.add_edge("tools", "tool_calling_llm")
graph = builder.compile()
  74.  
  75. messages = graph.invoke({"messages": [SystemMessage(content="You are a calculator assistant. Use the multiply function to perform mathematical operations and provide the results."), HumanMessage(content="What is 4 * 4?, now add 3, and then subtract 2 from it.")]})
  76.  
  77. for m in messages['messages']:
  78.     m.pretty_print()
  79.  
  80.  
# NOTE: so far this script only works with the Gemma model, not the Llama one.
# Possibly the tools need to be passed to the ToolNode as dictionaries — to be investigated.
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement