{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import os\n", "import utils\n", "\n", "utils.load_env()\n", "os.environ['LANGCHAIN_TRACING_V2'] = \"true\"" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from typing import Annotated, Literal, TypedDict\n", "\n", "from langchain_core.messages import HumanMessage\n", "\n", "# for llm model\n", "from langchain_anthropic import ChatAnthropic\n", "from langchain_openai import ChatOpenAI\n", "\n", "from langchain_core.tools import tool\n", "from langgraph.checkpoint.memory import MemorySaver\n", "from langgraph.graph import END, StateGraph, MessagesState\n", "from langgraph.prebuilt import ToolNode\n", "import tools\n", "\n", "\n", "tool_node = tools.tool_node\n", "\n", "# model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\", temperature=0).bind_tools(tools)\n", "# Bind the tool definitions so the model can emit tool calls; without .bind_tools()\n", "# the LLM never produces tool_calls and should_continue can never route to \"tools\".\n", "model = ChatOpenAI(model=\"gpt-4o-mini\").bind_tools(tools.tools)\n", "\n", "# Define the function that determines whether to continue or not\n", "def should_continue(state: MessagesState) -> Literal[\"tools\", END]:\n", " messages = state['messages']\n", " last_message = messages[-1]\n", " # If the LLM makes a tool call, then we route to the \"tools\" node\n", " if last_message.tool_calls:\n", " return \"tools\"\n", " # Otherwise, we stop (reply to the user)\n", " return END\n", "\n", "\n", "# Define the function that calls the model\n", "def call_model(state: MessagesState):\n", " messages = state['messages']\n", " response = model.invoke(messages)\n", " # We return a list, because this will get added to the existing list\n", " return {\"messages\": [response]}\n", "\n", "\n", "# Define a new graph\n", "workflow = StateGraph(MessagesState)\n", "\n", "# Define the two nodes we will cycle between\n", "workflow.add_node(\"agent\", call_model)\n", "workflow.add_node(\"tools\", tool_node)\n", "\n", "# Set the entrypoint as `agent`\n", "# This means that this node is 
the first one called\n", "workflow.set_entry_point(\"agent\")\n", "\n", "# We now add a conditional edge\n", "workflow.add_conditional_edges(\n", " # First, we define the start node. We use `agent`.\n", " # This means these are the edges taken after the `agent` node is called.\n", " \"agent\",\n", " # Next, we pass in the function that will determine which node is called next.\n", " should_continue,\n", ")\n", "\n", "# We now add a normal edge from `tools` to `agent`.\n", "# This means that after `tools` is called, `agent` node is called next.\n", "workflow.add_edge(\"tools\", 'agent')\n", "\n", "# Initialize memory to persist state between graph runs\n", "checkpointer = MemorySaver()\n", "\n", "# Finally, we compile it!\n", "# This compiles it into a LangChain Runnable,\n", "# meaning you can use it as you would any other runnable.\n", "# Note that we're (optionally) passing the memory when compiling the graph\n", "app = workflow.compile(checkpointer=checkpointer)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "def submitUserMessage(message: str, thread_id: int = 42):\n", " \"\"\"Send one user message through the agent graph and return the reply text.\n", "\n", " thread_id selects the conversation thread used by the MemorySaver\n", " checkpointer; the default (42) preserves the previous single-thread\n", " behaviour, while callers may pass a different id to keep separate\n", " conversation histories.\n", " \"\"\"\n", " final_state = app.invoke(\n", " {\"messages\": [HumanMessage(content=message)]},\n", " config={\"configurable\": {\"thread_id\": thread_id}}\n", " )\n", " return final_state[\"messages\"][-1].content" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Unable to load requested LangChainTracer. To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n", "Unable to load requested LangChainTracer. To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n", "Unable to load requested LangChainTracer. 
To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n", "Unable to load requested LangChainTracer. To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n", "Unable to load requested LangChainTracer. To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n", "Unable to load requested LangChainTracer. To disable this warning, unset the LANGCHAIN_TRACING_V2 environment variables.\n", "LangSmithUserError('API key must be provided when using hosted LangSmith API')\n" ] }, { "data": { "text/plain": [ "'บริเวณมาบุญครองในกรุงเทพฯ มีร้านกาแฟหลายแห่งที่น่าสนใจ คุณสามารถลองไปที่ร้านเหล่านี้ได้:\\n\\n1. **ร้านกาแฟโฟลว์ (Flow Coffee)** - ร้านกาแฟเล็กๆ ที่มีบรรยากาศสบาย เหมาะสำหรับนั่งทำงานหรือนั่งชิลล์\\n2. **ร้านกาแฟ On the Way** - ร้านกาแฟที่มีเมนูหลากหลายและบรรยากาศดี\\n3. **ร้านกาแฟชิค (Chic)** - ร้านกาแฟที่มีการตกแต่งน่ารักและเครื่องดื่มหลากหลาย\\n4. **ร้านกาแฟ Starbucks** - มีสาขาหลายแห่งในกรุงเทพฯ รวมถึงใกล้บริเวณมาบุญครอง\\n5. 
**ร้านกาแฟดอยช้าง** - ที่มีชื่อเสียงในเรื่องของกาแฟจากดอยช้าง\\n\\nคุณสามารถค้นหาร้านกาแฟเพิ่มเติมได้จาก Google Maps หรือแอปพลิเคชันค้นหาร้านอาหารต่างๆ เพื่อดูรีวิวและข้อมูลเพิ่มเติมเกี่ยวกับร้านกาแฟใกล้มาบุญครองได้ค่ะ'" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "submitUserMessage(\"ค้นหาร้านกาแฟใกล้มาบุญครอง\")" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.9" } }, "nbformat": 4, "nbformat_minor": 2 }