Open4

chainlitの練習ノート

もぶもぶもぶもぶ
from langchain.chains import LLMMathChain
from langchain.chat_models import ChatOpenAI
from langchain.agents import initialize_agent, Tool, AgentExecutor, AgentType
import chainlit as cl
from dotenv import load_dotenv


@cl.on_chat_start
def start():
    """Build a function-calling agent with a calculator tool and stash it in the session."""
    load_dotenv()

    chat_model = ChatOpenAI(temperature=0, streaming=True)
    math_chain = LLMMathChain.from_llm(llm=chat_model, verbose=True)

    # Single tool: delegate arithmetic questions to the math chain.
    calculator = Tool(
        name="Calculator",
        func=math_chain.run,
        description="useful for when you need to answer questions about math",
    )

    executor = initialize_agent(
        [calculator], chat_model, agent=AgentType.OPENAI_FUNCTIONS, verbose=True
    )
    cl.user_session.set("agent", executor)


@cl.on_message
async def main(message: cl.Message):
    """Run the session's agent on the incoming message and send back its answer."""
    executor: AgentExecutor = cl.user_session.get("agent")
    answer = await executor.arun(
        message.content, callbacks=[cl.LangchainCallbackHandler()]
    )
    await cl.Message(content=answer).send()

もぶもぶもぶもぶ

もうちょっとエージェントカスタムしやすいバージョンで。

from langchain.chat_models import ChatOpenAI
from langchain.agents import AgentExecutor
import chainlit as cl
from dotenv import load_dotenv
import requests
from pydantic.v1 import BaseModel, Field
import datetime
from langchain.tools import tool
from langchain.tools.render import format_tool_to_openai_function
from langchain.prompts import MessagesPlaceholder, ChatPromptTemplate
from langchain.schema.runnable import RunnablePassthrough
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.schema.runnable.config import RunnableConfig

# Define the input schema for the weather tool.
# NOTE: the Field descriptions below are runtime data — they are surfaced to
# the LLM as part of the tool's argument schema, so do not reword them casually.
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch current temperature for given coordinates."""
    # Returns a human-readable sentence (the LLM consumes it as the tool
    # observation); the original annotation said `dict`, which was wrong.

    BASE_URL = "https://api.open-meteo.com/v1/forecast"

    # Request one day of hourly 2m-temperature data for the given point.
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # timeout added so a stalled API call cannot hang the chat session.
    response = requests.get(BASE_URL, params=params, timeout=10)

    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")

    # Use an aware UTC "now" (utcnow() is naive and deprecated) and normalize
    # every parsed timestamp to aware UTC, so the subtraction below never
    # mixes naive and aware datetimes (which raises TypeError).
    current_utc_time = datetime.datetime.now(datetime.timezone.utc)
    time_list = []
    for time_str in results['hourly']['time']:
        parsed = datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        if parsed.tzinfo is None:
            # Open-Meteo reports times in UTC; assume UTC when no offset given.
            parsed = parsed.replace(tzinfo=datetime.timezone.utc)
        time_list.append(parsed)
    temperature_list = results['hourly']['temperature_2m']

    # Pick the hourly sample closest to "now".
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]

    return f'The current temperature is {current_temperature}°C'


@cl.on_chat_start
def start():
    """Assemble an LCEL-style OpenAI-functions agent with the weather tool and store it."""
    load_dotenv()

    tools = [get_current_temperature]
    openai_functions = [format_tool_to_openai_function(t) for t in tools]
    llm = ChatOpenAI(temperature=0).bind(functions=openai_functions)

    prompt = ChatPromptTemplate.from_messages([
        ("system", "あなたは親切なAIエージェントです"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ])

    def _render_scratchpad(inputs):
        # Convert intermediate (action, observation) pairs into OpenAI function messages.
        return format_to_openai_functions(inputs["intermediate_steps"])

    chain = (
        RunnablePassthrough.assign(agent_scratchpad=_render_scratchpad)
        | prompt
        | llm
        | OpenAIFunctionsAgentOutputParser()
    )
    cl.user_session.set("agent", AgentExecutor(agent=chain, tools=tools, verbose=True))


@cl.on_message
async def main(message: cl.Message):
    """Invoke the session agent asynchronously and relay its output."""
    executor: AgentExecutor = cl.user_session.get("agent")
    run_config = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])
    result = await executor.ainvoke({"input": message.content}, config=run_config)
    await cl.Message(content=result["output"]).send()

もぶもぶもぶもぶ

メモリあり版

from langchain.chat_models import ChatOpenAI
from langchain.agents import AgentExecutor
import chainlit as cl
from dotenv import load_dotenv
import requests
from pydantic.v1 import BaseModel, Field
import datetime
from langchain.tools import tool
from langchain.tools.render import format_tool_to_openai_function
from langchain.prompts import MessagesPlaceholder, ChatPromptTemplate
from langchain.schema.runnable import RunnablePassthrough
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.schema.runnable.config import RunnableConfig
from langchain.memory import ConversationBufferMemory

# Define the input schema for the weather tool.
# NOTE: the Field descriptions below are runtime data — they are surfaced to
# the LLM as part of the tool's argument schema, so do not reword them casually.
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch current temperature for given coordinates."""
    # Returns a human-readable sentence (the LLM consumes it as the tool
    # observation); the original annotation said `dict`, which was wrong.

    BASE_URL = "https://api.open-meteo.com/v1/forecast"

    # Request one day of hourly 2m-temperature data for the given point.
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # timeout added so a stalled API call cannot hang the chat session.
    response = requests.get(BASE_URL, params=params, timeout=10)

    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")

    # Use an aware UTC "now" (utcnow() is naive and deprecated) and normalize
    # every parsed timestamp to aware UTC, so the subtraction below never
    # mixes naive and aware datetimes (which raises TypeError).
    current_utc_time = datetime.datetime.now(datetime.timezone.utc)
    time_list = []
    for time_str in results['hourly']['time']:
        parsed = datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        if parsed.tzinfo is None:
            # Open-Meteo reports times in UTC; assume UTC when no offset given.
            parsed = parsed.replace(tzinfo=datetime.timezone.utc)
        time_list.append(parsed)
    temperature_list = results['hourly']['temperature_2m']

    # Pick the hourly sample closest to "now".
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]

    return f'The current temperature is {current_temperature}°C'


@cl.on_chat_start
def start():
    """Build the weather agent with conversation-buffer memory and store it in the session."""
    load_dotenv()

    tools = [get_current_temperature]
    llm = ChatOpenAI(temperature=0).bind(
        functions=[format_tool_to_openai_function(t) for t in tools]
    )
    prompt = ChatPromptTemplate.from_messages([
        ("system", "あなたは親切なAIエージェントです"),
        MessagesPlaceholder(variable_name="chat_history"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ])
    # Memory key must match the chat_history placeholder above.
    history = ConversationBufferMemory(return_messages=True, memory_key="chat_history")

    chain = (
        RunnablePassthrough.assign(
            agent_scratchpad=lambda inputs: format_to_openai_functions(inputs["intermediate_steps"])
        )
        | prompt
        | llm
        | OpenAIFunctionsAgentOutputParser()
    )
    executor = AgentExecutor(agent=chain, tools=tools, verbose=True, memory=history)
    cl.user_session.set("agent_executor", executor)


@cl.on_message
async def main(message: cl.Message):
    """Invoke the memory-backed agent asynchronously and relay its output."""
    executor: AgentExecutor = cl.user_session.get("agent_executor")
    run_config = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])
    result = await executor.ainvoke({"input": message.content}, config=run_config)
    await cl.Message(content=result["output"]).send()

もぶもぶもぶもぶ

AgentTokenBuffer版

from langchain.chat_models import ChatOpenAI
from langchain.agents import AgentExecutor
import chainlit as cl
from dotenv import load_dotenv
import requests
from pydantic.v1 import BaseModel, Field
import datetime
from langchain.tools import tool
from langchain.prompts import MessagesPlaceholder
from langchain.agents.openai_functions_agent.agent_token_buffer_memory import (
    AgentTokenBufferMemory,
)
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain.schema.messages import SystemMessage
from langchain.schema.runnable.config import RunnableConfig

# Define the input schema for the weather tool.
# NOTE: the Field descriptions below are runtime data — they are surfaced to
# the LLM as part of the tool's argument schema, so do not reword them casually.
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch current temperature for given coordinates."""
    # Returns a human-readable sentence (the LLM consumes it as the tool
    # observation); the original annotation said `dict`, which was wrong.

    BASE_URL = "https://api.open-meteo.com/v1/forecast"

    # Request one day of hourly 2m-temperature data for the given point.
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # timeout added so a stalled API call cannot hang the chat session.
    response = requests.get(BASE_URL, params=params, timeout=10)

    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")

    # Use an aware UTC "now" (utcnow() is naive and deprecated) and normalize
    # every parsed timestamp to aware UTC, so the subtraction below never
    # mixes naive and aware datetimes (which raises TypeError).
    current_utc_time = datetime.datetime.now(datetime.timezone.utc)
    time_list = []
    for time_str in results['hourly']['time']:
        parsed = datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        if parsed.tzinfo is None:
            # Open-Meteo reports times in UTC; assume UTC when no offset given.
            parsed = parsed.replace(tzinfo=datetime.timezone.utc)
        time_list.append(parsed)
    temperature_list = results['hourly']['temperature_2m']

    # Pick the hourly sample closest to "now".
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]

    return f'The current temperature is {current_temperature}°C'


@cl.on_chat_start
def start():
    """Create an OpenAIFunctionsAgent with token-buffer memory and stash its executor."""
    load_dotenv()

    llm = ChatOpenAI(temperature=0)
    tools = [get_current_temperature]

    # Prompt = system message + chat-history placeholder (filled by memory below).
    prompt = OpenAIFunctionsAgent.create_prompt(
        system_message=SystemMessage(content="貴方は親切なAIアシスタントです"),
        extra_prompt_messages=[MessagesPlaceholder(variable_name="chat_history")],
    )

    executor = AgentExecutor(
        agent=OpenAIFunctionsAgent(llm=llm, tools=tools, prompt=prompt),
        tools=tools,
        # Token-bounded memory keyed to the chat_history placeholder above.
        memory=AgentTokenBufferMemory(memory_key="chat_history", llm=llm),
        verbose=True,
        return_intermediate_steps=True,
    )
    cl.user_session.set("agent_executor", executor)


@cl.on_message
async def main(message: cl.Message):
    """Invoke the token-buffer agent asynchronously and relay its output."""
    executor: AgentExecutor = cl.user_session.get("agent_executor")
    run_config = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])
    result = await executor.ainvoke({"input": message.content}, config=run_config)
    await cl.Message(content=result["output"]).send()