{ "cells": [ { "cell_type": "code", "execution_count": 6, "id": "2ea22fb4", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\r\n", "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.3.1\u001b[0m\r\n", "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\r\n" ] } ], "source": [ "!pip install -qU google-api-python-client" ] }, { "cell_type": "markdown", "id": "5d8d76d9", "metadata": {}, "source": [ "# Conversation buffer memory" ] }, { "cell_type": "code", "execution_count": 7, "id": "0d9e8e8f", "metadata": {}, "outputs": [], "source": [ "import os\n", "from dotenv import load_dotenv\n", "from langchain.agents import AgentExecutor, Tool, ZeroShotAgent\n", "from langchain.chains import LLMChain\n", "from langchain.llms import OpenAI\n", "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n", "from langchain.prompts import PromptTemplate\n", "from langchain.utilities import GoogleSearchAPIWrapper\n", "\n", "llm = OpenAI(temperature=0)" ] }, { "cell_type": "code", "execution_count": 8, "id": "96286dd4", "metadata": {}, "outputs": [], "source": [ "template = \"\"\"This is a piece of financial report, namely Form 10-K, section 7:\n", "\n", "{chat_history}\n", "\n", "Summarize this text into 2-3 sentences as best as you can.\n", "\"\"\"\n", "\n", "prompt = PromptTemplate(input_variables=[\"chat_history\"], template=template)\n", "memory = ConversationBufferMemory(memory_key=\"chat_history\")\n", "readonlymemory = ReadOnlySharedMemory(memory=memory)\n", "summary_chain = LLMChain(\n", " llm=OpenAI(),\n", " prompt=prompt,\n", " verbose=True,\n", " memory=readonlymemory,\n", ")" ] }, { "cell_type": "code", "execution_count": 9, "id": "d76360b0", "metadata": {}, "outputs": [], "source": [ "search = GoogleSearchAPIWrapper()\n", "tools = [\n", " Tool(\n", " name=\"Search\",\n", " func=search.run,\n", " description=\"useful for when you need to answer questions about current events or find some relevant information on the internet.\",\n", " ),\n", " Tool(\n", " name=\"Summary\",\n", " func=summary_chain.run,\n", " description=\"useful for when you need to summarize a piece of financial report text. The input to this tool should be a string.\",\n", " ),\n", "]" ] }, { "cell_type": "code", "execution_count": null, "id": "5c6a7dc8", "metadata": {}, "outputs": [], "source": [ "prefix = \"\"\"\n", "You are the best broker in the world. You are asked to read the financial report for some company.\n", "Then you should suggest what is the best action: sell, buy or hold. 
You need to return only one of those three options.\n", "\"\"\"\n", "suffix = \"\"\"Begin!\n", "\n", "{chat_history}\n", "Question: {input}\n", "{agent_scratchpad}\"\"\"\n", "\n", "prompt = ZeroShotAgent.create_prompt(\n", " tools,\n", " prefix=prefix,\n", " suffix=suffix,\n", " input_variables=[\"input\", \"chat_history\", \"agent_scratchpad\"],\n", ")" ] }, { "cell_type": "code", "execution_count": null, "id": "3bd67b6d", "metadata": {}, "outputs": [], "source": [ "llm_chain = LLMChain(llm=OpenAI(temperature=0), prompt=prompt)\n", "agent = ZeroShotAgent(llm_chain=llm_chain, tools=tools, verbose=True)\n", "agent_chain = AgentExecutor.from_agent_and_tools(\n", " agent=agent, tools=tools, verbose=True, memory=memory\n", ")" ] }, { "cell_type": "markdown", "id": "925c632e", "metadata": {}, "source": [ "# Conversation summarization memory" ] }, { "cell_type": "code", "execution_count": null, "id": "6c72c255", "metadata": {}, "outputs": [], "source": [ "# Minimal sketch (assumed setup): swap the buffer memory above for\n", "# ConversationSummaryMemory, which keeps a running LLM-written summary\n", "# of the conversation instead of the raw transcript.\n", "from langchain.memory import ConversationSummaryMemory\n", "\n", "summary_memory = ConversationSummaryMemory(llm=llm, memory_key=\"chat_history\")\n", "readonly_summary_memory = ReadOnlySharedMemory(memory=summary_memory)" ] }, { "cell_type": "code", "execution_count": null, "id": "8af6c4ab", "metadata": {}, "outputs": [], "source": [ "# Record one exchange; the memory condenses it into a summary as it is saved.\n", "summary_memory.save_context(\n", "    {\"input\": \"Deven & Sam are working on a hackathon project\"},\n", "    {\"output\": \"That sounds like a great project! What kind of project are they working on?\"},\n", ")" ] }, { "cell_type": "code", "execution_count": null, "id": "9885e240", "metadata": {}, "outputs": [], "source": [ "# Inspect the summarized history that would be injected into the prompt.\n", "summary_memory.load_memory_variables({})" ] }, { "cell_type": "markdown", "id": "1c7b7e74", "metadata": {}, "source": [ "# Entity memory" ] }, { "cell_type": "code", "execution_count": 10, "id": "e3423486", "metadata": {}, "outputs": [], "source": [ "from langchain.llms import OpenAI\n", "from langchain.memory import ConversationEntityMemory\n", "llm = OpenAI(temperature=0)" ] }, { "cell_type": "code", "execution_count": 11, "id": "d36440a7", "metadata": {}, "outputs": [], "source": [ "memory = ConversationEntityMemory(llm=llm)\n", "_input = {\"input\": \"Deven & Sam are working on a hackathon project\"}\n", "memory.load_memory_variables(_input)\n", "memory.save_context(\n", " _input,\n", " {\"output\": \" That sounds like a great project! What kind of project are they working on?\"}\n", ")" ] }, { "cell_type": "code", "execution_count": 12, "id": "95b32eb8", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'history': 'Human: Deven & Sam are working on a hackathon project\\nAI: That sounds like a great project! What kind of project are they working on?',\n", " 'entities': {'Sam': 'Sam is working on a hackathon project with Deven.'}}" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "memory.load_memory_variables({\"input\": 'who is Sam'})" ] }, { "cell_type": "code", "execution_count": 13, "id": "8b763a07", "metadata": {}, "outputs": [], "source": [ "memory = ConversationEntityMemory(llm=llm, return_messages=True)\n", "_input = {\"input\": \"Deven & Sam are working on a hackathon project\"}\n", "memory.load_memory_variables(_input)\n", "memory.save_context(\n", " _input,\n", " {\"output\": \" That sounds like a great project! What kind of project are they working on?\"}\n", ")" ] }, { "cell_type": "code", "execution_count": 14, "id": "a95a2393", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'history': [HumanMessage(content='Deven & Sam are working on a hackathon project'),\n", " AIMessage(content=' That sounds like a great project! 
What kind of project are they working on?')],\n", " 'entities': {'Sam': 'Sam is working on a hackathon project with Deven.'}}" ] }, "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ "memory.load_memory_variables({\"input\": 'who is Sam'})" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.0" } }, "nbformat": 4, "nbformat_minor": 5 }