{"cells": [{"cell_type": "markdown", "id": "52824b89-532a-4e54-87e9-1410813cd39e", "metadata": {}, "source": ["# \u7b2c\u56db\u7ae0 \u6a21\u578b\u94fe\n", "\n", " - [\u4e00\u3001\u8bbe\u7f6eOpenAI API Key](#\u4e00\u3001\u8bbe\u7f6eOpenAI-API-Key)\n", " - [\u4e8c\u3001LLMChain](#\u4e8c\u3001LLMChain)\n", " - [\u4e09\u3001Sequential Chains](#\u4e09\u3001Sequential-Chains)\n", " - [3.1 SimpleSequentialChain](#3.1-SimpleSequentialChain)\n", " - [3.2 SequentialChain](#3.2-SequentialChain)\n", " - [\u56db\u3001 Router Chain\uff08\u8def\u7531\u94fe\uff09](#\u56db\u3001-Router-Chain\uff08\u8def\u7531\u94fe\uff09)\n", " - [4.1 \u5b9a\u4e49\u63d0\u793a\u6a21\u677f](#4.1-\u5b9a\u4e49\u63d0\u793a\u6a21\u677f)\n", " - [4.2 \u5bfc\u5165\u76f8\u5173\u7684\u5305](#4.2-\u5bfc\u5165\u76f8\u5173\u7684\u5305)\n", " - [4.3 \u5b9a\u4e49\u8bed\u8a00\u6a21\u578b](#4.3-\u5b9a\u4e49\u8bed\u8a00\u6a21\u578b)\n", " - [4.4 LLMRouterChain\uff08\u6b64\u94fe\u4f7f\u7528 LLM \u6765\u786e\u5b9a\u5982\u4f55\u8def\u7531\u4e8b\u7269\uff09](#4.4-LLMRouterChain\uff08\u6b64\u94fe\u4f7f\u7528-LLM-\u6765\u786e\u5b9a\u5982\u4f55\u8def\u7531\u4e8b\u7269\uff09)\n"]}, {"cell_type": "markdown", "id": "54810ef7", "metadata": {}, "source": ["\u94fe\u5141\u8bb8\u6211\u4eec\u5c06\u591a\u4e2a\u7ec4\u4ef6\u7ec4\u5408\u5728\u4e00\u8d77\uff0c\u4ee5\u521b\u5efa\u4e00\u4e2a\u5355\u4e00\u7684\u3001\u8fde\u8d2f\u7684\u5e94\u7528\u7a0b\u5e8f\u3002\u94fe\uff08Chains\uff09\u901a\u5e38\u5c06\u4e00\u4e2aLLM\uff08\u5927\u8bed\u8a00\u6a21\u578b\uff09\u4e0e\u63d0\u793a\u7ed3\u5408\u5728\u4e00\u8d77\uff0c\u4f7f\u7528\u8fd9\u4e2a\u6784\u5efa\u5757\uff0c\u60a8\u8fd8\u53ef\u4ee5\u5c06\u4e00\u5806\u8fd9\u4e9b\u6784\u5efa\u5757\u7ec4\u5408\u5728\u4e00\u8d77\uff0c\u5bf9\u60a8\u7684\u6587\u672c\u6216\u5176\u4ed6\u6570\u636e\u8fdb\u884c\u4e00\u7cfb\u5217\u64cd\u4f5c\u3002\u4f8b\u5982\uff0c\u6211\u4eec\u53ef\u4ee5\u521b\u5efa\u4e00\u4e2a\u94fe\uff0c\u8be5\u94fe\u63a5\u53d7\u7528\u6237\u8f93\u5165\uff0c\u4f7f\u7528\u63d0\u793a\u6a21\u677f\u5bf9\u5176\u8fdb\u884c\u683c\u5f0f\u5316\uff0c\u7136\u540e\u5c06\u683c\u5f0f\u5316\u7684\u54cd\u5e94\u4f20\u9012\u7ed9LLM\u3002\u6211\u4eec\u53ef\u4ee5\u901a\u8fc7\u5c06\u591a\u4e2a\u94fe\u7ec4\u5408\u5728\u4e00\u8d77\uff0c\u6216\u8005\u901a\u8fc7\u5c06\u94fe\u4e0e\u5176\u4ed6\u7ec4\u4ef6\u7ec4\u5408\u5728\u4e00\u8d77\u6765\u6784\u5efa\u66f4\u590d\u6742\u7684\u94fe\u3002"]}, {"cell_type": "markdown", "id": "663fc885", "metadata": {}, "source": ["\u8fd9\u4e9b\u94fe\u7684\u4e00\u90e8\u5206\u7684\u5f3a\u5927\u4e4b\u5904\u5728\u4e8e\u4f60\u53ef\u4ee5\u4e00\u6b21\u8fd0\u884c\u5b83\u4eec\u5728\u8bb8\u591a\u8f93\u5165\u4e0a\uff0c\u56e0\u6b64\uff0c\u6211\u4eec\u5c06\u52a0\u8f7d\u4e00\u4e2apandas\u6570\u636e\u6846\u67b6"]}, {"cell_type": "markdown", "id": "21009bf6-49bd-466e-8177-74c23533d938", "metadata": {"tags": []}, "source": ["## \u4e00\u3001\u8bbe\u7f6eOpenAI API Key\n", "\n", "\u767b\u9646 [OpenAI \u8d26\u6237](https://platform.openai.com/account/api-keys) \u83b7\u53d6API Key\uff0c\u7136\u540e\u5c06\u5176\u8bbe\u7f6e\u4e3a\u73af\u5883\u53d8\u91cf\u3002\n", "\n", "- \u5982\u679c\u4f60\u60f3\u8981\u8bbe\u7f6e\u4e3a\u5168\u5c40\u73af\u5883\u53d8\u91cf\uff0c\u53ef\u4ee5\u53c2\u8003[\u77e5\u4e4e\u6587\u7ae0](https://zhuanlan.zhihu.com/p/627665725)\u3002\n", "- \u5982\u679c\u4f60\u60f3\u8981\u8bbe\u7f6e\u4e3a\u672c\u5730/\u9879\u76ee\u73af\u5883\u53d8\u91cf\uff0c\u5728\u672c\u6587\u4ef6\u76ee\u5f55\u4e0b\u521b\u5efa`.env`\u6587\u4ef6, \u6253\u5f00\u6587\u4ef6\u8f93\u5165\u4ee5\u4e0b\u5185\u5bb9\u3002\n", "\n", " <p 
style=\"font-family:verdana; font-size:12px;color:green\">\n", " OPENAI_API_KEY=\"your_api_key\" \n", " </p>\n", " \n", " \u66ff\u6362\"your_api_key\"\u4e3a\u4f60\u81ea\u5df1\u7684 API Key"]}, {"cell_type": "code", "execution_count": 1, "id": "adc3519c-4d12-4011-9223-2f3cb3c42b93", "metadata": {}, "outputs": [], "source": ["# \u4e0b\u8f7d\u9700\u8981\u7684\u5305python-dotenv\u548copenai\n", "# \u5982\u679c\u4f60\u9700\u8981\u67e5\u770b\u5b89\u88c5\u8fc7\u7a0b\u65e5\u5fd7\uff0c\u53ef\u5220\u9664 -q \n", "!pip install -q python-dotenv\n", "!pip install -q openai"]}, {"cell_type": "code", "execution_count": 2, "id": "1ad53241-bef6-42b8-894b-bcbbc8c64df7", "metadata": {"tags": []}, "outputs": [], "source": ["import os\n", "import openai\n", "from dotenv import load_dotenv, find_dotenv\n", "\n", "# \u8bfb\u53d6\u672c\u5730/\u9879\u76ee\u7684\u73af\u5883\u53d8\u91cf\u3002\n", "\n", "# find_dotenv()\u5bfb\u627e\u5e76\u5b9a\u4f4d.env\u6587\u4ef6\u7684\u8def\u5f84\n", "# load_dotenv()\u8bfb\u53d6\u8be5.env\u6587\u4ef6\uff0c\u5e76\u5c06\u5176\u4e2d\u7684\u73af\u5883\u53d8\u91cf\u52a0\u8f7d\u5230\u5f53\u524d\u7684\u8fd0\u884c\u73af\u5883\u4e2d \n", "# \u5982\u679c\u4f60\u8bbe\u7f6e\u7684\u662f\u5168\u5c40\u7684\u73af\u5883\u53d8\u91cf\uff0c\u8fd9\u884c\u4ee3\u7801\u5219\u6ca1\u6709\u4efb\u4f55\u4f5c\u7528\u3002\n", "_ = load_dotenv(find_dotenv())\n", "\n", "# \u83b7\u53d6\u73af\u5883\u53d8\u91cf OPENAI_API_KEY\n", "openai.api_key = os.environ['OPENAI_API_KEY'] "]}, {"cell_type": "markdown", "id": "b940ce7c", "metadata": {"tags": []}, "source": ["## \u4e8c\u3001LLMChain"]}, {"cell_type": "markdown", "id": "e000bd16", "metadata": {}, "source": ["LLMChain\u662f\u4e00\u4e2a\u7b80\u5355\u4f46\u975e\u5e38\u5f3a\u5927\u7684\u94fe\uff0c\u4e5f\u662f\u540e\u9762\u6211\u4eec\u5c06\u8981\u4ecb\u7ecd\u7684\u8bb8\u591a\u94fe\u7684\u57fa\u7840\u3002"]}, {"cell_type": "code", "execution_count": 3, "id": "b84e441b", "metadata": {}, "outputs": [], "source": ["#!pip install pandas"]}, {"cell_type": "code", "execution_count": 46, "id": "974acf8e-8f88-42de-88f8-40a82cb58e8b", "metadata": {}, "outputs": [], "source": ["import pandas as pd\n", "df = pd.read_csv('Data.csv')"]}, {"cell_type": "code", "execution_count": 47, "id": "b7a09c35", "metadata": {}, "outputs": [{"data": {"text/html": ["<div>\n", "<style scoped>\n", " .dataframe tbody tr th:only-of-type {\n", " vertical-align: middle;\n", " }\n", "\n", " .dataframe tbody tr th {\n", " vertical-align: top;\n", " }\n", "\n", " .dataframe thead th {\n", " text-align: right;\n", " }\n", "</style>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>Product</th>\n", " <th>Review</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>0</th>\n", " <td>Queen Size Sheet Set</td>\n", " <td>I ordered a king size set. My only criticism w...</td>\n", " </tr>\n", " <tr>\n", " <th>1</th>\n", " <td>Waterproof Phone Pouch</td>\n", " <td>I loved the waterproof sac, although the openi...</td>\n", " </tr>\n", " <tr>\n", " <th>2</th>\n", " <td>Luxury Air Mattress</td>\n", " <td>This mattress had a small hole in the top of i...</td>\n", " </tr>\n", " <tr>\n", " <th>3</th>\n", " <td>Pillows Insert</td>\n", " <td>This is the best throw pillow fillers on Amazo...</td>\n", " </tr>\n", " <tr>\n", " <th>4</th>\n", " <td>Milk Frother Handheld\\n</td>\n", " <td>I loved this product. 
But they only seem to l...</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", "</div>"], "text/plain": [" Product Review\n", "0 Queen Size Sheet Set I ordered a king size set. My only criticism w...\n", "1 Waterproof Phone Pouch I loved the waterproof sac, although the openi...\n", "2 Luxury Air Mattress This mattress had a small hole in the top of i...\n", "3 Pillows Insert This is the best throw pillow fillers on Amazo...\n", "4 Milk Frother Handheld\\n \u00a0I loved this product. But they only seem to l..."]}, "execution_count": 47, "metadata": {}, "output_type": "execute_result"}], "source": ["df.head()"]}, {"cell_type": "code", "execution_count": 48, "id": "e92dff22", "metadata": {}, "outputs": [], "source": ["from langchain.chat_models import ChatOpenAI #\u5bfc\u5165OpenAI\u6a21\u578b\n", "from langchain.prompts import ChatPromptTemplate #\u5bfc\u5165\u804a\u5929\u63d0\u793a\u6a21\u677f\n", "from langchain.chains import LLMChain #\u5bfc\u5165LLM\u94fe\u3002"]}, {"cell_type": "markdown", "id": "94a32c6f", "metadata": {}, "source": ["\u521d\u59cb\u5316\u8bed\u8a00\u6a21\u578b"]}, {"cell_type": "code", "execution_count": 9, "id": "943237a7", "metadata": {}, "outputs": [], "source": ["llm = ChatOpenAI(temperature=0.0) #\u9884\u6d4b\u4e0b\u4e00\u4e2atoken\u65f6\uff0c\u6982\u7387\u8d8a\u5927\u7684\u503c\u5c31\u8d8a\u5e73\u6ed1(\u5e73\u6ed1\u4e5f\u5c31\u662f\u8ba9\u5dee\u5f02\u5927\u7684\u503c\u4e4b\u95f4\u7684\u5dee\u5f02\u53d8\u5f97\u6ca1\u90a3\u4e48\u5927)\uff0ctemperature\u503c\u8d8a\u5c0f\u5219\u751f\u6210\u7684\u5185\u5bb9\u8d8a\u7a33\u5b9a"]}, {"cell_type": "markdown", "id": "81887434", "metadata": {}, "source": ["\u521d\u59cb\u5316prompt\uff0c\u8fd9\u4e2aprompt\u5c06\u63a5\u53d7\u4e00\u4e2a\u540d\u4e3aproduct\u7684\u53d8\u91cf\u3002\u8be5prompt\u5c06\u8981\u6c42LLM\u751f\u6210\u4e00\u4e2a\u63cf\u8ff0\u5236\u9020\u8be5\u4ea7\u54c1\u7684\u516c\u53f8\u7684\u6700\u4f73\u540d\u79f0"]}, {"cell_type": "code", "execution_count": 7, "id": "cdcdb42d", "metadata": {}, "outputs": [], "source": ["prompt = ChatPromptTemplate.from_template( \n", " \"What is the best name to describe \\\n", " a company that makes {product}?\"\n", ")"]}, {"cell_type": "markdown", "id": "5c22cb13", "metadata": {}, "source": ["\u5c06llm\u548cprompt\u7ec4\u5408\u6210\u94fe---\u8fd9\u4e2aLLM\u94fe\u975e\u5e38\u7b80\u5355\uff0c\u4ed6\u53ea\u662fllm\u548cprompt\u7684\u7ed3\u5408\uff0c\u4f46\u662f\u73b0\u5728\uff0c\u8fd9\u4e2a\u94fe\u8ba9\u6211\u4eec\u53ef\u4ee5\u4ee5\u4e00\u79cd\u987a\u5e8f\u7684\u65b9\u5f0f\u53bb\u901a\u8fc7prompt\u8fd0\u884c\u5e76\u4e14\u7ed3\u5408\u5230LLM\u4e2d"]}, {"cell_type": "code", "execution_count": 8, "id": "d7abc20b", "metadata": {}, "outputs": [], "source": ["chain = LLMChain(llm=llm, prompt=prompt)"]}, {"cell_type": "markdown", "id": "8d7d5ff6", "metadata": {}, "source": ["\u56e0\u6b64\uff0c\u5982\u679c\u6211\u4eec\u6709\u4e00\u4e2a\u540d\u4e3a\"Queen Size Sheet Set\"\u7684\u4ea7\u54c1\uff0c\u6211\u4eec\u53ef\u4ee5\u901a\u8fc7\u4f7f\u7528chain.run\u5c06\u5176\u901a\u8fc7\u8fd9\u4e2a\u94fe\u8fd0\u884c"]}, {"cell_type": "code", "execution_count": 9, "id": "ad44d1fb", "metadata": {}, "outputs": [{"data": {"text/plain": ["'Royal Linens.'"]}, "execution_count": 9, "metadata": {}, "output_type": "execute_result"}], "source": ["product = \"Queen Size Sheet Set\"\n", "chain.run(product)"]}, {"cell_type": "markdown", "id": "1e1ede1c", "metadata": {}, "source": 
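The chapter opening mentioned that part of the power of chains is that you can run them over many inputs at once, which is why the pandas DataFrame was loaded; the notebook itself does not show that step, so here is a minimal sketch. It assumes the `df` loaded above and the legacy `LLMChain.apply` API (a list of input dicts in, one output dict per input out, with the generation under the default `"text"` key); treat it as illustrative rather than part of the original lesson.

```python
# Sketch (not in the original notebook): run the same chain over every product in df.
# Assumes the legacy LangChain 0.0.x LLMChain.apply API and the default "text" output key.
inputs = [{"product": p} for p in df["Product"]]
results = chain.apply(inputs)          # one LLM call per row
for product, result in zip(df["Product"], results):
    print(product.strip(), "->", result["text"])
```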
["\u60a8\u53ef\u4ee5\u8f93\u5165\u4efb\u4f55\u4ea7\u54c1\u63cf\u8ff0\uff0c\u7136\u540e\u67e5\u770b\u94fe\u5c06\u8f93\u51fa\u4ec0\u4e48\u7ed3\u679c"]}, {"cell_type": "code", "execution_count": 14, "id": "2181be10", "metadata": {}, "outputs": [{"data": {"text/plain": ["'\"\u8c6a\u534e\u5e8a\u7eba\"'"]}, "execution_count": 14, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "prompt = ChatPromptTemplate.from_template( \n", " \"\u63cf\u8ff0\u5236\u9020{product}\u7684\u4e00\u4e2a\u516c\u53f8\u7684\u6700\u4f73\u540d\u79f0\u662f\u4ec0\u4e48?\"\n", ")\n", "chain = LLMChain(llm=llm, prompt=prompt)\n", "product = \"\u5927\u53f7\u5e8a\u5355\u5957\u88c5\"\n", "chain.run(product)"]}, {"cell_type": "markdown", "id": "49158430", "metadata": {}, "source": ["## \u4e09\u3001Sequential Chains"]}, {"cell_type": "markdown", "id": "69b03469", "metadata": {}, "source": ["### 3.1 SimpleSequentialChain\n", "\n", "\u987a\u5e8f\u94fe\uff08Sequential Chains\uff09\u662f\u6309\u9884\u5b9a\u4e49\u987a\u5e8f\u6267\u884c\u5176\u94fe\u63a5\u7684\u94fe\u3002\u5177\u4f53\u6765\u8bf4\uff0c\u6211\u4eec\u5c06\u4f7f\u7528\u7b80\u5355\u987a\u5e8f\u94fe\uff08SimpleSequentialChain\uff09\uff0c\u8fd9\u662f\u987a\u5e8f\u94fe\u7684\u6700\u7b80\u5355\u7c7b\u578b\uff0c\u5176\u4e2d\u6bcf\u4e2a\u6b65\u9aa4\u90fd\u6709\u4e00\u4e2a\u8f93\u5165/\u8f93\u51fa\uff0c\u4e00\u4e2a\u6b65\u9aa4\u7684\u8f93\u51fa\u662f\u4e0b\u4e00\u4e2a\u6b65\u9aa4\u7684\u8f93\u5165"]}, {"cell_type": "code", "execution_count": 15, "id": "febee243", "metadata": {}, "outputs": [], "source": ["from langchain.chains import SimpleSequentialChain"]}, {"cell_type": "code", "execution_count": 16, "id": "5d019d6f", "metadata": {}, "outputs": [], "source": ["llm = ChatOpenAI(temperature=0.9)"]}, {"cell_type": "markdown", "id": "0e732589", "metadata": {}, "source": ["\u5b50\u94fe 1"]}, {"cell_type": "code", "execution_count": 15, "id": "2f31aa8a", "metadata": {}, "outputs": [], "source": ["\n", "# \u63d0\u793a\u6a21\u677f 1 \uff1a\u8fd9\u4e2a\u63d0\u793a\u5c06\u63a5\u53d7\u4ea7\u54c1\u5e76\u8fd4\u56de\u6700\u4f73\u540d\u79f0\u6765\u63cf\u8ff0\u8be5\u516c\u53f8\n", "first_prompt = ChatPromptTemplate.from_template(\n", " \"What is the best name to describe \\\n", " a company that makes {product}?\"\n", ")\n", "\n", "# Chain 1\n", "chain_one = LLMChain(llm=llm, prompt=first_prompt)"]}, {"cell_type": "markdown", "id": "dcfca7bd", "metadata": {}, "source": ["\u5b50\u94fe 2"]}, {"cell_type": "code", "execution_count": 16, "id": "3f5d5b76", "metadata": {}, "outputs": [], "source": ["\n", "# \u63d0\u793a\u6a21\u677f 2 \uff1a\u63a5\u53d7\u516c\u53f8\u540d\u79f0\uff0c\u7136\u540e\u8f93\u51fa\u8be5\u516c\u53f8\u7684\u957f\u4e3a20\u4e2a\u5355\u8bcd\u7684\u63cf\u8ff0\n", "second_prompt = ChatPromptTemplate.from_template(\n", " \"Write a 20 words description for the following \\\n", " company:{company_name}\"\n", ")\n", "# chain 2\n", "chain_two = LLMChain(llm=llm, prompt=second_prompt)"]}, {"cell_type": "markdown", "id": "3a1991f4", "metadata": {}, "source": ["\u73b0\u5728\u6211\u4eec\u53ef\u4ee5\u7ec4\u5408\u4e24\u4e2aLLMChain\uff0c\u4ee5\u4fbf\u6211\u4eec\u53ef\u4ee5\u5728\u4e00\u4e2a\u6b65\u9aa4\u4e2d\u521b\u5efa\u516c\u53f8\u540d\u79f0\u548c\u63cf\u8ff0"]}, {"cell_type": "code", "execution_count": 17, "id": "6c1eb2c4", "metadata": {}, "outputs": [], "source": ["overall_simple_chain = SimpleSequentialChain(chains=[chain_one, chain_two],\n", " verbose=True\n", " )"]}, {"cell_type": "markdown", "id": "5122f26a", "metadata": {}, "source": 
["\u7ed9\u4e00\u4e2a\u8f93\u5165\uff0c\u7136\u540e\u8fd0\u884c\u4e0a\u9762\u7684\u94fe"]}, {"cell_type": "code", "execution_count": 18, "id": "78458efe", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new SimpleSequentialChain chain...\u001b[0m\n", "\u001b[36;1m\u001b[1;3mRoyal Linens\u001b[0m\n", "\u001b[33;1m\u001b[1;3mRoyal Linens is a high-quality bedding and linen company that offers luxurious and stylish products for comfortable living.\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'Royal Linens is a high-quality bedding and linen company that offers luxurious and stylish products for comfortable living.'"]}, "execution_count": 18, "metadata": {}, "output_type": "execute_result"}], "source": ["product = \"Queen Size Sheet Set\"\n", "overall_simple_chain.run(product)"]}, {"cell_type": "code", "execution_count": 19, "id": "c7c32997", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new SimpleSequentialChain chain...\u001b[0m\n", "\u001b[36;1m\u001b[1;3m\"\u5c3a\u5bf8\u738b\u5e8a\u54c1\u6709\u9650\u516c\u53f8\"\u001b[0m\n", "\u001b[33;1m\u001b[1;3m\u5c3a\u5bf8\u738b\u5e8a\u54c1\u6709\u9650\u516c\u53f8\u662f\u4e00\u5bb6\u4e13\u6ce8\u4e8e\u5e8a\u4e0a\u7528\u54c1\u751f\u4ea7\u7684\u516c\u53f8\u3002\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'\u5c3a\u5bf8\u738b\u5e8a\u54c1\u6709\u9650\u516c\u53f8\u662f\u4e00\u5bb6\u4e13\u6ce8\u4e8e\u5e8a\u4e0a\u7528\u54c1\u751f\u4ea7\u7684\u516c\u53f8\u3002'"]}, "execution_count": 19, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "\n", "first_prompt = ChatPromptTemplate.from_template( \n", " \"\u63cf\u8ff0\u5236\u9020{product}\u7684\u4e00\u4e2a\u516c\u53f8\u7684\u6700\u597d\u7684\u540d\u79f0\u662f\u4ec0\u4e48\"\n", ")\n", "chain_one = LLMChain(llm=llm, prompt=first_prompt)\n", "\n", "second_prompt = ChatPromptTemplate.from_template( \n", " \"\u5199\u4e00\u4e2a20\u5b57\u7684\u63cf\u8ff0\u5bf9\u4e8e\u4e0b\u9762\u8fd9\u4e2a\\\n", " \u516c\u53f8\uff1a{company_name}\u7684\"\n", ")\n", "chain_two = LLMChain(llm=llm, prompt=second_prompt)\n", "\n", "\n", "overall_simple_chain = SimpleSequentialChain(chains=[chain_one, chain_two],\n", " verbose=True\n", " )\n", "product = \"\u5927\u53f7\u5e8a\u5355\u5957\u88c5\"\n", "overall_simple_chain.run(product)"]}, {"cell_type": "markdown", "id": "7b5ce18c", "metadata": {}, "source": ["### 3.2 SequentialChain"]}, {"cell_type": "markdown", "id": "1e69f4c0", "metadata": {}, "source": ["\u5f53\u53ea\u6709\u4e00\u4e2a\u8f93\u5165\u548c\u4e00\u4e2a\u8f93\u51fa\u65f6\uff0c\u7b80\u5355\u7684\u987a\u5e8f\u94fe\u53ef\u4ee5\u987a\u5229\u5b8c\u6210\u3002\u4f46\u662f\u5f53\u6709\u591a\u4e2a\u8f93\u5165\u6216\u591a\u4e2a\u8f93\u51fa\u65f6\u8be5\u5982\u4f55\u5b9e\u73b0\u5462\uff1f\n", "\n", "\u6211\u4eec\u53ef\u4ee5\u4f7f\u7528\u666e\u901a\u7684\u987a\u5e8f\u94fe\u6765\u5b9e\u73b0\u8fd9\u4e00\u70b9"]}, {"cell_type": "code", "execution_count": 1, "id": "4c129ef6", "metadata": {}, "outputs": [], "source": ["from langchain.chains import SequentialChain\n", "from langchain.chat_models import ChatOpenAI #\u5bfc\u5165OpenAI\u6a21\u578b\n", "from langchain.prompts import ChatPromptTemplate #\u5bfc\u5165\u804a\u5929\u63d0\u793a\u6a21\u677f\n", "from langchain.chains import LLMChain #\u5bfc\u5165LLM\u94fe\u3002"]}, {"cell_type": "markdown", "id": "3d4be4e8", "metadata": {}, "source": 
["\u521d\u59cb\u5316\u8bed\u8a00\u6a21\u578b"]}, {"cell_type": "code", "execution_count": 2, "id": "03a8e203", "metadata": {}, "outputs": [], "source": ["llm = ChatOpenAI(temperature=0.9)"]}, {"cell_type": "markdown", "id": "9811445c", "metadata": {}, "source": ["\u63a5\u4e0b\u6765\u6211\u4eec\u5c06\u521b\u5efa\u4e00\u7cfb\u5217\u7684\u94fe\uff0c\u7136\u540e\u4e00\u4e2a\u63a5\u4e00\u4e2a\u4f7f\u7528\u4ed6\u4eec"]}, {"cell_type": "code", "execution_count": 23, "id": "016187ac", "metadata": {}, "outputs": [], "source": ["#\u5b50\u94fe1\n", "\n", "# prompt\u6a21\u677f 1: \u7ffb\u8bd1\u6210\u82f1\u8bed\uff08\u628a\u4e0b\u9762\u7684review\u7ffb\u8bd1\u6210\u82f1\u8bed\uff09\n", "first_prompt = ChatPromptTemplate.from_template(\n", " \"Translate the following review to english:\"\n", " \"\\n\\n{Review}\"\n", ")\n", "# chain 1: \u8f93\u5165\uff1aReview \u8f93\u51fa\uff1a \u82f1\u6587\u7684 Review\n", "chain_one = LLMChain(llm=llm, prompt=first_prompt, \n", " output_key=\"English_Review\"\n", " )\n"]}, {"cell_type": "code", "execution_count": 25, "id": "0fb0730e", "metadata": {}, "outputs": [], "source": ["#\u5b50\u94fe2\n", "\n", "# prompt\u6a21\u677f 2: \u7528\u4e00\u53e5\u8bdd\u603b\u7ed3\u4e0b\u9762\u7684 review\n", "second_prompt = ChatPromptTemplate.from_template(\n", " \"Can you summarize the following review in 1 sentence:\"\n", " \"\\n\\n{English_Review}\"\n", ")\n", "# chain 2: \u8f93\u5165\uff1a\u82f1\u6587\u7684Review \u8f93\u51fa\uff1a\u603b\u7ed3\n", "chain_two = LLMChain(llm=llm, prompt=second_prompt, \n", " output_key=\"summary\"\n", " )\n"]}, {"cell_type": "code", "execution_count": 26, "id": "6accf92d", "metadata": {}, "outputs": [], "source": ["#\u5b50\u94fe3\n", "\n", "# prompt\u6a21\u677f 3: \u4e0b\u9762review\u4f7f\u7528\u7684\u4ec0\u4e48\u8bed\u8a00\n", "third_prompt = ChatPromptTemplate.from_template(\n", " \"What language is the following review:\\n\\n{Review}\"\n", ")\n", "# chain 3: \u8f93\u5165\uff1aReview \u8f93\u51fa\uff1a\u8bed\u8a00\n", "chain_three = LLMChain(llm=llm, prompt=third_prompt,\n", " output_key=\"language\"\n", " )\n"]}, {"cell_type": "code", "execution_count": 27, "id": "c7a46121", "metadata": {}, "outputs": [], "source": ["#\u5b50\u94fe4\n", "\n", "# prompt\u6a21\u677f 4: \u4f7f\u7528\u7279\u5b9a\u7684\u8bed\u8a00\u5bf9\u4e0b\u9762\u7684\u603b\u7ed3\u5199\u4e00\u4e2a\u540e\u7eed\u56de\u590d\n", "fourth_prompt = ChatPromptTemplate.from_template(\n", " \"Write a follow up response to the following \"\n", " \"summary in the specified language:\"\n", " \"\\n\\nSummary: {summary}\\n\\nLanguage: {language}\"\n", ")\n", "# chain 4: \u8f93\u5165\uff1a \u603b\u7ed3, \u8bed\u8a00 \u8f93\u51fa\uff1a \u540e\u7eed\u56de\u590d\n", "chain_four = LLMChain(llm=llm, prompt=fourth_prompt,\n", " output_key=\"followup_message\"\n", " )\n"]}, {"cell_type": "code", "execution_count": 28, "id": "89603117", "metadata": {}, "outputs": [], "source": ["# \u5bf9\u56db\u4e2a\u5b50\u94fe\u8fdb\u884c\u7ec4\u5408\n", "\n", "#\u8f93\u5165\uff1areview \u8f93\u51fa\uff1a\u82f1\u6587review\uff0c\u603b\u7ed3\uff0c\u540e\u7eed\u56de\u590d \n", "overall_chain = SequentialChain(\n", " chains=[chain_one, chain_two, chain_three, chain_four],\n", " input_variables=[\"Review\"],\n", " output_variables=[\"English_Review\", \"summary\",\"followup_message\"],\n", " verbose=True\n", ")"]}, {"cell_type": "markdown", "id": "0509de01", "metadata": {}, "source": 
["\u8ba9\u6211\u4eec\u9009\u62e9\u4e00\u7bc7\u8bc4\u8bba\u5e76\u901a\u8fc7\u6574\u4e2a\u94fe\u4f20\u9012\u5b83\uff0c\u53ef\u4ee5\u53d1\u73b0\uff0c\u539f\u59cbreview\u662f\u6cd5\u8bed\uff0c\u53ef\u4ee5\u628a\u82f1\u6587review\u770b\u505a\u662f\u4e00\u79cd\u7ffb\u8bd1\uff0c\u63a5\u4e0b\u6765\u662f\u6839\u636e\u82f1\u6587review\u5f97\u5230\u7684\u603b\u7ed3\uff0c\u6700\u540e\u8f93\u51fa\u7684\u662f\u7528\u6cd5\u8bed\u539f\u6587\u8fdb\u884c\u7684\u7eed\u5199\u4fe1\u606f\u3002"]}, {"cell_type": "code", "execution_count": 12, "id": "51b04f45", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new SequentialChain chain...\u001b[0m\n"]}, {"name": "stderr", "output_type": "stream", "text": ["Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 1.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 2.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 8.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n"]}, {"name": "stdout", "output_type": "stream", "text": ["\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["{'Review': \"Je trouve le go\u00fbt m\u00e9diocre. La mousse ne tient pas, c'est bizarre. J'ach\u00e8te les m\u00eames dans le commerce et le go\u00fbt est bien meilleur...\\nVieux lot ou contrefa\u00e7on !?\",\n", " 'English_Review': '\"I find the taste mediocre. The foam doesn\\'t hold, it\\'s weird. I buy the same ones in stores and the taste is much better... 
Old batch or counterfeit!?\"',\n", " 'summary': \"The taste is mediocre, the foam doesn't hold and the reviewer suspects it's either an old batch or counterfeit.\",\n", " 'followup_message': \"R\u00e9ponse : La saveur est moyenne, la mousse ne tient pas et le critique soup\u00e7onne qu'il s'agit soit d'un lot p\u00e9rim\u00e9, soit d'une contrefa\u00e7on. Il est important de prendre en compte les commentaires des clients pour am\u00e9liorer notre produit. Nous allons enqu\u00eater sur cette question plus en d\u00e9tail pour nous assurer que nos produits sont de la plus haute qualit\u00e9 possible. Nous esp\u00e9rons que vous nous donnerez une autre chance \u00e0 l'avenir. Merci d'avoir pris le temps de nous donner votre avis sinc\u00e8re.\"}"]}, "execution_count": 12, "metadata": {}, "output_type": "execute_result"}], "source": ["review = df.Review[5]\n", "overall_chain(review)"]}, {"cell_type": "code", "execution_count": 9, "id": "31624a7c", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new SequentialChain chain...\u001b[0m\n"]}, {"name": "stderr", "output_type": "stream", "text": ["Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 1.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 2.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 4.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n", "Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 8.0 seconds as it raised RateLimitError: Rate limit reached for default-gpt-3.5-turbo in organization org-kmMHlitaQynVMwTW3jDTCPga on requests per min. Limit: 3 / min. Please try again in 20s. Contact us through our help center at help.openai.com if you continue to have issues. Please add a payment method to your account to increase your rate limit. Visit https://platform.openai.com/account/billing to add a payment method..\n"]}, {"name": "stdout", "output_type": "stream", "text": ["\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["{'Review': \"Je trouve le go\u00fbt m\u00e9diocre. La mousse ne tient pas, c'est bizarre. 
J'ach\u00e8te les m\u00eames dans le commerce et le go\u00fbt est bien meilleur...\\nVieux lot ou contrefa\u00e7on !?\",\n", " 'English_Review': \"I find the taste mediocre. The foam doesn't last, it's strange. I buy the same ones in stores and the taste is much better...\\nOld batch or counterfeit?!\",\n", " 'summary': \"The reviewer finds the taste mediocre and suspects that either the product is from an old batch or it might be counterfeit, as the foam doesn't last and the taste is not as good as the ones purchased in stores.\",\n", " 'followup_message': \"\u540e\u7eed\u56de\u590d: Bonjour! Je vous remercie de votre commentaire concernant notre produit. Nous sommes d\u00e9sol\u00e9s d'apprendre que vous avez trouv\u00e9 le go\u00fbt moyen et que vous soup\u00e7onnez qu'il s'agit peut-\u00eatre d'un ancien lot ou d'une contrefa\u00e7on, car la mousse ne dure pas et le go\u00fbt n'est pas aussi bon que ceux achet\u00e9s en magasin. Nous appr\u00e9cions vos pr\u00e9occupations et nous aimerions enqu\u00eater davantage sur cette situation. Pourriez-vous s'il vous pla\u00eet nous fournir plus de d\u00e9tails, tels que la date d'achat et le num\u00e9ro de lot du produit? Nous sommes d\u00e9termin\u00e9s \u00e0 offrir la meilleure qualit\u00e9 \u00e0 nos clients et nous ferons tout notre possible pour r\u00e9soudre ce probl\u00e8me. Merci encore pour votre commentaire et nous attendons votre r\u00e9ponse avec impatience. Cordialement.\"}"]}, "execution_count": 9, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "\n", "#\u5b50\u94fe1\n", "\n", "# prompt\u6a21\u677f 1: \u7ffb\u8bd1\u6210\u82f1\u8bed\uff08\u628a\u4e0b\u9762\u7684review\u7ffb\u8bd1\u6210\u82f1\u8bed\uff09\n", "first_prompt = ChatPromptTemplate.from_template(\n", " \"\u628a\u4e0b\u9762\u7684\u8bc4\u8bbareview\u7ffb\u8bd1\u6210\u82f1\u6587:\"\n", " \"\\n\\n{Review}\"\n", ")\n", "# chain 1: \u8f93\u5165\uff1aReview \u8f93\u51fa\uff1a\u82f1\u6587\u7684 Review\n", "chain_one = LLMChain(llm=llm, prompt=first_prompt, \n", " output_key=\"English_Review\"\n", " )\n", "\n", "#\u5b50\u94fe2\n", "\n", "# prompt\u6a21\u677f 2: \u7528\u4e00\u53e5\u8bdd\u603b\u7ed3\u4e0b\u9762\u7684 review\n", "second_prompt = ChatPromptTemplate.from_template(\n", " \"\u8bf7\u4f60\u7528\u4e00\u53e5\u8bdd\u6765\u603b\u7ed3\u4e0b\u9762\u7684\u8bc4\u8bbareview:\"\n", " \"\\n\\n{English_Review}\"\n", ")\n", "# chain 2: \u8f93\u5165\uff1a\u82f1\u6587\u7684Review \u8f93\u51fa\uff1a\u603b\u7ed3\n", "chain_two = LLMChain(llm=llm, prompt=second_prompt, \n", " output_key=\"summary\"\n", " )\n", "\n", "\n", "#\u5b50\u94fe3\n", "\n", "# prompt\u6a21\u677f 3: \u4e0b\u9762review\u4f7f\u7528\u7684\u4ec0\u4e48\u8bed\u8a00\n", "third_prompt = ChatPromptTemplate.from_template(\n", " \"\u4e0b\u9762\u7684\u8bc4\u8bbareview\u4f7f\u7528\u7684\u4ec0\u4e48\u8bed\u8a00:\\n\\n{Review}\"\n", ")\n", "# chain 3: \u8f93\u5165\uff1aReview \u8f93\u51fa\uff1a\u8bed\u8a00\n", "chain_three = LLMChain(llm=llm, prompt=third_prompt,\n", " output_key=\"language\"\n", " )\n", "\n", "\n", "#\u5b50\u94fe4\n", "\n", "# prompt\u6a21\u677f 4: \u4f7f\u7528\u7279\u5b9a\u7684\u8bed\u8a00\u5bf9\u4e0b\u9762\u7684\u603b\u7ed3\u5199\u4e00\u4e2a\u540e\u7eed\u56de\u590d\n", "fourth_prompt = ChatPromptTemplate.from_template(\n", " \"\u4f7f\u7528\u7279\u5b9a\u7684\u8bed\u8a00\u5bf9\u4e0b\u9762\u7684\u603b\u7ed3\u5199\u4e00\u4e2a\u540e\u7eed\u56de\u590d:\"\n", " \"\\n\\n\u603b\u7ed3: {summary}\\n\\n\u8bed\u8a00: {language}\"\n", ")\n", "# chain 4: \u8f93\u5165\uff1a \u603b\u7ed3, 
\u8bed\u8a00 \u8f93\u51fa\uff1a \u540e\u7eed\u56de\u590d\n", "chain_four = LLMChain(llm=llm, prompt=fourth_prompt,\n", " output_key=\"followup_message\"\n", " )\n", "\n", "\n", "# \u5bf9\u56db\u4e2a\u5b50\u94fe\u8fdb\u884c\u7ec4\u5408\n", "\n", "#\u8f93\u5165\uff1areview \u8f93\u51fa\uff1a\u82f1\u6587review\uff0c\u603b\u7ed3\uff0c\u540e\u7eed\u56de\u590d \n", "overall_chain = SequentialChain(\n", " chains=[chain_one, chain_two, chain_three, chain_four],\n", " input_variables=[\"Review\"],\n", " output_variables=[\"English_Review\", \"summary\",\"followup_message\"],\n", " verbose=True\n", ")\n", "\n", "\n", "review = df.Review[5]\n", "overall_chain(review)"]}, {"cell_type": "markdown", "id": "3041ea4c", "metadata": {}, "source": ["## \u56db\u3001 Router Chain\uff08\u8def\u7531\u94fe\uff09"]}, {"cell_type": "markdown", "id": "f0c32f97", "metadata": {}, "source": ["\u5230\u76ee\u524d\u4e3a\u6b62\uff0c\u6211\u4eec\u5df2\u7ecf\u5b66\u4e60\u4e86LLM\u94fe\u548c\u987a\u5e8f\u94fe\u3002\u4f46\u662f\uff0c\u5982\u679c\u60a8\u60f3\u505a\u4e00\u4e9b\u66f4\u590d\u6742\u7684\u4e8b\u60c5\u600e\u4e48\u529e\uff1f\n", "\n", "\u4e00\u4e2a\u76f8\u5f53\u5e38\u89c1\u4f46\u57fa\u672c\u7684\u64cd\u4f5c\u662f\u6839\u636e\u8f93\u5165\u5c06\u5176\u8def\u7531\u5230\u4e00\u6761\u94fe\uff0c\u5177\u4f53\u53d6\u51b3\u4e8e\u8be5\u8f93\u5165\u5230\u5e95\u662f\u4ec0\u4e48\u3002\u5982\u679c\u4f60\u6709\u591a\u4e2a\u5b50\u94fe\uff0c\u6bcf\u4e2a\u5b50\u94fe\u90fd\u4e13\u95e8\u7528\u4e8e\u7279\u5b9a\u7c7b\u578b\u7684\u8f93\u5165\uff0c\u90a3\u4e48\u53ef\u4ee5\u7ec4\u6210\u4e00\u4e2a\u8def\u7531\u94fe\uff0c\u5b83\u9996\u5148\u51b3\u5b9a\u5c06\u5b83\u4f20\u9012\u7ed9\u54ea\u4e2a\u5b50\u94fe\uff0c\u7136\u540e\u5c06\u5b83\u4f20\u9012\u7ed9\u90a3\u4e2a\u94fe\u3002\n", "\n", "\u8def\u7531\u5668\u7531\u4e24\u4e2a\u7ec4\u4ef6\u7ec4\u6210\uff1a\n", "\n", "- \u8def\u7531\u5668\u94fe\u672c\u8eab\uff08\u8d1f\u8d23\u9009\u62e9\u8981\u8c03\u7528\u7684\u4e0b\u4e00\u4e2a\u94fe\uff09\n", "- destination_chains\uff1a\u8def\u7531\u5668\u94fe\u53ef\u4ee5\u8def\u7531\u5230\u7684\u94fe\n", "\n", "\u4e3e\u4e00\u4e2a\u5177\u4f53\u7684\u4f8b\u5b50\uff0c\u8ba9\u6211\u4eec\u770b\u4e00\u4e0b\u6211\u4eec\u5728\u4e0d\u540c\u7c7b\u578b\u7684\u94fe\u4e4b\u95f4\u8def\u7531\u7684\u5730\u65b9\uff0c\u6211\u4eec\u5728\u8fd9\u91cc\u6709\u4e0d\u540c\u7684prompt: "]}, {"cell_type": "markdown", "id": "cb1b4708", "metadata": {}, "source": ["### 4.1 \u5b9a\u4e49\u63d0\u793a\u6a21\u677f"]}, {"cell_type": "code", "execution_count": 49, "id": "ade83f4f", "metadata": {}, "outputs": [], "source": ["#\u7b2c\u4e00\u4e2a\u63d0\u793a\u9002\u5408\u56de\u7b54\u7269\u7406\u95ee\u9898\n", "physics_template = \"\"\"You are a very smart physics professor. \\\n", "You are great at answering questions about physics in a concise\\\n", "and easy to understand manner. \\\n", "When you don't know the answer to a question you admit\\\n", "that you don't know.\n", "\n", "Here is a question:\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u4e8c\u4e2a\u63d0\u793a\u9002\u5408\u56de\u7b54\u6570\u5b66\u95ee\u9898\n", "math_template = \"\"\"You are a very good mathematician. \\\n", "You are great at answering math questions. 
\\\n", "You are so good because you are able to break down \\\n", "hard problems into their component parts, \n", "answer the component parts, and then put them together\\\n", "to answer the broader question.\n", "\n", "Here is a question:\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u4e09\u4e2a\u9002\u5408\u56de\u7b54\u5386\u53f2\u95ee\u9898\n", "history_template = \"\"\"You are a very good historian. \\\n", "You have an excellent knowledge of and understanding of people,\\\n", "events and contexts from a range of historical periods. \\\n", "You have the ability to think, reflect, debate, discuss and \\\n", "evaluate the past. You have a respect for historical evidence\\\n", "and the ability to make use of it to support your explanations \\\n", "and judgements.\n", "\n", "Here is a question:\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u56db\u4e2a\u9002\u5408\u56de\u7b54\u8ba1\u7b97\u673a\u95ee\u9898\n", "computerscience_template = \"\"\" You are a successful computer scientist.\\\n", "You have a passion for creativity, collaboration,\\\n", "forward-thinking, confidence, strong problem-solving capabilities,\\\n", "understanding of theories and algorithms, and excellent communication \\\n", "skills. You are great at answering coding questions. \\\n", "You are so good because you know how to solve a problem by \\\n", "describing the solution in imperative steps \\\n", "that a machine can easily interpret and you know how to \\\n", "choose a solution that has a good balance between \\\n", "time complexity and space complexity. \n", "\n", "Here is a question:\n", "{input}\"\"\""]}, {"cell_type": "code", "execution_count": 51, "id": "f7fade7a", "metadata": {}, "outputs": [], "source": ["# \u4e2d\u6587\n", "#\u7b2c\u4e00\u4e2a\u63d0\u793a\u9002\u5408\u56de\u7b54\u7269\u7406\u95ee\u9898\n", "physics_template = \"\"\"\u4f60\u662f\u4e00\u4e2a\u975e\u5e38\u806a\u660e\u7684\u7269\u7406\u4e13\u5bb6\u3002 \\\n", "\u4f60\u64c5\u957f\u7528\u4e00\u79cd\u7b80\u6d01\u5e76\u4e14\u6613\u4e8e\u7406\u89e3\u7684\u65b9\u5f0f\u53bb\u56de\u7b54\u95ee\u9898\u3002\\\n", "\u5f53\u4f60\u4e0d\u77e5\u9053\u95ee\u9898\u7684\u7b54\u6848\u65f6\uff0c\u4f60\u627f\u8ba4\\\n", "\u4f60\u4e0d\u77e5\u9053.\n", "\n", "\u8fd9\u662f\u4e00\u4e2a\u95ee\u9898:\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u4e8c\u4e2a\u63d0\u793a\u9002\u5408\u56de\u7b54\u6570\u5b66\u95ee\u9898\n", "math_template = \"\"\"\u4f60\u662f\u4e00\u4e2a\u975e\u5e38\u4f18\u79c0\u7684\u6570\u5b66\u5bb6\u3002 \\\n", "\u4f60\u64c5\u957f\u56de\u7b54\u6570\u5b66\u95ee\u9898\u3002 \\\n", "\u4f60\u4e4b\u6240\u4ee5\u5982\u6b64\u4f18\u79c0\uff0c \\\n", "\u662f\u56e0\u4e3a\u4f60\u80fd\u591f\u5c06\u68d8\u624b\u7684\u95ee\u9898\u5206\u89e3\u4e3a\u7ec4\u6210\u90e8\u5206\uff0c\\\n", "\u56de\u7b54\u7ec4\u6210\u90e8\u5206\uff0c\u7136\u540e\u5c06\u5b83\u4eec\u7ec4\u5408\u5728\u4e00\u8d77\uff0c\u56de\u7b54\u66f4\u5e7f\u6cdb\u7684\u95ee\u9898\u3002\n", "\n", "\u8fd9\u662f\u4e00\u4e2a\u95ee\u9898\uff1a\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u4e09\u4e2a\u9002\u5408\u56de\u7b54\u5386\u53f2\u95ee\u9898\n", "history_template = \"\"\"\u4f60\u662f\u4ee5\u4e3a\u975e\u5e38\u4f18\u79c0\u7684\u5386\u53f2\u5b66\u5bb6\u3002 \\\n", "\u4f60\u5bf9\u4e00\u7cfb\u5217\u5386\u53f2\u65f6\u671f\u7684\u4eba\u7269\u3001\u4e8b\u4ef6\u548c\u80cc\u666f\u6709\u7740\u6781\u597d\u7684\u5b66\u8bc6\u548c\u7406\u89e3\\\n", "\u4f60\u6709\u80fd\u529b\u601d\u8003\u3001\u53cd\u601d\u3001\u8fa9\u8bc1\u3001\u8ba8\u8bba\u548c\u8bc4\u4f30\u8fc7\u53bb\u3002\\\n", 
"\u4f60\u5c0a\u91cd\u5386\u53f2\u8bc1\u636e\uff0c\u5e76\u6709\u80fd\u529b\u5229\u7528\u5b83\u6765\u652f\u6301\u4f60\u7684\u89e3\u91ca\u548c\u5224\u65ad\u3002\n", "\n", "\u8fd9\u662f\u4e00\u4e2a\u95ee\u9898:\n", "{input}\"\"\"\n", "\n", "\n", "#\u7b2c\u56db\u4e2a\u9002\u5408\u56de\u7b54\u8ba1\u7b97\u673a\u95ee\u9898\n", "computerscience_template = \"\"\" \u4f60\u662f\u4e00\u4e2a\u6210\u529f\u7684\u8ba1\u7b97\u673a\u79d1\u5b66\u4e13\u5bb6\u3002\\\n", "\u4f60\u6709\u521b\u9020\u529b\u3001\u534f\u4f5c\u7cbe\u795e\u3001\\\n", "\u524d\u77bb\u6027\u601d\u7ef4\u3001\u81ea\u4fe1\u3001\u89e3\u51b3\u95ee\u9898\u7684\u80fd\u529b\u3001\\\n", "\u5bf9\u7406\u8bba\u548c\u7b97\u6cd5\u7684\u7406\u89e3\u4ee5\u53ca\u51fa\u8272\u7684\u6c9f\u901a\u6280\u5de7\u3002\\\n", "\u4f60\u975e\u5e38\u64c5\u957f\u56de\u7b54\u7f16\u7a0b\u95ee\u9898\u3002\\\n", "\u4f60\u4e4b\u6240\u4ee5\u5982\u6b64\u4f18\u79c0\uff0c\u662f\u56e0\u4e3a\u4f60\u77e5\u9053 \\\n", "\u5982\u4f55\u901a\u8fc7\u4ee5\u673a\u5668\u53ef\u4ee5\u8f7b\u677e\u89e3\u91ca\u7684\u547d\u4ee4\u5f0f\u6b65\u9aa4\u63cf\u8ff0\u89e3\u51b3\u65b9\u6848\u6765\u89e3\u51b3\u95ee\u9898\uff0c\\\n", "\u5e76\u4e14\u4f60\u77e5\u9053\u5982\u4f55\u9009\u62e9\u5728\u65f6\u95f4\u590d\u6742\u6027\u548c\u7a7a\u95f4\u590d\u6742\u6027\u4e4b\u95f4\u53d6\u5f97\u826f\u597d\u5e73\u8861\u7684\u89e3\u51b3\u65b9\u6848\u3002\n", "\n", "\u8fd9\u8fd8\u662f\u4e00\u4e2a\u8f93\u5165\uff1a\n", "{input}\"\"\""]}, {"cell_type": "markdown", "id": "6922b35e", "metadata": {}, "source": ["\u9996\u5148\u9700\u8981\u5b9a\u4e49\u8fd9\u4e9b\u63d0\u793a\u6a21\u677f\uff0c\u5728\u6211\u4eec\u62e5\u6709\u4e86\u8fd9\u4e9b\u63d0\u793a\u6a21\u677f\u540e\uff0c\u53ef\u4ee5\u4e3a\u6bcf\u4e2a\u6a21\u677f\u547d\u540d\uff0c\u7136\u540e\u63d0\u4f9b\u63cf\u8ff0\u3002\u4f8b\u5982\uff0c\u7b2c\u4e00\u4e2a\u7269\u7406\u5b66\u7684\u63cf\u8ff0\u9002\u5408\u56de\u7b54\u5173\u4e8e\u7269\u7406\u5b66\u7684\u95ee\u9898\uff0c\u8fd9\u4e9b\u4fe1\u606f\u5c06\u4f20\u9012\u7ed9\u8def\u7531\u94fe\uff0c\u7136\u540e\u7531\u8def\u7531\u94fe\u51b3\u5b9a\u4f55\u65f6\u4f7f\u7528\u6b64\u5b50\u94fe\u3002"]}, {"cell_type": "code", "execution_count": 27, "id": "141a3d32", "metadata": {}, "outputs": [], "source": ["prompt_infos = [\n", " {\n", " \"name\": \"physics\", \n", " \"description\": \"Good for answering questions about physics\", \n", " \"prompt_template\": physics_template\n", " },\n", " {\n", " \"name\": \"math\", \n", " \"description\": \"Good for answering math questions\", \n", " \"prompt_template\": math_template\n", " },\n", " {\n", " \"name\": \"History\", \n", " \"description\": \"Good for answering history questions\", \n", " \"prompt_template\": history_template\n", " },\n", " {\n", " \"name\": \"computer science\", \n", " \"description\": \"Good for answering computer science questions\", \n", " \"prompt_template\": computerscience_template\n", " }\n", "]"]}, {"cell_type": "code", "execution_count": 52, "id": "deb8aafc", "metadata": {}, "outputs": [], "source": ["# \u4e2d\u6587\n", "prompt_infos = [\n", " {\n", " \"\u540d\u5b57\": \"\u7269\u7406\u5b66\", \n", " \"\u63cf\u8ff0\": \"\u64c5\u957f\u56de\u7b54\u5173\u4e8e\u7269\u7406\u5b66\u7684\u95ee\u9898\", \n", " \"\u63d0\u793a\u6a21\u677f\": physics_template\n", " },\n", " {\n", " \"\u540d\u5b57\": \"\u6570\u5b66\", \n", " \"\u63cf\u8ff0\": \"\u64c5\u957f\u56de\u7b54\u6570\u5b66\u95ee\u9898\", \n", " \"\u63d0\u793a\u6a21\u677f\": math_template\n", " },\n", " {\n", " \"\u540d\u5b57\": \"\u5386\u53f2\", \n", " \"\u63cf\u8ff0\": \"\u64c5\u957f\u56de\u7b54\u5386\u53f2\u95ee\u9898\", 
\n", " \"\u63d0\u793a\u6a21\u677f\": history_template\n", " },\n", " {\n", " \"\u540d\u5b57\": \"\u8ba1\u7b97\u673a\u79d1\u5b66\", \n", " \"\u63cf\u8ff0\": \"\u64c5\u957f\u56de\u7b54\u8ba1\u7b97\u673a\u79d1\u5b66\u95ee\u9898\", \n", " \"\u63d0\u793a\u6a21\u677f\": computerscience_template\n", " }\n", "]\n"]}, {"cell_type": "markdown", "id": "80eb1de8", "metadata": {}, "source": ["### 4.2 \u5bfc\u5165\u76f8\u5173\u7684\u5305"]}, {"cell_type": "code", "execution_count": 28, "id": "31b06fc8", "metadata": {}, "outputs": [], "source": ["from langchain.chains.router import MultiPromptChain #\u5bfc\u5165\u591a\u63d0\u793a\u94fe\n", "from langchain.chains.router.llm_router import LLMRouterChain,RouterOutputParser\n", "from langchain.prompts import PromptTemplate"]}, {"cell_type": "markdown", "id": "50c16f01", "metadata": {}, "source": ["### 4.3 \u5b9a\u4e49\u8bed\u8a00\u6a21\u578b"]}, {"cell_type": "code", "execution_count": 29, "id": "f3f50bcc", "metadata": {}, "outputs": [], "source": ["llm = ChatOpenAI(temperature=0)"]}, {"cell_type": "markdown", "id": "8795cd42", "metadata": {}, "source": ["### 4.4 LLMRouterChain\uff08\u6b64\u94fe\u4f7f\u7528 LLM \u6765\u786e\u5b9a\u5982\u4f55\u8def\u7531\u4e8b\u7269\uff09\n", "\n", "\u5728\u8fd9\u91cc\uff0c\u6211\u4eec\u9700\u8981\u4e00\u4e2a**\u591a\u63d0\u793a\u94fe**\u3002\u8fd9\u662f\u4e00\u79cd\u7279\u5b9a\u7c7b\u578b\u7684\u94fe\uff0c\u7528\u4e8e\u5728\u591a\u4e2a\u4e0d\u540c\u7684\u63d0\u793a\u6a21\u677f\u4e4b\u95f4\u8fdb\u884c\u8def\u7531\u3002\n", "\u4f46\u662f\uff0c\u8fd9\u53ea\u662f\u4f60\u53ef\u4ee5\u8def\u7531\u7684\u4e00\u79cd\u7c7b\u578b\u3002\u4f60\u4e5f\u53ef\u4ee5\u5728\u4efb\u4f55\u7c7b\u578b\u7684\u94fe\u4e4b\u95f4\u8fdb\u884c\u8def\u7531\u3002\n", "\n", "\u8fd9\u91cc\u6211\u4eec\u8981\u5b9e\u73b0\u7684\u51e0\u4e2a\u7c7b\u662fLLM\u8def\u7531\u5668\u94fe\u3002\u8fd9\u4e2a\u7c7b\u672c\u8eab\u4f7f\u7528\u8bed\u8a00\u6a21\u578b\u6765\u5728\u4e0d\u540c\u7684\u5b50\u94fe\u4e4b\u95f4\u8fdb\u884c\u8def\u7531\u3002\n", "\u8fd9\u5c31\u662f\u4e0a\u9762\u63d0\u4f9b\u7684\u63cf\u8ff0\u548c\u540d\u79f0\u5c06\u88ab\u4f7f\u7528\u7684\u5730\u65b9\u3002"]}, {"cell_type": "markdown", "id": "46633b43", "metadata": {}, "source": ["#### 4.4.1 \u521b\u5efa\u76ee\u6807\u94fe \n", "\u76ee\u6807\u94fe\u662f\u7531\u8def\u7531\u94fe\u8c03\u7528\u7684\u94fe\uff0c\u6bcf\u4e2a\u76ee\u6807\u94fe\u90fd\u662f\u4e00\u4e2a\u8bed\u8a00\u6a21\u578b\u94fe"]}, {"cell_type": "code", "execution_count": 30, "id": "8eefec24", "metadata": {}, "outputs": [], "source": ["destination_chains = {}\n", "for p_info in prompt_infos:\n", " name = p_info[\"name\"]\n", " prompt_template = p_info[\"prompt_template\"]\n", " prompt = ChatPromptTemplate.from_template(template=prompt_template)\n", " chain = LLMChain(llm=llm, prompt=prompt)\n", " destination_chains[name] = chain \n", " \n", "destinations = [f\"{p['name']}: {p['description']}\" for p in prompt_infos]\n", "destinations_str = \"\\n\".join(destinations)"]}, {"cell_type": "code", "execution_count": null, "id": "fd6eb641", "metadata": {}, "outputs": [], "source": ["# \u4e2d\u6587\n", "destination_chains = {}\n", "for p_info in prompt_infos:\n", " name = p_info[\"\u540d\u5b57\"]\n", " prompt_template = p_info[\"\u63d0\u793a\u6a21\u677f\"]\n", " prompt = ChatPromptTemplate.from_template(template=prompt_template)\n", " chain = LLMChain(llm=llm, prompt=prompt)\n", " destination_chains[name] = chain \n", " \n", "destinations = [f\"{p['\u540d\u5b57']}: {p['\u63cf\u8ff0']}\" for p in prompt_infos]\n", "destinations_str = 
\"\\n\".join(destinations)"]}, {"cell_type": "markdown", "id": "eba115de", "metadata": {}, "source": ["#### 4.4.2 \u521b\u5efa\u9ed8\u8ba4\u76ee\u6807\u94fe\n", "\u9664\u4e86\u76ee\u6807\u94fe\u4e4b\u5916\uff0c\u6211\u4eec\u8fd8\u9700\u8981\u4e00\u4e2a\u9ed8\u8ba4\u76ee\u6807\u94fe\u3002\u8fd9\u662f\u4e00\u4e2a\u5f53\u8def\u7531\u5668\u65e0\u6cd5\u51b3\u5b9a\u4f7f\u7528\u54ea\u4e2a\u5b50\u94fe\u65f6\u8c03\u7528\u7684\u94fe\u3002\u5728\u4e0a\u9762\u7684\u793a\u4f8b\u4e2d\uff0c\u5f53\u8f93\u5165\u95ee\u9898\u4e0e\u7269\u7406\u3001\u6570\u5b66\u3001\u5386\u53f2\u6216\u8ba1\u7b97\u673a\u79d1\u5b66\u65e0\u5173\u65f6\uff0c\u53ef\u80fd\u4f1a\u8c03\u7528\u5b83\u3002"]}, {"cell_type": "code", "execution_count": 31, "id": "9f98018a", "metadata": {}, "outputs": [], "source": ["default_prompt = ChatPromptTemplate.from_template(\"{input}\")\n", "default_chain = LLMChain(llm=llm, prompt=default_prompt)"]}, {"cell_type": "markdown", "id": "948700c4", "metadata": {}, "source": ["#### 4.4.3 \u521b\u5efaLLM\u7528\u4e8e\u5728\u4e0d\u540c\u94fe\u4e4b\u95f4\u8fdb\u884c\u8def\u7531\u7684\u6a21\u677f\n", "\u8fd9\u5305\u62ec\u8981\u5b8c\u6210\u7684\u4efb\u52a1\u7684\u8bf4\u660e\u4ee5\u53ca\u8f93\u51fa\u5e94\u8be5\u91c7\u7528\u7684\u7279\u5b9a\u683c\u5f0f\u3002"]}, {"cell_type": "markdown", "id": "24f30c2c", "metadata": {}, "source": ["\u6ce8\u610f\uff1a\u6b64\u5904\u5728\u539f\u6559\u7a0b\u7684\u57fa\u7840\u4e0a\u6dfb\u52a0\u4e86\u4e00\u4e2a\u793a\u4f8b\uff0c\u4e3b\u8981\u662f\u56e0\u4e3a\"gpt-3.5-turbo\"\u6a21\u578b\u4e0d\u80fd\u5f88\u597d\u9002\u5e94\u7406\u89e3\u6a21\u677f\u7684\u610f\u601d\uff0c\u4f7f\u7528 \"text-davinci-003\" \u6216\u8005\"gpt-4-0613\"\u53ef\u4ee5\u5f88\u597d\u7684\u5de5\u4f5c\uff0c\u56e0\u6b64\u5728\u8fd9\u91cc\u591a\u52a0\u4e86\u793a\u4f8b\u63d0\u793a\u8ba9\u5176\u66f4\u597d\u7684\u5b66\u4e60\u3002\n", "eg:\n", "<< INPUT >>\n", "\"What is black body radiation?\"\n", "<< OUTPUT >>\n", "```json\n", "{{{{\n", " \"destination\": string \\ name of the prompt to use or \"DEFAULT\"\n", " \"next_inputs\": string \\ a potentially modified version of the original input\n", "}}}}\n", "```"]}, {"cell_type": "code", "execution_count": 32, "id": "11b2e2ba", "metadata": {}, "outputs": [], "source": ["MULTI_PROMPT_ROUTER_TEMPLATE = \"\"\"Given a raw text input to a \\\n", "language model select the model prompt best suited for the input. \\\n", "You will be given the names of the available prompts and a \\\n", "description of what the prompt is best suited for. 
\\\n", "You may also revise the original input if you think that revising\\\n", "it will ultimately lead to a better response from the language model.\n", "\n", "<< FORMATTING >>\n", "Return a markdown code snippet with a JSON object formatted to look like:\n", "```json\n", "{{{{\n", " \"destination\": string \\ name of the prompt to use or \"DEFAULT\"\n", " \"next_inputs\": string \\ a potentially modified version of the original input\n", "}}}}\n", "```\n", "\n", "REMEMBER: \"destination\" MUST be one of the candidate prompt \\\n", "names specified below OR it can be \"DEFAULT\" if the input is not\\\n", "well suited for any of the candidate prompts.\n", "REMEMBER: \"next_inputs\" can just be the original input \\\n", "if you don't think any modifications are needed.\n", "\n", "<< CANDIDATE PROMPTS >>\n", "{destinations}\n", "\n", "<< INPUT >>\n", "{{input}}\n", "\n", "<< OUTPUT (remember to include the ```json)>>\n", "\n", "eg:\n", "<< INPUT >>\n", "\"What is black body radiation?\"\n", "<< OUTPUT >>\n", "```json\n", "{{{{\n", " \"destination\": string \\ name of the prompt to use or \"DEFAULT\"\n", " \"next_inputs\": string \\ a potentially modified version of the original input\n", "}}}}\n", "```\n", "\n", "\"\"\""]}, {"cell_type": "code", "execution_count": null, "id": "a7aae035", "metadata": {}, "outputs": [], "source": ["# \u4e2d\u6587\n", "\n", "# \u591a\u63d0\u793a\u8def\u7531\u6a21\u677f\n", "MULTI_PROMPT_ROUTER_TEMPLATE = \"\"\"\u7ed9\u8bed\u8a00\u6a21\u578b\u4e00\u4e2a\u539f\u59cb\u6587\u672c\u8f93\u5165\uff0c\\\n", "\u8ba9\u5176\u9009\u62e9\u6700\u9002\u5408\u8f93\u5165\u7684\u6a21\u578b\u63d0\u793a\u3002\\\n", "\u7cfb\u7edf\u5c06\u4e3a\u60a8\u63d0\u4f9b\u53ef\u7528\u63d0\u793a\u7684\u540d\u79f0\u4ee5\u53ca\u6700\u9002\u5408\u6539\u63d0\u793a\u7684\u63cf\u8ff0\u3002\\\n", "\u5982\u679c\u4f60\u8ba4\u4e3a\u4fee\u6539\u539f\u59cb\u8f93\u5165\u6700\u7ec8\u4f1a\u5bfc\u81f4\u8bed\u8a00\u6a21\u578b\u505a\u51fa\u66f4\u597d\u7684\u54cd\u5e94\uff0c\\\n", "\u4f60\u4e5f\u53ef\u4ee5\u4fee\u6539\u539f\u59cb\u8f93\u5165\u3002\n", "\n", "\n", "<< \u683c\u5f0f >>\n", "\u8fd4\u56de\u4e00\u4e2a\u5e26\u6709JSON\u5bf9\u8c61\u7684markdown\u4ee3\u7801\u7247\u6bb5\uff0c\u8be5JSON\u5bf9\u8c61\u7684\u683c\u5f0f\u5982\u4e0b\uff1a\n", "```json\n", "{{{{\n", " \"destination\": \u5b57\u7b26\u4e32 \\ \u4f7f\u7528\u7684\u63d0\u793a\u540d\u5b57\u6216\u8005\u4f7f\u7528 \"DEFAULT\"\n", " \"next_inputs\": \u5b57\u7b26\u4e32 \\ \u539f\u59cb\u8f93\u5165\u7684\u6539\u8fdb\u7248\u672c\n", "}}}}\n", "```\n", "\n", "\n", "\u8bb0\u4f4f\uff1a\u201cdestination\u201d\u5fc5\u987b\u662f\u4e0b\u9762\u6307\u5b9a\u7684\u5019\u9009\u63d0\u793a\u540d\u79f0\u4e4b\u4e00\uff0c\\\n", "\u6216\u8005\u5982\u679c\u8f93\u5165\u4e0d\u592a\u9002\u5408\u4efb\u4f55\u5019\u9009\u63d0\u793a\uff0c\\\n", "\u5219\u53ef\u4ee5\u662f \u201cDEFAULT\u201d \u3002\n", "\u8bb0\u4f4f\uff1a\u5982\u679c\u60a8\u8ba4\u4e3a\u4e0d\u9700\u8981\u4efb\u4f55\u4fee\u6539\uff0c\\\n", "\u5219 \u201cnext_inputs\u201d \u53ef\u4ee5\u53ea\u662f\u539f\u59cb\u8f93\u5165\u3002\n", "\n", "<< \u5019\u9009\u63d0\u793a >>\n", "{destinations}\n", "\n", "<< \u8f93\u5165 >>\n", "{{input}}\n", "\n", "<< \u8f93\u51fa (\u8bb0\u5f97\u8981\u5305\u542b ```json)>>\n", "\n", "\u6837\u4f8b:\n", "<< \u8f93\u5165 >>\n", "\"\u4ec0\u4e48\u662f\u9ed1\u4f53\u8f90\u5c04?\"\n", "<< \u8f93\u51fa >>\n", "```json\n", "{{{{\n", " \"destination\": \u5b57\u7b26\u4e32 \\ \u4f7f\u7528\u7684\u63d0\u793a\u540d\u5b57\u6216\u8005\u4f7f\u7528 \"DEFAULT\"\n", " \"next_inputs\": \u5b57\u7b26\u4e32 \\ 
\u539f\u59cb\u8f93\u5165\u7684\u6539\u8fdb\u7248\u672c\n", "}}}}\n", "```\n", "\n", "\"\"\""]}, {"cell_type": "markdown", "id": "de5c46d0", "metadata": {}, "source": ["#### 4.4.4 \u6784\u5efa\u8def\u7531\u94fe\n", "\u9996\u5148\uff0c\u6211\u4eec\u901a\u8fc7\u683c\u5f0f\u5316\u4e0a\u9762\u5b9a\u4e49\u7684\u76ee\u6807\u521b\u5efa\u5b8c\u6574\u7684\u8def\u7531\u5668\u6a21\u677f\u3002\u8fd9\u4e2a\u6a21\u677f\u53ef\u4ee5\u9002\u7528\u8bb8\u591a\u4e0d\u540c\u7c7b\u578b\u7684\u76ee\u6807\u3002\n", "\u56e0\u6b64\uff0c\u5728\u8fd9\u91cc\uff0c\u60a8\u53ef\u4ee5\u6dfb\u52a0\u4e00\u4e2a\u4e0d\u540c\u7684\u5b66\u79d1\uff0c\u5982\u82f1\u8bed\u6216\u62c9\u4e01\u8bed\uff0c\u800c\u4e0d\u4ec5\u4ec5\u662f\u7269\u7406\u3001\u6570\u5b66\u3001\u5386\u53f2\u548c\u8ba1\u7b97\u673a\u79d1\u5b66\u3002\n", "\n", "\u63a5\u4e0b\u6765\uff0c\u6211\u4eec\u4ece\u8fd9\u4e2a\u6a21\u677f\u521b\u5efa\u63d0\u793a\u6a21\u677f\n", "\n", "\u6700\u540e\uff0c\u901a\u8fc7\u4f20\u5165llm\u548c\u6574\u4e2a\u8def\u7531\u63d0\u793a\u6765\u521b\u5efa\u8def\u7531\u94fe\u3002\u9700\u8981\u6ce8\u610f\u7684\u662f\u8fd9\u91cc\u6709\u8def\u7531\u8f93\u51fa\u89e3\u6790\uff0c\u8fd9\u5f88\u91cd\u8981\uff0c\u56e0\u4e3a\u5b83\u5c06\u5e2e\u52a9\u8fd9\u4e2a\u94fe\u8def\u51b3\u5b9a\u5728\u54ea\u4e9b\u5b50\u94fe\u8def\u4e4b\u95f4\u8fdb\u884c\u8def\u7531\u3002"]}, {"cell_type": "code", "execution_count": 34, "id": "1387109d", "metadata": {}, "outputs": [], "source": ["router_template = MULTI_PROMPT_ROUTER_TEMPLATE.format(\n", " destinations=destinations_str\n", ")\n", "router_prompt = PromptTemplate(\n", " template=router_template,\n", " input_variables=[\"input\"],\n", " output_parser=RouterOutputParser(),\n", ")\n", "\n", "router_chain = LLMRouterChain.from_llm(llm, router_prompt)"]}, {"cell_type": "markdown", "id": "7e92355c", "metadata": {}, "source": ["#### 4.4.5 \u521b\u5efa\u6574\u4f53\u94fe\u8def"]}, {"cell_type": "code", "execution_count": 35, "id": "2fb7d560", "metadata": {}, "outputs": [], "source": ["#\u591a\u63d0\u793a\u94fe\n", "chain = MultiPromptChain(router_chain=router_chain, #l\u8def\u7531\u94fe\u8def\n", " destination_chains=destination_chains, #\u76ee\u6807\u94fe\u8def\n", " default_chain=default_chain, #\u9ed8\u8ba4\u94fe\u8def\n", " verbose=True \n", " )"]}, {"cell_type": "markdown", "id": "086503f7", "metadata": {}, "source": ["#### 4.4.6 \u8fdb\u884c\u63d0\u95ee"]}, {"cell_type": "markdown", "id": "969cd878", "metadata": {}, "source": ["\u5982\u679c\u6211\u4eec\u95ee\u4e00\u4e2a\u7269\u7406\u95ee\u9898\uff0c\u6211\u4eec\u5e0c\u671b\u770b\u5230\u4ed6\u88ab\u8def\u7531\u5230\u7269\u7406\u94fe\u8def"]}, {"cell_type": "code", "execution_count": null, "id": "2217d987", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "physics: {'input': 'What is black body radiation?'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'Black body radiation is the electromagnetic radiation emitted by a perfect black body, which absorbs all incident radiation and reflects none. It is characterized by a continuous spectrum of radiated energy that is dependent on the temperature of the body, with higher temperatures leading to more intense and shorter wavelength radiation. 
This phenomenon is an important concept in thermal physics and has numerous applications, ranging from understanding stellar spectra to designing artificial light sources.'"]}, "metadata": {}, "output_type": "display_data"}], "source": ["# \u95ee\u9898\uff1a\u4ec0\u4e48\u662f\u9ed1\u4f53\u8f90\u5c04\uff1f\n", "chain.run(\"What is black body radiation?\")"]}, {"cell_type": "code", "execution_count": 36, "id": "4446724c", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "physics: {'input': '\u4ec0\u4e48\u662f\u9ed1\u4f53\u8f90\u5c04\uff1f'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'\u9ed1\u4f53\u8f90\u5c04\u662f\u6307\u4e00\u4e2a\u7406\u60f3\u5316\u7684\u7269\u4f53\uff0c\u5b83\u80fd\u591f\u5b8c\u5168\u5438\u6536\u6240\u6709\u5165\u5c04\u5230\u5b83\u8868\u9762\u7684\u8f90\u5c04\u80fd\u91cf\uff0c\u5e76\u4ee5\u70ed\u8f90\u5c04\u7684\u5f62\u5f0f\u91cd\u65b0\u53d1\u5c04\u51fa\u6765\u3002\u9ed1\u4f53\u8f90\u5c04\u7684\u7279\u70b9\u662f\u5176\u8f90\u5c04\u80fd\u91cf\u7684\u5206\u5e03\u4e0e\u6e29\u5ea6\u6709\u5173\uff0c\u968f\u7740\u6e29\u5ea6\u7684\u5347\u9ad8\uff0c\u8f90\u5c04\u80fd\u91cf\u7684\u5cf0\u503c\u4f1a\u5411\u66f4\u77ed\u7684\u6ce2\u957f\u65b9\u5411\u79fb\u52a8\u3002\u8fd9\u4e2a\u73b0\u8c61\u88ab\u79f0\u4e3a\u9ed1\u4f53\u8f90\u5c04\u8c31\u7684\u4f4d\u79fb\u5b9a\u5f8b\uff0c\u7531\u666e\u6717\u514b\u572820\u4e16\u7eaa\u521d\u63d0\u51fa\u3002\u9ed1\u4f53\u8f90\u5c04\u5728\u7814\u7a76\u70ed\u529b\u5b66\u3001\u91cf\u5b50\u529b\u5b66\u548c\u5b87\u5b99\u5b66\u7b49\u9886\u57df\u4e2d\u5177\u6709\u91cd\u8981\u7684\u5e94\u7528\u3002'"]}, "execution_count": 36, "metadata": {}, "output_type": "execute_result"}], "source": ["#\u4e2d\u6587\n", "chain.run(\"\u4ec0\u4e48\u662f\u9ed1\u4f53\u8f90\u5c04\uff1f\")"]}, {"cell_type": "code", "execution_count": 41, "id": "ef81eda3", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "History: {'input': '\u4f60\u77e5\u9053\u674e\u767d\u662f\u8c01\u561b?'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'\u674e\u767d\u662f\u5510\u671d\u65f6\u671f\u7684\u4e00\u4f4d\u8457\u540d\u8bd7\u4eba\u3002\u4ed6\u7684\u8bd7\u6b4c\u4ee5\u8c6a\u653e\u3001\u5954\u653e\u3001\u81ea\u7531\u7684\u98ce\u683c\u8457\u79f0\uff0c\u88ab\u8a89\u4e3a\u201c\u8bd7\u4ed9\u201d\u3002\u4ed6\u7684\u4f5c\u54c1\u6d89\u53ca\u5e7f\u6cdb\uff0c\u5305\u62ec\u5c71\u6c34\u7530\u56ed\u3001\u5386\u53f2\u4f20\u8bf4\u3001\u54f2\u7406\u601d\u8003\u7b49\u591a\u4e2a\u65b9\u9762\uff0c\u5bf9\u4e2d\u56fd\u53e4\u5178\u6587\u5b66\u7684\u53d1\u5c55\u4ea7\u751f\u4e86\u6df1\u8fdc\u7684\u5f71\u54cd\u3002'"]}, "execution_count": 41, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "chain.run(\"\u4f60\u77e5\u9053\u674e\u767d\u662f\u8c01\u561b?\")"]}, {"cell_type": "markdown", "id": "289c5ca9", "metadata": {}, "source": ["\u5982\u679c\u6211\u4eec\u95ee\u4e00\u4e2a\u6570\u5b66\u95ee\u9898\uff0c\u6211\u4eec\u5e0c\u671b\u770b\u5230\u4ed6\u88ab\u8def\u7531\u5230\u6570\u5b66\u94fe\u8def"]}, {"cell_type": "code", "execution_count": 22, "id": "3b717379", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "math: {'input': 'what is 2 + 2'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'As an AI language 
model, I can answer this question. The answer to 2 + 2 is 4.'"]}, "execution_count": 22, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u95ee\u9898\uff1a2+2\u7b49\u4e8e\u591a\u5c11\uff1f\n", "chain.run(\"what is 2 + 2\")"]}, {"cell_type": "code", "execution_count": 37, "id": "795bea17", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "math: {'input': '2 + 2 \u7b49\u4e8e\u591a\u5c11'}"]}, {"name": "stderr", "output_type": "stream", "text": ["Retrying langchain.chat_models.openai.ChatOpenAI.completion_with_retry.<locals>._completion_with_retry in 1.0 seconds as it raised ServiceUnavailableError: The server is overloaded or not ready yet..\n"]}, {"name": "stdout", "output_type": "stream", "text": ["\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'2 + 2 \u7b49\u4e8e 4\u3002'"]}, "execution_count": 37, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "chain.run(\"2 + 2 \u7b49\u4e8e\u591a\u5c11\")"]}, {"cell_type": "markdown", "id": "4186a2b9", "metadata": {}, "source": ["\u5982\u679c\u6211\u4eec\u4f20\u9012\u4e00\u4e2a\u4e0e\u4efb\u4f55\u5b50\u94fe\u8def\u90fd\u65e0\u5173\u7684\u95ee\u9898\u65f6\uff0c\u4f1a\u53d1\u751f\u4ec0\u4e48\u5462\uff1f\n", "\n", "\u8fd9\u91cc\uff0c\u6211\u4eec\u95ee\u4e86\u4e00\u4e2a\u5173\u4e8e\u751f\u7269\u5b66\u7684\u95ee\u9898\uff0c\u6211\u4eec\u53ef\u4ee5\u770b\u5230\u5b83\u9009\u62e9\u7684\u94fe\u8def\u662f\u65e0\u3002\u8fd9\u610f\u5473\u7740\u5b83\u5c06\u88ab**\u4f20\u9012\u5230\u9ed8\u8ba4\u94fe\u8def\uff0c\u5b83\u672c\u8eab\u53ea\u662f\u5bf9\u8bed\u8a00\u6a21\u578b\u7684\u901a\u7528\u8c03\u7528**\u3002\u8bed\u8a00\u6a21\u578b\u5e78\u8fd0\u5730\u5bf9\u751f\u7269\u5b66\u77e5\u9053\u5f88\u591a\uff0c\u6240\u4ee5\u5b83\u53ef\u4ee5\u5e2e\u52a9\u6211\u4eec\u3002"]}, {"cell_type": "code", "execution_count": 40, "id": "29e5be01", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "None: {'input': 'Why does every cell in our body contain DNA?'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'Every cell in our body contains DNA because DNA carries the genetic information that determines the characteristics and functions of each cell. DNA contains the instructions for the synthesis of proteins, which are essential for the structure and function of cells. Additionally, DNA is responsible for the transmission of genetic information from one generation to the next. 
Therefore, every cell in our body needs DNA to carry out its specific functions and to maintain the integrity of the organism as a whole.'"]}, "execution_count": 40, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u95ee\u9898\uff1a\u4e3a\u4ec0\u4e48\u6211\u4eec\u8eab\u4f53\u91cc\u7684\u6bcf\u4e2a\u7ec6\u80de\u90fd\u5305\u542bDNA\uff1f\n", "chain.run(\"Why does every cell in our body contain DNA?\")"]}, {"cell_type": "code", "execution_count": 38, "id": "a64d0759", "metadata": {}, "outputs": [{"name": "stdout", "output_type": "stream", "text": ["\n", "\n", "\u001b[1m> Entering new MultiPromptChain chain...\u001b[0m\n", "None: {'input': '\u4e3a\u4ec0\u4e48\u6211\u4eec\u8eab\u4f53\u91cc\u7684\u6bcf\u4e2a\u7ec6\u80de\u90fd\u5305\u542bDNA\uff1f'}\n", "\u001b[1m> Finished chain.\u001b[0m\n"]}, {"data": {"text/plain": ["'\u6211\u4eec\u8eab\u4f53\u91cc\u7684\u6bcf\u4e2a\u7ec6\u80de\u90fd\u5305\u542bDNA\uff0c\u662f\u56e0\u4e3aDNA\u662f\u9057\u4f20\u4fe1\u606f\u7684\u8f7d\u4f53\u3002DNA\u662f\u7531\u56db\u79cd\u78b1\u57fa\uff08\u817a\u560c\u5464\u3001\u9e1f\u560c\u5464\u3001\u80f8\u817a\u5627\u5576\u548c\u9cde\u560c\u5464\uff09\u7ec4\u6210\u7684\u957f\u94fe\u72b6\u5206\u5b50\uff0c\u5b83\u5b58\u50a8\u4e86\u751f\u7269\u4f53\u7684\u9057\u4f20\u4fe1\u606f\uff0c\u5305\u62ec\u4e2a\u4f53\u7684\u7279\u5f81\u3001\u751f\u957f\u53d1\u80b2\u3001\u4ee3\u8c22\u529f\u80fd\u7b49\u3002\u6bcf\u4e2a\u7ec6\u80de\u90fd\u9700\u8981\u8fd9\u4e9b\u9057\u4f20\u4fe1\u606f\u6765\u6267\u884c\u5176\u7279\u5b9a\u7684\u529f\u80fd\u548c\u4efb\u52a1\uff0c\u56e0\u6b64\u6bcf\u4e2a\u7ec6\u80de\u90fd\u9700\u8981\u5305\u542bDNA\u3002\u6b64\u5916\uff0cDNA\u8fd8\u80fd\u901a\u8fc7\u590d\u5236\u548c\u4f20\u9012\u7ed9\u4e0b\u4e00\u4ee3\u7ec6\u80de\u548c\u4e2a\u4f53\uff0c\u4ee5\u4fdd\u8bc1\u9057\u4f20\u4fe1\u606f\u7684\u4f20\u627f\u3002'"]}, "execution_count": 38, "metadata": {}, "output_type": "execute_result"}], "source": ["# \u4e2d\u6587\n", "chain.run(\"\u4e3a\u4ec0\u4e48\u6211\u4eec\u8eab\u4f53\u91cc\u7684\u6bcf\u4e2a\u7ec6\u80de\u90fd\u5305\u542bDNA\uff1f\")"]}], "metadata": {"kernelspec": {"display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3"}, "language_info": {"codemirror_mode": {"name": "ipython", "version": 3}, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.12"}}, "nbformat": 4, "nbformat_minor": 5}
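Section 4.4.4 noted that the router template works with many different destination sets: you could add another subject, such as English or Latin. As a closing sketch (an assumption layered on the English-language objects defined above, not part of the original notebook), here is how you might register one more destination, for example a hypothetical biology prompt, so that a question like the DNA one above gets routed to a dedicated chain instead of the DEFAULT chain:

```python
# Sketch (not in the original notebook): add one more destination to the router.
# Assumes the English prompt_infos, destination_chains, default_chain and
# MULTI_PROMPT_ROUTER_TEMPLATE from sections 4.4.1-4.4.5; biology_template is hypothetical.
biology_template = """You are a knowledgeable biologist. \
You explain cellular and molecular processes clearly and accurately.

Here is a question:
{input}"""

prompt_infos.append({
    "name": "biology",
    "description": "Good for answering biology questions",
    "prompt_template": biology_template,
})

# Rebuild the destination chains and the router with the extended list,
# exactly as in sections 4.4.1 and 4.4.4.
destination_chains["biology"] = LLMChain(
    llm=llm, prompt=ChatPromptTemplate.from_template(biology_template)
)
destinations_str = "\n".join(f"{p['name']}: {p['description']}" for p in prompt_infos)
router_prompt = PromptTemplate(
    template=MULTI_PROMPT_ROUTER_TEMPLATE.format(destinations=destinations_str),
    input_variables=["input"],
    output_parser=RouterOutputParser(),
)
chain = MultiPromptChain(
    router_chain=LLMRouterChain.from_llm(llm, router_prompt),
    destination_chains=destination_chains,
    default_chain=default_chain,
    verbose=True,
)

# "Why does every cell in our body contain DNA?" should now route to "biology"
# rather than to the default chain (the actual routing still depends on the LLM).
```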