Update code
@@ -1,424 +0,0 @@
# Chapter 4: Document Q&A with LangChain

This chapter uses LangChain to build a vector database over a set of documents so that questions can be answered on top of, or about, those documents: given text extracted from PDF files, web pages, or a company's internal document collection, an LLM is used to answer questions about their content.

## 1. Environment setup

Install LangChain and set the `OPENAI_API_KEY` used by ChatGPT:

* Install langchain
```
pip install --upgrade langchain
```
* Install docarray
```
pip install docarray
```
* Set the API key as an environment variable
```
export OPENAI_API_KEY='api-key'
```
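As an alternative to exporting the variable in the shell (or hard-coding the key, as one cell further below does), the key can be read from a local `.env` file or prompted for at runtime. A minimal sketch, not part of the original notebook, assuming `python-dotenv` and the standard-library `getpass`:

```python
import os
from getpass import getpass

from dotenv import load_dotenv, find_dotenv

load_dotenv(find_dotenv())  # reads a .env file from the current directory or a parent

if not os.getenv("OPENAI_API_KEY"):
    # getpass keeps the key out of the notebook source and out of version control
    os.environ["OPENAI_API_KEY"] = getpass("OpenAI API key: ")
```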
```python
import os

from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())  # read environment variables from a local .env file
```

```python
load_dotenv(find_dotenv())
```

Output: `False` — no `.env` file was found, so the key is set directly in the next cell.

```python
import os
os.environ["OPENAI_API_KEY"] = "sk-..."  # replace with your own key; never commit a real key
```

```python
# Import the retrieval QA chain, for question answering over documents
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import CSVLoader
from langchain.vectorstores import DocArrayInMemorySearch
from IPython.display import display, Markdown
```

## Completing a Q&A with LangChain

```python
from langchain.llms import OpenAI

llm = OpenAI(model_name="text-davinci-003", max_tokens=1024)
llm("How would you evaluate artificial intelligence?")
```

Output: "Artificial intelligence is an important modern technology: it can greatly improve human life, ease people's burdens, and raise work efficiency. It helps people be more productive, manage organizations more effectively, and provides more accurate data for better decision-making. It can also help scientists discover new drugs, improve medical services, and develop new environmental technologies. In short, AI is an important technology with broad application prospects."
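The cell above uses the completion-style `OpenAI` LLM; the `ChatOpenAI` chat model imported earlier (but not yet used) can be called in much the same way. A minimal sketch, assuming the LangChain 0.0.x chat-model interface with `HumanMessage` from `langchain.schema`:

```python
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage

# temperature=0 makes the answers as deterministic as the model allows
chat = ChatOpenAI(temperature=0)
result = chat([HumanMessage(content="How would you evaluate artificial intelligence?")])
print(result.content)  # the chat model returns an AIMessage
```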
```python
file = 'OutdoorClothingCatalog_1000.csv'
loader = CSVLoader(file_path=file)
```

```python
import pandas as pd
file = 'OutdoorClothingCatalog_1000.csv'

# header=None keeps the CSV's header row ("name", "description") as data,
# so the preview shows 1001 rows instead of 1000
data = pd.read_csv(file, header=None)
data
```

Output: a 1001 rows × 3 columns preview of the catalog (row index, product name, product description), from "Women's Campside Oxfords" to "Modern Utility Bag".
```python
from langchain.indexes import VectorstoreIndexCreator
```

```python
index = VectorstoreIndexCreator(
    vectorstore_cls=DocArrayInMemorySearch
).from_loaders([loader])
```

Output: the Jupyter kernel crashed while executing this cell ("Canceled future for execute_request message before replies were done"); check the code for the likely cause and see the Jupyter log for details.

```python
from docarray import DocumentArray
```

Output: the kernel crashed again on this cell.

```python
query = "Please list all your shirts with sun protection \
in a table in markdown and summarize each one."
```

```python
response = index.query(query)
```

Output: `NameError: name 'index' is not defined` — the index was never created because the earlier cell crashed.
```python
display(Markdown(response))
```
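For reference, a minimal end-to-end sketch of the intended flow once the vector store builds successfully: embed the CSV rows into a `DocArrayInMemorySearch` store and answer the query with a `RetrievalQA` chain. This is not part of the original notebook and assumes the LangChain 0.0.x API (`OpenAIEmbeddings`, `RetrievalQA.from_chain_type`):

```python
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import CSVLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import DocArrayInMemorySearch
from IPython.display import display, Markdown

loader = CSVLoader(file_path='OutdoorClothingCatalog_1000.csv')
docs = loader.load()  # one Document per catalog row

# Build the in-memory vector store from the documents and OpenAI embeddings
db = DocArrayInMemorySearch.from_documents(docs, OpenAIEmbeddings())

# Retrieval QA chain: retrieve the relevant rows, then "stuff" them into one prompt
qa = RetrievalQA.from_chain_type(
    llm=ChatOpenAI(temperature=0),
    chain_type="stuff",
    retriever=db.as_retriever(),
)

query = ("Please list all your shirts with sun protection "
         "in a table in markdown and summarize each one.")
display(Markdown(qa.run(query)))
```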
(Notebook metadata: kernel "chatGPT", Python 3.9.16.)
@@ -1,380 +0,0 @@
# Summarize

```python
import openai
import os

from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())  # read local .env file

openai.api_key = os.getenv('OPENAI_API_KEY')
```

```python
def get_completion(prompt, model="gpt-3.5-turbo"):  # Andrew mentioned that the prompt/completion paradigm is preferable for this class
    messages = [{"role": "user", "content": prompt}]
    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=0,  # this is the degree of randomness of the model's output
    )
    return response.choices[0].message["content"]
```

## Summarizing a text
```python
prod_review = """
Got this panda plush toy for my daughter's birthday, \
who loves it and takes it everywhere. It's soft and \
super cute, and its face has a friendly look. It's \
a bit small for what I paid though. I think there \
might be other options that are bigger for the \
same price. It arrived a day earlier than expected, \
so I got to play with it myself before I gave it \
to her.
"""
```

## Summarizing with a word / sentence / character limit
```python
prompt = f"""
Your task is to generate a short summary of a product \
review from an ecommerce site.

Summarize the review below, delimited by triple \
backticks, in at most 30 words.

Review: ```{prod_review}```
"""

response = get_completion(prompt)
print(response)
```

Output: `Soft and cute panda plush toy loved by daughter, but a bit small for the price. Arrived early.`

```python
prompt = f"""
Your task is to generate a short summary of a product \
review from an ecommerce site to give feedback to the \
Shipping department.

Summarize the review below, delimited by triple \
backticks, in at most 30 words, and focusing on any aspects \
that mention shipping and delivery of the product.

Review: ```{prod_review}```
"""

response = get_completion(prompt)
print(response)
```

Output: `The panda plush toy arrived a day earlier than expected, but the customer felt it was a bit small for the price paid.`
```python
prompt = f"""
Your task is to generate a short summary of a product \
review from an ecommerce site to give feedback to the \
pricing department, responsible for determining the \
price of the product.

Summarize the review below, delimited by triple \
backticks, in at most 30 words, and focusing on any aspects \
that are relevant to the price and perceived value.

Review: ```{prod_review}```
"""

response = get_completion(prompt)
print(response)
```

Output: `The panda plush toy is soft, cute, and loved by the recipient, but the price may be too high for its size.`

## Extracting instead of summarizing
```python
prompt = f"""
Your task is to extract relevant information from \
a product review from an ecommerce site to give \
feedback to the Shipping department.

From the review below, delimited by triple quotes, \
extract the information relevant to shipping and \
delivery. Limit to 30 words.

Review: ```{prod_review}```
"""

response = get_completion(prompt)
print(response)
```

Output: `The product arrived a day earlier than expected.`

## Summarizing multiple texts
```python
review_1 = prod_review

# review for a standing lamp
review_2 = """
Needed a nice lamp for my bedroom, and this one \
had additional storage and not too high of a price \
point. Got it fast - arrived in 2 days. The string \
to the lamp broke during the transit and the company \
happily sent over a new one. Came within a few days \
as well. It was easy to put together. Then I had a \
missing part, so I contacted their support and they \
very quickly got me the missing piece! Seems to me \
to be a great company that cares about their customers \
and products.
"""

# review for an electric toothbrush
review_3 = """
My dental hygienist recommended an electric toothbrush, \
which is why I got this. The battery life seems to be \
pretty impressive so far. After initial charging and \
leaving the charger plugged in for the first week to \
condition the battery, I've unplugged the charger and \
been using it for twice daily brushing for the last \
3 weeks all on the same charge. But the toothbrush head \
is too small. I’ve seen baby toothbrushes bigger than \
this one. I wish the head was bigger with different \
length bristles to get between teeth better because \
this one doesn’t. Overall if you can get this one \
around the $50 mark, it's a good deal. The manufactuer's \
replacements heads are pretty expensive, but you can \
get generic ones that're more reasonably priced. This \
toothbrush makes me feel like I've been to the dentist \
every day. My teeth feel sparkly clean!
"""

# review for a blender
review_4 = """
So, they still had the 17 piece system on seasonal \
sale for around $49 in the month of November, about \
half off, but for some reason (call it price gouging) \
around the second week of December the prices all went \
up to about anywhere from between $70-$89 for the same \
system. And the 11 piece system went up around $10 or \
so in price also from the earlier sale price of $29. \
So it looks okay, but if you look at the base, the part \
where the blade locks into place doesn’t look as good \
as in previous editions from a few years ago, but I \
plan to be very gentle with it (example, I crush \
very hard items like beans, ice, rice, etc. in the \
blender first then pulverize them in the serving size \
I want in the blender then switch to the whipping \
blade for a finer flour, and use the cross cutting blade \
first when making smoothies, then use the flat blade \
if I need them finer/less pulpy). Special tip when making \
smoothies, finely cut and freeze the fruits and \
vegetables (if using spinach-lightly stew soften the \
spinach then freeze until ready for use-and if making \
sorbet, use a small to medium sized food processor) \
that you plan to use that way you can avoid adding so \
much ice if at all-when making your smoothie. \
After about a year, the motor was making a funny noise. \
I called customer service but the warranty expired \
already, so I had to buy another one. FYI: The overall \
quality has gone done in these types of products, so \
they are kind of counting on brand recognition and \
consumer loyalty to maintain sales. Got it in about \
two days.
"""

reviews = [review_1, review_2, review_3, review_4]
```
```python
for i in range(len(reviews)):
    prompt = f"""
    Your task is to generate a short summary of a product \
    review from an ecommerce site.

    Summarize the review below, delimited by triple \
    backticks, in at most 20 words.

    Review: ```{reviews[i]}```
    """

    response = get_completion(prompt)
    print(i, response, "\n")
```

Output:
```
0 Soft and cute panda plush toy loved by daughter, but a bit small for the price. Arrived early.

1 Affordable lamp with storage, fast shipping, and excellent customer service. Easy to assemble and missing parts were quickly replaced.

2 Good battery life, small toothbrush head, but effective cleaning. Good deal if bought around $50.
```

The fourth request then failed with `RateLimitError`: the free-tier limit of 3 requests per minute for `gpt-3.5-turbo` was reached ("Please try again in 20s"; adding a payment method raises the limit).
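Since the free tier allows only 3 requests per minute, the loop can simply pause between calls (the error message suggests retrying after about 20 seconds). A minimal sketch, not part of the original notebook, using only the standard library:

```python
import time

for i, review in enumerate(reviews):
    prompt = f"""
    Your task is to generate a short summary of a product \
    review from an ecommerce site.

    Summarize the review below, delimited by triple \
    backticks, in at most 20 words.

    Review: ```{review}```
    """
    response = get_completion(prompt)
    print(i, response, "\n")
    time.sleep(20)  # stay under the 3-requests-per-minute rate limit
```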
(Notebook metadata: kernel "Python 3 (ipykernel)", Python 3.9.16.)