mirror of https://github.com/openai/openai-cookbook, synced 2024-11-04 06:00:33 +00:00
Wrap error message to avoid saving the full traceback to the notebook
This commit is contained in:
parent 2b9e3d65bd
commit 6f274b4f5f
@@ -49,36 +49,23 @@
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 22,
"metadata": {},
"outputs": [
{
"ename": "InvalidRequestError",
"evalue": "This model's maximum context length is 8191 tokens, however you requested 10001 tokens (10001 in your prompt; 0 for the completion). Please reduce your prompt; or completion length.",
"output_type": "error",
"traceback": [
"\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"\u001B[0;31mInvalidRequestError\u001B[0m Traceback (most recent call last)",
"Cell \u001B[0;32mIn [18], line 2\u001B[0m\n\u001B[1;32m 1\u001B[0m long_text \u001B[38;5;241m=\u001B[39m \u001B[38;5;124m'\u001B[39m\u001B[38;5;124mAGI \u001B[39m\u001B[38;5;124m'\u001B[39m \u001B[38;5;241m*\u001B[39m \u001B[38;5;241m5000\u001B[39m\n\u001B[0;32m----> 2\u001B[0m \u001B[43mget_embedding\u001B[49m\u001B[43m(\u001B[49m\u001B[43mlong_text\u001B[49m\u001B[43m)\u001B[49m\n",
"File \u001B[0;32m~/.virtualenvs/openai/lib/python3.9/site-packages/tenacity/__init__.py:326\u001B[0m, in \u001B[0;36mBaseRetrying.wraps.<locals>.wrapped_f\u001B[0;34m(*args, **kw)\u001B[0m\n\u001B[1;32m 324\u001B[0m \u001B[38;5;129m@functools\u001B[39m\u001B[38;5;241m.\u001B[39mwraps(f)\n\u001B[1;32m 325\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mwrapped_f\u001B[39m(\u001B[38;5;241m*\u001B[39margs: t\u001B[38;5;241m.\u001B[39mAny, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mkw: t\u001B[38;5;241m.\u001B[39mAny) \u001B[38;5;241m-\u001B[39m\u001B[38;5;241m>\u001B[39m t\u001B[38;5;241m.\u001B[39mAny:\n\u001B[0;32m--> 326\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43mf\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkw\u001B[49m\u001B[43m)\u001B[49m\n",
"File \u001B[0;32m~/.virtualenvs/openai/lib/python3.9/site-packages/tenacity/__init__.py:406\u001B[0m, in \u001B[0;36mRetrying.__call__\u001B[0;34m(self, fn, *args, **kwargs)\u001B[0m\n\u001B[1;32m 404\u001B[0m retry_state \u001B[38;5;241m=\u001B[39m RetryCallState(retry_object\u001B[38;5;241m=\u001B[39m\u001B[38;5;28mself\u001B[39m, fn\u001B[38;5;241m=\u001B[39mfn, args\u001B[38;5;241m=\u001B[39margs, kwargs\u001B[38;5;241m=\u001B[39mkwargs)\n\u001B[1;32m 405\u001B[0m \u001B[38;5;28;01mwhile\u001B[39;00m \u001B[38;5;28;01mTrue\u001B[39;00m:\n\u001B[0;32m--> 406\u001B[0m do \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43miter\u001B[49m\u001B[43m(\u001B[49m\u001B[43mretry_state\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mretry_state\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 407\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(do, DoAttempt):\n\u001B[1;32m 408\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n",
"File \u001B[0;32m~/.virtualenvs/openai/lib/python3.9/site-packages/tenacity/__init__.py:351\u001B[0m, in \u001B[0;36mBaseRetrying.iter\u001B[0;34m(self, retry_state)\u001B[0m\n\u001B[1;32m 349\u001B[0m is_explicit_retry \u001B[38;5;241m=\u001B[39m retry_state\u001B[38;5;241m.\u001B[39moutcome\u001B[38;5;241m.\u001B[39mfailed \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(retry_state\u001B[38;5;241m.\u001B[39moutcome\u001B[38;5;241m.\u001B[39mexception(), TryAgain)\n\u001B[1;32m 350\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m (is_explicit_retry \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mretry(retry_state\u001B[38;5;241m=\u001B[39mretry_state)):\n\u001B[0;32m--> 351\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mfut\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mresult\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 353\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mafter \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[1;32m 354\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mafter(retry_state)\n",
"File \u001B[0;32m~/.pyenv/versions/3.9.9/lib/python3.9/concurrent/futures/_base.py:438\u001B[0m, in \u001B[0;36mFuture.result\u001B[0;34m(self, timeout)\u001B[0m\n\u001B[1;32m 436\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m CancelledError()\n\u001B[1;32m 437\u001B[0m \u001B[38;5;28;01melif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_state \u001B[38;5;241m==\u001B[39m FINISHED:\n\u001B[0;32m--> 438\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m__get_result\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 440\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_condition\u001B[38;5;241m.\u001B[39mwait(timeout)\n\u001B[1;32m 442\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_state \u001B[38;5;129;01min\u001B[39;00m [CANCELLED, CANCELLED_AND_NOTIFIED]:\n",
"File \u001B[0;32m~/.pyenv/versions/3.9.9/lib/python3.9/concurrent/futures/_base.py:390\u001B[0m, in \u001B[0;36mFuture.__get_result\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m 388\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_exception:\n\u001B[1;32m 389\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 390\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_exception\n\u001B[1;32m 391\u001B[0m \u001B[38;5;28;01mfinally\u001B[39;00m:\n\u001B[1;32m 392\u001B[0m \u001B[38;5;66;03m# Break a reference cycle with the exception in self._exception\u001B[39;00m\n\u001B[1;32m 393\u001B[0m \u001B[38;5;28mself\u001B[39m \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mNone\u001B[39;00m\n",
"File \u001B[0;32m~/.virtualenvs/openai/lib/python3.9/site-packages/tenacity/__init__.py:409\u001B[0m, in \u001B[0;36mRetrying.__call__\u001B[0;34m(self, fn, *args, **kwargs)\u001B[0m\n\u001B[1;32m 407\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(do, DoAttempt):\n\u001B[1;32m 408\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 409\u001B[0m result \u001B[38;5;241m=\u001B[39m \u001B[43mfn\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 410\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mBaseException\u001B[39;00m: \u001B[38;5;66;03m# noqa: B902\u001B[39;00m\n\u001B[1;32m 411\u001B[0m retry_state\u001B[38;5;241m.\u001B[39mset_exception(sys\u001B[38;5;241m.\u001B[39mexc_info())\n",
"Cell \u001B[0;32mIn [16], line 12\u001B[0m, in \u001B[0;36mget_embedding\u001B[0;34m(text_or_tokens, model)\u001B[0m\n\u001B[1;32m 10\u001B[0m \u001B[38;5;129m@retry\u001B[39m(wait\u001B[38;5;241m=\u001B[39mwait_random_exponential(\u001B[38;5;28mmin\u001B[39m\u001B[38;5;241m=\u001B[39m\u001B[38;5;241m1\u001B[39m, \u001B[38;5;28mmax\u001B[39m\u001B[38;5;241m=\u001B[39m\u001B[38;5;241m20\u001B[39m), stop\u001B[38;5;241m=\u001B[39mstop_after_attempt(\u001B[38;5;241m6\u001B[39m), retry\u001B[38;5;241m=\u001B[39mretry_if_not_exception_type(openai\u001B[38;5;241m.\u001B[39mInvalidRequestError))\n\u001B[1;32m 11\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mget_embedding\u001B[39m(text_or_tokens, model\u001B[38;5;241m=\u001B[39mEMBEDDING_MODEL):\n\u001B[0;32m---> 12\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mopenai\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mEmbedding\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mcreate\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43minput\u001B[39;49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtext_or_tokens\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mmodel\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mmodel\u001B[49m\u001B[43m)\u001B[49m[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mdata\u001B[39m\u001B[38;5;124m\"\u001B[39m][\u001B[38;5;241m0\u001B[39m][\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124membedding\u001B[39m\u001B[38;5;124m\"\u001B[39m]\n",
"File \u001B[0;32m~/code/openai-python/openai/api_resources/embedding.py:33\u001B[0m, in \u001B[0;36mEmbedding.create\u001B[0;34m(cls, *args, **kwargs)\u001B[0m\n\u001B[1;32m 31\u001B[0m \u001B[38;5;28;01mwhile\u001B[39;00m \u001B[38;5;28;01mTrue\u001B[39;00m:\n\u001B[1;32m 32\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m---> 33\u001B[0m response \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mcreate\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 35\u001B[0m \u001B[38;5;66;03m# If a user specifies base64, we'll just return the encoded string.\u001B[39;00m\n\u001B[1;32m 36\u001B[0m \u001B[38;5;66;03m# This is only for the default case.\u001B[39;00m\n\u001B[1;32m 37\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m user_provided_encoding_format:\n",
"File \u001B[0;32m~/code/openai-python/openai/api_resources/abstract/engine_api_resource.py:153\u001B[0m, in \u001B[0;36mEngineAPIResource.create\u001B[0;34m(cls, api_key, api_base, api_type, request_id, api_version, organization, **params)\u001B[0m\n\u001B[1;32m 127\u001B[0m \u001B[38;5;129m@classmethod\u001B[39m\n\u001B[1;32m 128\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mcreate\u001B[39m(\n\u001B[1;32m 129\u001B[0m \u001B[38;5;28mcls\u001B[39m,\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 136\u001B[0m \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mparams,\n\u001B[1;32m 137\u001B[0m ):\n\u001B[1;32m 138\u001B[0m (\n\u001B[1;32m 139\u001B[0m deployment_id,\n\u001B[1;32m 140\u001B[0m engine,\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 150\u001B[0m api_key, api_base, api_type, api_version, organization, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mparams\n\u001B[1;32m 151\u001B[0m )\n\u001B[0;32m--> 153\u001B[0m response, _, api_key \u001B[38;5;241m=\u001B[39m \u001B[43mrequestor\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrequest\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 154\u001B[0m \u001B[43m \u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mpost\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m,\u001B[49m\n\u001B[1;32m 155\u001B[0m \u001B[43m \u001B[49m\u001B[43murl\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 156\u001B[0m \u001B[43m \u001B[49m\u001B[43mparams\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mparams\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 157\u001B[0m \u001B[43m \u001B[49m\u001B[43mheaders\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheaders\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 158\u001B[0m \u001B[43m \u001B[49m\u001B[43mstream\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mstream\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 159\u001B[0m \u001B[43m \u001B[49m\u001B[43mrequest_id\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrequest_id\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 160\u001B[0m \u001B[43m \u001B[49m\u001B[43mrequest_timeout\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrequest_timeout\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 161\u001B[0m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 163\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m stream:\n\u001B[1;32m 164\u001B[0m \u001B[38;5;66;03m# must be an iterator\u001B[39;00m\n\u001B[1;32m 165\u001B[0m \u001B[38;5;28;01massert\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(response, OpenAIResponse)\n",
"File \u001B[0;32m~/code/openai-python/openai/api_requestor.py:227\u001B[0m, in \u001B[0;36mAPIRequestor.request\u001B[0;34m(self, method, url, params, headers, files, stream, request_id, request_timeout)\u001B[0m\n\u001B[1;32m 206\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mrequest\u001B[39m(\n\u001B[1;32m 207\u001B[0m \u001B[38;5;28mself\u001B[39m,\n\u001B[1;32m 208\u001B[0m method,\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 215\u001B[0m request_timeout: Optional[Union[\u001B[38;5;28mfloat\u001B[39m, Tuple[\u001B[38;5;28mfloat\u001B[39m, \u001B[38;5;28mfloat\u001B[39m]]] \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mNone\u001B[39;00m,\n\u001B[1;32m 216\u001B[0m ) \u001B[38;5;241m-\u001B[39m\u001B[38;5;241m>\u001B[39m Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], \u001B[38;5;28mbool\u001B[39m, \u001B[38;5;28mstr\u001B[39m]:\n\u001B[1;32m 217\u001B[0m result \u001B[38;5;241m=\u001B[39m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mrequest_raw(\n\u001B[1;32m 218\u001B[0m method\u001B[38;5;241m.\u001B[39mlower(),\n\u001B[1;32m 219\u001B[0m url,\n\u001B[0;32m (...)\u001B[0m\n\u001B[1;32m 225\u001B[0m request_timeout\u001B[38;5;241m=\u001B[39mrequest_timeout,\n\u001B[1;32m 226\u001B[0m )\n\u001B[0;32m--> 227\u001B[0m resp, got_stream \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_interpret_response\u001B[49m\u001B[43m(\u001B[49m\u001B[43mresult\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstream\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m 228\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m resp, got_stream, \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mapi_key\n",
"File \u001B[0;32m~/code/openai-python/openai/api_requestor.py:620\u001B[0m, in \u001B[0;36mAPIRequestor._interpret_response\u001B[0;34m(self, result, stream)\u001B[0m\n\u001B[1;32m 612\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m (\n\u001B[1;32m 613\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_interpret_response_line(\n\u001B[1;32m 614\u001B[0m line, result\u001B[38;5;241m.\u001B[39mstatus_code, result\u001B[38;5;241m.\u001B[39mheaders, stream\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[1;32m 615\u001B[0m )\n\u001B[1;32m 616\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m line \u001B[38;5;129;01min\u001B[39;00m parse_stream(result\u001B[38;5;241m.\u001B[39miter_lines())\n\u001B[1;32m 617\u001B[0m ), \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[1;32m 618\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m 619\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m (\n\u001B[0;32m--> 620\u001B[0m \u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_interpret_response_line\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m 621\u001B[0m \u001B[43m \u001B[49m\u001B[43mresult\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mcontent\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mdecode\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mutf-8\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 622\u001B[0m \u001B[43m \u001B[49m\u001B[43mresult\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mstatus_code\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 623\u001B[0m \u001B[43m \u001B[49m\u001B[43mresult\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mheaders\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m 624\u001B[0m \u001B[43m \u001B[49m\u001B[43mstream\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m 625\u001B[0m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m,\n\u001B[1;32m 626\u001B[0m \u001B[38;5;28;01mFalse\u001B[39;00m,\n\u001B[1;32m 627\u001B[0m )\n",
"File \u001B[0;32m~/code/openai-python/openai/api_requestor.py:680\u001B[0m, in \u001B[0;36mAPIRequestor._interpret_response_line\u001B[0;34m(self, rbody, rcode, rheaders, stream)\u001B[0m\n\u001B[1;32m 678\u001B[0m stream_error \u001B[38;5;241m=\u001B[39m stream \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124merror\u001B[39m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;129;01min\u001B[39;00m resp\u001B[38;5;241m.\u001B[39mdata\n\u001B[1;32m 679\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m stream_error \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;241m200\u001B[39m \u001B[38;5;241m<\u001B[39m\u001B[38;5;241m=\u001B[39m rcode \u001B[38;5;241m<\u001B[39m \u001B[38;5;241m300\u001B[39m:\n\u001B[0;32m--> 680\u001B[0m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mhandle_error_response(\n\u001B[1;32m 681\u001B[0m rbody, rcode, resp\u001B[38;5;241m.\u001B[39mdata, rheaders, stream_error\u001B[38;5;241m=\u001B[39mstream_error\n\u001B[1;32m 682\u001B[0m )\n\u001B[1;32m 683\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m resp\n",
"\u001B[0;31mInvalidRequestError\u001B[0m: This model's maximum context length is 8191 tokens, however you requested 10001 tokens (10001 in your prompt; 0 for the completion). Please reduce your prompt; or completion length."
"name": "stdout",
"output_type": "stream",
"text": [
"This model's maximum context length is 8191 tokens, however you requested 10001 tokens (10001 in your prompt; 0 for the completion). Please reduce your prompt; or completion length.\n"
]
}
],
"source": [
"long_text = 'AGI ' * 5000\n",
"get_embedding(long_text)"
"try:\n",
" get_embedding(long_text)\n",
"except openai.InvalidRequestError as e:\n",
" print(e)"
]
},
{
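The change above swaps the cell's saved error output (the full ANSI-escaped traceback) for a short stdout stream by catching the exception and printing only its message. Below is a minimal sketch of the resulting cell in plain Python, with the `get_embedding` helper reconstructed from the traceback shown in the diff; `EMBEDDING_MODEL` is filled in as an assumed placeholder, since its real value is set elsewhere in the notebook.

```python
import openai
from tenacity import (retry, stop_after_attempt, wait_random_exponential,
                      retry_if_not_exception_type)

# Assumed placeholder; the notebook defines EMBEDDING_MODEL in an earlier cell.
# Also assumes openai.api_key is already configured (openai<1.0 SDK, as in the traceback).
EMBEDDING_MODEL = "text-embedding-ada-002"

# Helper as it appears in the traceback above: retries transient failures,
# but never retries InvalidRequestError, which signals an over-long input.
@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6),
       retry=retry_if_not_exception_type(openai.InvalidRequestError))
def get_embedding(text_or_tokens, model=EMBEDDING_MODEL):
    return openai.Embedding.create(input=text_or_tokens, model=model)["data"][0]["embedding"]

# Old cell: the uncaught InvalidRequestError stored the whole traceback in the notebook output.
# long_text = 'AGI ' * 5000
# get_embedding(long_text)

# New cell: catch the error and print only its message.
long_text = 'AGI ' * 5000
try:
    get_embedding(long_text)
except openai.InvalidRequestError as e:
    print(e)
```

With the `try`/`except` in place, the saved .ipynb stores a single `stream` output containing just the message ("This model's maximum context length is 8191 tokens, ...") instead of a multi-frame `error` output full of escape codes, which is what the commit title describes.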