{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Using embeddings\n",
"\n",
"This notebook contains some helpful snippets you can use to embed text with the `text-embedding-3-small` model via the OpenAI API."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1536"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import openai\n",
"\n",
"embedding = openai.Embedding.create(\n",
" input=\"Your text goes here\", model=\"text-embedding-3-small\"\n",
")[\"data\"][0][\"embedding\"]\n",
"len(embedding)\n"
]
},
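{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Note: the snippet above uses the pre-1.0 interface of the `openai` Python package (`openai.Embedding.create` was removed in version 1.0). If you have `openai` 1.x or later installed, a rough equivalent using the 1.x client would look like the sketch below, which assumes your API key is set in the `OPENAI_API_KEY` environment variable."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch assuming openai>=1.0: the client picks up OPENAI_API_KEY from the environment\n",
"from openai import OpenAI\n",
"\n",
"client = OpenAI()\n",
"\n",
"response = client.embeddings.create(\n",
"    input=\"Your text goes here\", model=\"text-embedding-3-small\"\n",
")\n",
"embedding = response.data[0].embedding\n",
"len(embedding)\n"
]
},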
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"It's recommended to use the 'tenacity' package or another exponential backoff implementation to better manage API rate limits, as hitting the API too much too fast can trigger rate limits. Using the following function ensures you get your embeddings as fast as possible."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Negative example (slow and rate-limited)\n",
"import openai\n",
"\n",
"num_embeddings = 10000 # Some large number\n",
"for i in range(num_embeddings):\n",
" embedding = openai.Embedding.create(\n",
" input=\"Your text goes here\", model=\"text-embedding-3-small\"\n",
" )[\"data\"][0][\"embedding\"]\n",
" print(len(embedding))"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1536\n"
]
}
],
"source": [
"# Best practice\n",
"import openai\n",
"from tenacity import retry, wait_random_exponential, stop_after_attempt\n",
"\n",
"# Retry up to 6 times with exponential backoff, starting at 1 second and maxing out at 20 seconds delay\n",
"@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6))\n",
"def get_embedding(text: str, model=\"text-embedding-3-small\") -> list[float]:\n",
" return openai.Embedding.create(input=[text], model=model)[\"data\"][0][\"embedding\"]\n",
"\n",
"embedding = get_embedding(\"Your text goes here\", model=\"text-embedding-3-small\")\n",
"print(len(embedding))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.9.9 ('openai')",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.9"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "365536dcbde60510dc9073d6b991cd35db2d9bac356a11f5b64279a5e6708b97"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}