{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "9597802c",
   "metadata": {},
   "source": [
    "# OpenAI\n",
    "\n",
    "[OpenAI](https://platform.openai.com/docs/introduction) offers a spectrum of models with different levels of power, suitable for different tasks.\n",
    "\n",
    "This example goes over how to use LangChain to interact with `OpenAI` [models](https://platform.openai.com/docs/models)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "5d71df86-8a17-4283-83d7-4e46e7c06c44",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " ········\n"
     ]
    }
   ],
   "source": [
    "# get a token: https://platform.openai.com/account/api-keys\n",
    "\n",
    "from getpass import getpass\n",
    "\n",
    "OPENAI_API_KEY = getpass()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "5472a7cd-af26-48ca-ae9b-5f6ae73c74d2",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# make the key available to the OpenAI client used by LangChain\n",
    "os.environ[\"OPENAI_API_KEY\"] = OPENAI_API_KEY"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "6fb585dd",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "from langchain.llms import OpenAI\n",
    "from langchain import PromptTemplate, LLMChain"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "035dea0f",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "template = \"\"\"Question: {question}\n",
    "\n",
    "Answer: Let's think step by step.\"\"\"\n",
    "\n",
    "prompt = PromptTemplate(template=template, input_variables=[\"question\"])"
   ]
  },
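  {
   "cell_type": "markdown",
   "id": "f0e1d2c3",
   "metadata": {},
   "source": [
    "As a quick check of the template wiring, the cell below is a small sketch that fills the `question` placeholder with a sample value using `prompt.format`; the sample question is only illustrative."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a9b8c7d6",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Sketch: preview the fully formatted prompt for an illustrative question\n",
    "print(prompt.format(question=\"What is the capital of France?\"))"
   ]
  },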
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "3f3458d9",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "llm = OpenAI()"
   ]
  },
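  {
   "cell_type": "markdown",
   "id": "b7c6d5e4",
   "metadata": {},
   "source": [
    "The constructor above relies on the wrapper's defaults. The next cell is a minimal sketch of configuring the model explicitly; it assumes the installed `langchain` version accepts `model_name`, `temperature`, and `max_tokens` on the `OpenAI` wrapper, and it uses a separate `custom_llm` variable so the defaults used by the rest of this notebook are left untouched."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c5d4e3f2",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Sketch only: parameter names are assumed to be supported by the installed\n",
    "# langchain version -- adjust to match your setup.\n",
    "custom_llm = OpenAI(\n",
    "    model_name=\"text-davinci-003\",  # completion model to use\n",
    "    temperature=0.9,  # higher values give more varied completions\n",
    "    max_tokens=256,  # cap on the length of each completion\n",
    ")"
   ]
  },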
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "a641dbd9",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "llm_chain = LLMChain(prompt=prompt, llm=llm)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "9f844993",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "' Justin Bieber was born in 1994, so we are looking for the Super Bowl winner from that year. The Super Bowl in 1994 was Super Bowl XXVIII, and the winner was the Dallas Cowboys.'"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "question = \"What NFL team won the Super Bowl in the year Justin Bieber was born?\"\n",
    "\n",
    "llm_chain.run(question)"
   ]
  },
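  {
   "cell_type": "markdown",
   "id": "d4e3f2a1",
   "metadata": {},
   "source": [
    "The chain bundles the prompt template and the model together. For a quick sanity check you can also call the model directly on a plain string; this sketch assumes the `OpenAI` wrapper in the installed `langchain` version is callable on a prompt and returns the raw completion text, and the prompt shown is only illustrative."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e3f2a1b0",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Sketch: call the model directly, without a prompt template or chain\n",
    "llm(\"Briefly explain what a large language model is.\")"
   ]
  },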
  {
   "cell_type": "markdown",
   "id": "58a9ddb1",
   "metadata": {},
   "source": [
    "If you are behind an explicit proxy, you can set the `OPENAI_PROXY` environment variable so that requests are routed through it:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "55142cec",
   "metadata": {},
   "outputs": [],
   "source": [
    "# route OpenAI API requests through the corporate proxy\n",
    "os.environ[\"OPENAI_PROXY\"] = \"http://proxy.yourcompany.com:8080\""
   ]
  },
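  {
   "cell_type": "markdown",
   "id": "f2a1b0c9",
   "metadata": {},
   "source": [
    "Alternatively, some `langchain` versions let you pass the proxy per model instance. The cell below is a sketch under the assumption that the installed `OpenAI` wrapper accepts an `openai_proxy` constructor argument; check your version before relying on it."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0b1c2d3e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: `openai_proxy` is assumed to be supported by the installed langchain version\n",
    "proxied_llm = OpenAI(openai_proxy=\"http://proxy.yourcompany.com:8080\")"
   ]
  }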
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.11.1 64-bit",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.1"
  },
  "vscode": {
   "interpreter": {
    "hash": "e971737741ff4ec9aff7dc6155a1060a59a8a6d52c757dbbe66bf8ee389494b1"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}