Improve credential handling to allow passing credentials in constructors (#79)

Addresses the issue in #76 by using either the relevant environment
variable, if it is set, or a string passed to the constructor.

Prefers the constructor string over the environment variable, which
seemed like the natural choice to me.
Cameron Whitehead 2022-11-07 21:34:45 +00:00 committed by GitHub
parent 9679bdc34c
commit 54e325be2f
7 changed files with 67 additions and 36 deletions
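In outline, every wrapper touched here follows the same pattern: the environment variable seeds the pydantic field default, an explicit constructor argument overrides it, and a root validator rejects the case where neither is supplied. A minimal sketch with a hypothetical `ExampleWrapper` model (pydantic v1 style, matching the diffs below):

```python
import os
from typing import Dict, Optional

from pydantic import BaseModel, root_validator


class ExampleWrapper(BaseModel):
    """Hypothetical wrapper illustrating the credential pattern in this commit."""

    # The environment variable (read once, when the class is defined) only
    # supplies the field default; a value passed to the constructor replaces it.
    example_api_key: Optional[str] = os.environ.get("EXAMPLE_API_KEY")

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Fail fast if no key arrived from either source."""
        example_api_key = values.get("example_api_key")
        if example_api_key is None or example_api_key == "":
            raise ValueError(
                "Did not find an API key: set `EXAMPLE_API_KEY` or pass"
                " `example_api_key` as a named parameter."
            )
        return values
```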


@@ -4,8 +4,6 @@
[![lint](https://github.com/hwchase17/langchain/actions/workflows/lint.yml/badge.svg)](https://github.com/hwchase17/langchain/actions/workflows/lint.yml) [![test](https://github.com/hwchase17/langchain/actions/workflows/test.yml/badge.svg)](https://github.com/hwchase17/langchain/actions/workflows/test.yml) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) [![](https://dcbadge.vercel.app/api/server/6adMQxSpJS?compact=true&style=flat)](https://discord.gg/6adMQxSpJS)
## Quick Install
`pip install langchain`
@@ -20,6 +18,7 @@ combine them with other sources of computation or knowledge.
This library is aimed at assisting in the development of those types of applications.
It aims to create:
1. a comprehensive collection of pieces you would ever want to combine
2. a flexible interface for combining pieces into a single comprehensive "chain"
3. a schema for easily saving and sharing those chains
@@ -30,23 +29,23 @@ Besides the installation of this python package, you will also need to install p
Note: the reason these packages are not included in the dependencies by default is that as we imagine scaling this package, we do not want to force dependencies that are not needed.
The following use cases require specific installs and environment variables:
The following use cases require specific installs and api keys:
- *OpenAI*:
- _OpenAI_:
- Install requirements with `pip install openai`
- Set the following environment variable: `OPENAI_API_KEY`
- *Cohere*:
- Get an OpenAI api key and either set it as an environment variable (`OPENAI_API_KEY`) or pass it to the LLM constructor as `openai_api_key`.
- _Cohere_:
- Install requirements with `pip install cohere`
- Set the following environment variable: `COHERE_API_KEY`
- *HuggingFace Hub*
- Get a Cohere api key and either set it as an environment variable (`COHERE_API_KEY`) or pass it to the LLM constructor as `cohere_api_key`.
- _HuggingFace Hub_
- Install requirements with `pip install huggingface_hub`
- Set the following environment variable: `HUGGINGFACEHUB_API_TOKEN`
- *SerpAPI*:
- Get a HuggingFace Hub api token and either set it as an environment variable (`HUGGINGFACEHUB_API_TOKEN`) or pass it to the LLM constructor as `huggingfacehub_api_token`.
- _SerpAPI_:
- Install requirements with `pip install google-search-results`
- Set the following environment variable: `SERPAPI_API_KEY`
- *NatBot*:
- Get a SerpAPI api key and either set it as an environment variable (`SERPAPI_API_KEY`) or pass it to the LLM constructor as `serpapi_api_key`.
- _NatBot_:
- Install requirements with `pip install playwright`
- *Wikipedia*:
- _Wikipedia_:
- Install requirements with `pip install wikipedia`
## 🚀 What can I do with this

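With the updated README instructions above, either route works; a minimal sketch for the OpenAI LLM (the top-level `from langchain import OpenAI` import mirrors the `from langchain import Cohere` style used in the docstrings below and is an assumption, as is the placeholder key; `pip install openai` is required either way):

```python
from langchain import OpenAI  # assumed top-level export

# Option 1: rely on the OPENAI_API_KEY environment variable, which is read
# as the pydantic field default when langchain is imported.
llm_from_env = OpenAI()

# Option 2: pass the key directly; this takes precedence over the environment variable.
llm_from_arg = OpenAI(openai_api_key="sk-example-key")
```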

@@ -92,6 +92,7 @@ class SelfAskWithSearchChain(Chain, BaseModel):
input_key: str = "question" #: :meta private:
output_key: str = "answer" #: :meta private:
class Config:
"""Configuration for this pydantic object."""


@@ -4,7 +4,7 @@ Heavily borrowed from https://github.com/ofirpress/self-ask
"""
import os
import sys
from typing import Any, Dict, List
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Extra, root_validator
@@ -29,7 +29,8 @@ class SerpAPIChain(Chain, BaseModel):
"""Chain that calls SerpAPI.
To use, you should have the ``google-search-results`` python package installed,
and the environment variable ``SERPAPI_API_KEY`` set with your API key.
and the environment variable ``SERPAPI_API_KEY`` set with your API key, or pass
`serpapi_api_key` as a named parameter to the constructor.
Example:
.. code-block:: python
@@ -42,6 +43,8 @@ class SerpAPIChain(Chain, BaseModel):
input_key: str = "search_query" #: :meta private:
output_key: str = "search_result" #: :meta private:
serpapi_api_key: Optional[str] = os.environ.get("SERPAPI_API_KEY")
class Config:
"""Configuration for this pydantic object."""
@@ -66,10 +69,13 @@ class SerpAPIChain(Chain, BaseModel):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if "SERPAPI_API_KEY" not in os.environ:
serpapi_api_key = values.get("serpapi_api_key")
if serpapi_api_key is None or serpapi_api_key == "":
raise ValueError(
"Did not find SerpAPI API key, please add an environment variable"
" `SERPAPI_API_KEY` which contains it."
" `SERPAPI_API_KEY` which contains it, or pass `serpapi_api_key` as a named"
" parameter to the constructor."
)
try:
from serpapi import GoogleSearch
@@ -84,7 +90,7 @@ class SerpAPIChain(Chain, BaseModel):
def _run(self, inputs: Dict[str, Any]) -> Dict[str, str]:
params = {
"api_key": os.environ["SERPAPI_API_KEY"],
"api_key": self.serpapi_api_key,
"engine": "google",
"q": inputs[self.input_key],
"google_domain": "google.com",

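The same option now exists for `SerpAPIChain`; a short sketch (the top-level import path and the key value are assumptions, and `pip install google-search-results` is still needed because the validator imports `serpapi`):

```python
from langchain import SerpAPIChain  # assumed import path

# Pass the key directly instead of relying on SERPAPI_API_KEY.
search = SerpAPIChain(serpapi_api_key="example-serpapi-key")

# This stored value is what _run() now sends to SerpAPI instead of reading os.environ.
assert search.serpapi_api_key == "example-serpapi-key"
```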

@@ -1,6 +1,6 @@
"""Wrapper around OpenAI embedding models."""
import os
from typing import Any, Dict, List
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Extra, root_validator
@@ -11,19 +11,22 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
"""Wrapper around OpenAI embedding models.
To use, you should have the ``openai`` python package installed, and the
environment variable ``OPENAI_API_KEY`` set with your API key.
environment variable ``OPENAI_API_KEY`` set with your API key or pass it
as a named parameter to the constructor.
Example:
.. code-block:: python
from langchain.embeddings import OpenAIEmbeddings
openai = OpenAIEmbeddings(model_name="davinci")
openai = OpenAIEmbeddings(model_name="davinci", openai_api_key="my-api-key")
"""
client: Any #: :meta private:
model_name: str = "babbage"
"""Model name to use."""
openai_api_key: Optional[str] = os.environ.get("OPENAI_API_KEY")
class Config:
"""Configuration for this pydantic object."""
@@ -32,14 +35,18 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if "OPENAI_API_KEY" not in os.environ:
openai_api_key = values.get("openai_api_key")
if openai_api_key is None or openai_api_key == "":
raise ValueError(
"Did not find OpenAI API key, please add an environment variable"
" `OPENAI_API_KEY` which contains it."
" `OPENAI_API_KEY` which contains it, or pass `openai_api_key` as a"
" named parameter."
)
try:
import openai
openai.api_key = openai_api_key
values["client"] = openai.Embedding
except ImportError:
raise ValueError(

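The precedence described in the commit message is easiest to see here: the environment variable only populates the field default, so an explicit argument wins. A hedged sketch (placeholder key; `pip install openai` is required for the validator to pass):

```python
from langchain.embeddings import OpenAIEmbeddings

# Even if OPENAI_API_KEY is set, an explicit constructor argument overrides
# the default that the environment variable supplied.
embeddings = OpenAIEmbeddings(openai_api_key="sk-constructor-key")
assert embeddings.openai_api_key == "sk-constructor-key"
```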

@@ -12,13 +12,14 @@ class Cohere(BaseModel, LLM):
"""Wrapper around Cohere large language models.
To use, you should have the ``cohere`` python package installed, and the
environment variable ``COHERE_API_KEY`` set with your API key.
environment variable ``COHERE_API_KEY`` set with your API key, or pass
it as a named parameter to the constructor.
Example:
.. code-block:: python
from langchain import Cohere
cohere = Cohere(model="gptd-instruct-tft")
cohere = Cohere(model="gptd-instruct-tft", cohere_api_key="my-api-key")
"""
client: Any #: :meta private:
@@ -43,6 +44,8 @@ class Cohere(BaseModel, LLM):
presence_penalty: int = 0
"""Penalizes repeated tokens."""
cohere_api_key: Optional[str] = os.environ.get("COHERE_API_KEY")
class Config:
"""Configuration for this pydantic object."""
@@ -51,15 +54,18 @@ class Cohere(BaseModel, LLM):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if "COHERE_API_KEY" not in os.environ:
cohere_api_key = values.get("cohere_api_key")
if cohere_api_key is None or cohere_api_key == "":
raise ValueError(
"Did not find Cohere API key, please add an environment variable"
" `COHERE_API_KEY` which contains it."
" `COHERE_API_KEY` which contains it, or pass `cohere_api_key`"
" as a named parameter."
)
try:
import cohere
values["client"] = cohere.Client(os.environ["COHERE_API_KEY"])
values["client"] = cohere.Client(cohere_api_key)
except ImportError:
raise ValueError(
"Could not import cohere python package. "


@@ -14,7 +14,8 @@ class HuggingFaceHub(BaseModel, LLM):
"""Wrapper around HuggingFaceHub models.
To use, you should have the ``huggingface_hub`` python package installed, and the
environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token.
environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, or pass
it as a named parameter to the constructor.
Only supports task `text-generation` for now.
@@ -22,7 +23,7 @@ class HuggingFaceHub(BaseModel, LLM):
.. code-block:: python
from langchain import HuggingFaceHub
hf = HuggingFaceHub(repo_id="gpt2")
hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")
"""
client: Any #: :meta private:
@@ -37,6 +38,8 @@ class HuggingFaceHub(BaseModel, LLM):
num_return_sequences: int = 1
"""How many completions to generate for each prompt."""
huggingfacehub_api_token: Optional[str] = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
class Config:
"""Configuration for this pydantic object."""
@@ -45,10 +48,13 @@ class HuggingFaceHub(BaseModel, LLM):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if "HUGGINGFACEHUB_API_TOKEN" not in os.environ:
huggingfacehub_api_token = values.get("huggingfacehub_api_token")
if huggingfacehub_api_token is None or huggingfacehub_api_token == "":
raise ValueError(
"Did not find HuggingFace API token, please add an environment variable"
" `HUGGINGFACEHUB_API_TOKEN` which contains it."
" `HUGGINGFACEHUB_API_TOKEN` which contains it, or pass"
" `huggingfacehub_api_token` as a named parameter."
)
try:
from huggingface_hub.inference_api import InferenceApi
@@ -56,7 +62,7 @@ class HuggingFaceHub(BaseModel, LLM):
repo_id = values.get("repo_id", DEFAULT_REPO_ID)
values["client"] = InferenceApi(
repo_id=repo_id,
token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
token=huggingfacehub_api_token,
task="text-generation",
)
except ImportError:

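When neither source provides a token, construction now fails with the clearer message added above; a sketch assuming HUGGINGFACEHUB_API_TOKEN is not set in the running environment (in pydantic v1 the ValidationError wrapping the validator's error is itself a ValueError subclass, so it can be caught as shown):

```python
from langchain import HuggingFaceHub  # import shown in the docstring above

# With no HUGGINGFACEHUB_API_TOKEN set and no token passed, validation fails
# before any network call is made.
try:
    HuggingFaceHub(repo_id="gpt2")
except ValueError as err:  # pydantic's ValidationError subclasses ValueError
    print(err)
```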

@@ -38,6 +38,8 @@ class OpenAI(BaseModel, LLM):
best_of: int = 1
"""Generates best_of completions server-side and returns the "best"."""
openai_api_key: Optional[str] = os.environ.get("OPENAI_API_KEY")
class Config:
"""Configuration for this pydantic object."""
@@ -46,14 +48,18 @@ class OpenAI(BaseModel, LLM):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if "OPENAI_API_KEY" not in os.environ:
openai_api_key = values.get("openai_api_key")
if openai_api_key is None or openai_api_key == "":
raise ValueError(
"Did not find OpenAI API key, please add an environment variable"
" `OPENAI_API_KEY` which contains it."
" `OPENAI_API_KEY` which contains it, or pass `openai_api_key`"
" as a named parameter."
)
try:
import openai
openai.api_key = openai_api_key
values["client"] = openai.Completion
except ImportError:
raise ValueError(