From 3ce0e8a292ace1dfac7f638fa3ed72df5af6759a Mon Sep 17 00:00:00 2001 From: Gustav von Zitzewitz Date: Mon, 15 May 2023 13:48:56 +0200 Subject: [PATCH] allow openai api key to be stored in streamlit secrets as well --- .gitignore | 1 - README.md | 2 +- app.py | 2 +- constants.py | 5 ----- utils.py | 12 ++++++++++-- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 4f00b71..e9f52c3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ data __pycache__ -.streamlit/secrets.toml .env \ No newline at end of file diff --git a/README.md b/README.md index 1f62845..4a1e485 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,6 @@ This is an app that let's you ask questions about any data source by leveraging ## Good to know - As default context this git repository is taken so you can directly start asking question about its functionality without chosing an own data source. -- To run locally or deploy somewhere, execute `cp .env.template .env` and set necessary keys in the newly created secrets file. Other options are manually setting of environment variables, or creating a `.streamlit/secrets.toml` file and storing credentials there. +- To run locally or deploy somewhere, execute `cp .env.template .env` and set credentials in the newly created .env file. Other options are manually setting system environment variables, or storing them in `.streamlit/secrets.toml`. - Your data won't load? Feel free to open an Issue or PR and contribute! 
- Yes, Chad in `DataChad` refers to the well-known [meme](https://www.google.com/search?q=chad+meme) diff --git a/app.py b/app.py index 4674628..356549a 100644 --- a/app.py +++ b/app.py @@ -21,7 +21,7 @@ from utils import ( ) # Page options and header -st.set_option("client.showErrorDetails", True) +st.set_option("client.showErrorDetails", False) st.set_page_config( page_title=APP_NAME, page_icon=PAGE_ICON, initial_sidebar_state="expanded" ) diff --git a/constants.py b/constants.py index b4c84dd..e3d0c88 100644 --- a/constants.py +++ b/constants.py @@ -1,10 +1,5 @@ from pathlib import Path -from dotenv import load_dotenv - -# loads environment variables -load_dotenv() - APP_NAME = "DataChad" MODEL = "gpt-3.5-turbo" PAGE_ICON = "🤖" diff --git a/utils.py b/utils.py index 33228e1..b2b8ff2 100644 --- a/utils.py +++ b/utils.py @@ -7,6 +7,7 @@ import sys import deeplake import openai import streamlit as st +from dotenv import load_dotenv from langchain.callbacks import get_openai_callback from langchain.chains import ConversationalRetrievalChain from langchain.chat_models import ChatOpenAI @@ -30,6 +31,9 @@ from langchain.vectorstores import DeepLake from constants import APP_NAME, DATA_PATH, MODEL, PAGE_ICON +# loads environment variables +load_dotenv() + logger = logging.getLogger(APP_NAME) @@ -55,7 +59,11 @@ configure_logger(0) def authenticate(openai_api_key, activeloop_token, activeloop_org_name): # Validate all credentials are set and correct # Check for env variables to enable local dev and deployments with shared credentials - openai_api_key = openai_api_key or os.environ.get("OPENAI_API_KEY") + openai_api_key = ( + openai_api_key + or os.environ.get("OPENAI_API_KEY") + or st.secrets.get("OPENAI_API_KEY") + ) activeloop_token = ( activeloop_token or os.environ.get("ACTIVELOOP_TOKEN") @@ -238,7 +246,7 @@ def get_chain(data_source): vector_store = setup_vector_store(data_source) retriever = vector_store.as_retriever() # Search params "fetch_k" and "k" define how 
many documents are pulled from the hub - # and selected after the document matching to build the context + # and selected after the document matching to build the context # that is fed to the model together with your prompt search_kwargs = { "maximal_marginal_relevance": True,