diff --git a/application/api/answer/routes.py b/application/api/answer/routes.py
index a932725..fc6f969 100644
--- a/application/api/answer/routes.py
+++ b/application/api/answer/routes.py
@@ -36,21 +36,18 @@ else:
 
 # load the prompts
 current_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-with open(os.path.join(current_dir, "prompts", "combine_prompt.txt"), "r") as f:
-    template = f.read()
-
-with open(os.path.join(current_dir, "prompts", "combine_prompt_hist.txt"), "r") as f:
-    template_hist = f.read()
-
-with open(os.path.join(current_dir, "prompts", "question_prompt.txt"), "r") as f:
-    template_quest = f.read()
-
-with open(os.path.join(current_dir, "prompts", "chat_combine_prompt.txt"), "r") as f:
+with open(os.path.join(current_dir, "prompts", "chat_combine_default.txt"), "r") as f:
     chat_combine_template = f.read()
 
 with open(os.path.join(current_dir, "prompts", "chat_reduce_prompt.txt"), "r") as f:
     chat_reduce_template = f.read()
 
+with open(os.path.join(current_dir, "prompts", "chat_combine_creative.txt"), "r") as f:
+    chat_combine_creative = f.read()
+
+with open(os.path.join(current_dir, "prompts", "chat_combine_strict.txt"), "r") as f:
+    chat_combine_strict = f.read()
+
 api_key_set = settings.API_KEY is not None
 embeddings_key_set = settings.EMBEDDINGS_KEY is not None
 
@@ -115,8 +112,17 @@ def is_azure_configured():
     return settings.OPENAI_API_BASE and settings.OPENAI_API_VERSION and settings.AZURE_DEPLOYMENT_NAME
 
 
-def complete_stream(question, docsearch, chat_history, api_key, conversation_id):
+def complete_stream(question, docsearch, chat_history, api_key, prompt_id, conversation_id):
     llm = LLMCreator.create_llm(settings.LLM_NAME, api_key=api_key)
+
+    if prompt_id == 'default':
+        prompt = chat_combine_template
+    elif prompt_id == 'creative':
+        prompt = chat_combine_creative
+    elif prompt_id == 'strict':
+        prompt = chat_combine_strict
+    else:
+        prompt = chat_combine_template
 
     docs = docsearch.search(question, k=2)
 
@@ -124,7 +130,7 @@ def complete_stream(question, docsearch, chat_history, api_key, conversation_id)
         docs = [docs[0]]
     # join all page_content together with a newline
    docs_together = "\n".join([doc.page_content for doc in docs])
-    p_chat_combine = chat_combine_template.replace("{summaries}", docs_together)
+    p_chat_combine = prompt.replace("{summaries}", docs_together)
     messages_combine = [{"role": "system", "content": p_chat_combine}]
     source_log_docs = []
     for doc in docs:
@@ -201,6 +207,10 @@ def stream():
     # history to json object from string
     history = json.loads(history)
     conversation_id = data["conversation_id"]
+    if 'prompt_id' in data:
+        prompt_id = data["prompt_id"]
+    else:
+        prompt_id = 'default'
 
     # check if active_docs is set
 
@@ -221,6 +231,7 @@ def stream():
     return Response(
         complete_stream(question, docsearch, chat_history=history, api_key=api_key,
+                        prompt_id=prompt_id,
                         conversation_id=conversation_id),
         mimetype="text/event-stream"
     )
diff --git a/application/api/user/routes.py b/application/api/user/routes.py
index e62f52f..4dcbfd4 100644
--- a/application/api/user/routes.py
+++ b/application/api/user/routes.py
@@ -16,6 +16,7 @@ mongo = MongoClient(settings.MONGO_URI)
 db = mongo["docsgpt"]
 conversations_collection = db["conversations"]
 vectors_collection = db["vectors"]
+prompts_collection = db["prompts"]
 user = Blueprint('user', __name__)
 
 current_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -188,7 +189,7 @@ def combined_json():
             "date": "default",
             "docLink": "default",
             "model": settings.EMBEDDINGS_NAME,
-            "location": "local",
+            "location": "remote",
         }
     ]
     # structure: name, language, version, description, fullName, date, docLink
@@ -245,6 +246,59 @@ def check_docs():
     return {"status": "loaded"}
 
 
+@user.route("/api/create_prompt", methods=["POST"])
+def create_prompt():
+    data = request.get_json()
+    prompt = data["prompt"]
+    name = data["name"]
+    user = "local"
+    # write to mongodb
+    prompts_collection.insert_one(
+        {
+            "name": name,
+            "prompt": prompt,
+            "user": user,
+        }
+    )
+    return {"status": "ok"}
+
+@user.route("/api/get_prompts", methods=["GET"])
+def get_prompts():
+    user = "local"
+    prompts = prompts_collection.find({"user": user})
+    list_prompts = []
+    list_prompts.append({"id": "default", "name": "default", "type": "public"})
+    list_prompts.append({"id": "creative", "name": "creative", "type": "public"})
+    list_prompts.append({"id": "strict", "name": "strict", "type": "public"})
+    for prompt in prompts:
+        list_prompts.append({"id": str(prompt["_id"]), "name": prompt["name"], "type": "private"})
+
+    return jsonify(list_prompts)
+
+@user.route("/api/get_single_prompt", methods=["GET"])
+def get_single_prompt():
+    prompt_id = request.args.get("id")
+    prompt = prompts_collection.find_one({"_id": ObjectId(prompt_id)})
+    return jsonify(prompt['prompt'])
+
+@user.route("/api/delete_prompt", methods=["POST"])
+def delete_prompt():
+    prompt_id = request.get_json()["id"]
+    prompts_collection.delete_one(
+        {
+            "_id": ObjectId(prompt_id),
+        }
+    )
+    return {"status": "ok"}
+
+@user.route("/api/update_prompt_name", methods=["POST"])
+def update_prompt_name():
+    data = request.get_json()
+    id = data["id"]
+    name = data["name"]
+    prompts_collection.update_one({"_id": ObjectId(id)}, {"$set": {"name": name}})
+    return {"status": "ok"}
+
diff --git a/application/prompts/chat_combine_prompt.txt b/application/prompts/chat_combine_creative.txt
similarity index 100%
rename from application/prompts/chat_combine_prompt.txt
rename to application/prompts/chat_combine_creative.txt
diff --git a/application/prompts/chat_combine_default.txt b/application/prompts/chat_combine_default.txt
new file mode 100644
index 0000000..fdf0b2c
--- /dev/null
+++ b/application/prompts/chat_combine_default.txt
@@ -0,0 +1,9 @@
+You are a helpful AI assistant, DocsGPT, specializing in document assistance, designed to offer detailed and informative responses.
+If appropriate, your answers can include code examples, formatted as follows:
+```(language)
+(code)
+```
+You effectively utilize chat history, ensuring relevant and tailored responses.
+If a question doesn't align with your context, you provide friendly and helpful replies.
+----------------
+{summaries}
\ No newline at end of file
diff --git a/application/prompts/chat_combine_strict.txt b/application/prompts/chat_combine_strict.txt
new file mode 100644
index 0000000..b6de463
--- /dev/null
+++ b/application/prompts/chat_combine_strict.txt
@@ -0,0 +1,13 @@
+You are an AI Assistant, DocsGPT, adept at offering document assistance.
+Your expertise lies in providing answers based on the provided context.
+You can leverage the chat history if needed.
+Answer the question based on the context below.
+Keep the answer concise. Respond "Irrelevant context" if you are not sure about the answer.
+If the question is not related to the context, respond "Irrelevant context".
+When using code examples, use the following format:
+```(language)
+(code)
+```
+ ----------------
+ Context:
+ {summaries}
\ No newline at end of file
diff --git a/application/prompts/combine_prompt.txt b/application/prompts/combine_prompt.txt
deleted file mode 100644
index a008da3..0000000
--- a/application/prompts/combine_prompt.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-You are a DocsGPT, friendly and helpful AI assistant by Arc53 that provides help with documents. You give thorough answers with code examples if possible.
-
-QUESTION: How to merge tables in pandas?
-=========
-Content: pandas provides various facilities for easily combining together Series or DataFrame with various kinds of set logic for the indexes and relational algebra functionality in the case of join / merge-type operations.
-Source: 28-pl
-Content: pandas provides a single function, merge(), as the entry point for all standard database join operations between DataFrame or named Series objects: \n\npandas.merge(left, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None)
-Source: 30-pl
-=========
-FINAL ANSWER: To merge two tables in pandas, you can use the pd.merge() function. The basic syntax is: \n\npd.merge(left, right, on, how) \n\nwhere left and right are the two tables to merge, on is the column to merge on, and how is the type of merge to perform. \n\nFor example, to merge the two tables df1 and df2 on the column 'id', you can use: \n\npd.merge(df1, df2, on='id', how='inner')
-SOURCES: 28-pl 30-pl
-
-QUESTION: How are you?
-=========
-CONTENT:
-SOURCE:
-=========
-FINAL ANSWER: I am fine, thank you. How are you?
-SOURCES:
-
-QUESTION: {{ question }}
-=========
-{{ summaries }}
-=========
-FINAL ANSWER:
\ No newline at end of file
diff --git a/application/prompts/combine_prompt_hist.txt b/application/prompts/combine_prompt_hist.txt
deleted file mode 100644
index 509a4a0..0000000
--- a/application/prompts/combine_prompt_hist.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-You are a DocsGPT, friendly and helpful AI assistant by Arc53 that provides help with documents. You give thorough answers with code examples if possible.
-
-QUESTION: How to merge tables in pandas?
-=========
-Content: pandas provides various facilities for easily combining together Series or DataFrame with various kinds of set logic for the indexes and relational algebra functionality in the case of join / merge-type operations.
-Source: 28-pl
-Content: pandas provides a single function, merge(), as the entry point for all standard database join operations between DataFrame or named Series objects: \n\npandas.merge(left, right, how='inner', on=None, left_on=None, right_on=None, left_index=False, right_index=False, sort=False, suffixes=('_x', '_y'), copy=True, indicator=False, validate=None)
-Source: 30-pl
-=========
-FINAL ANSWER: To merge two tables in pandas, you can use the pd.merge() function. The basic syntax is: \n\npd.merge(left, right, on, how) \n\nwhere left and right are the two tables to merge, on is the column to merge on, and how is the type of merge to perform. \n\nFor example, to merge the two tables df1 and df2 on the column 'id', you can use: \n\npd.merge(df1, df2, on='id', how='inner')
-SOURCES: 28-pl 30-pl
-
-QUESTION: How are you?
-=========
-CONTENT:
-SOURCE:
-=========
-FINAL ANSWER: I am fine, thank you. How are you?
-SOURCES:
-
-QUESTION: {{ historyquestion }}
-=========
-CONTENT:
-SOURCE:
-=========
-FINAL ANSWER: {{ historyanswer }}
-SOURCES:
-
-QUESTION: {{ question }}
-=========
-{{ summaries }}
-=========
-FINAL ANSWER:
\ No newline at end of file
diff --git a/application/prompts/question_prompt.txt b/application/prompts/question_prompt.txt
deleted file mode 100644
index 0571b22..0000000
--- a/application/prompts/question_prompt.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the following portion of a long document to see if any of the text is relevant to answer the question.
-{{ context }}
-Question: {{ question }}
-Provide all relevant text to the question verbatim. Summarize if needed. If nothing relevant return "-".
\ No newline at end of file
diff --git a/frontend/src/Navigation.tsx b/frontend/src/Navigation.tsx
index 336ec95..7133dd6 100644
--- a/frontend/src/Navigation.tsx
+++ b/frontend/src/Navigation.tsx
@@ -6,7 +6,7 @@ import Documentation from './assets/documentation.svg';
 import Discord from './assets/discord.svg';
 import Arrow2 from './assets/dropdown-arrow.svg';
 import Expand from './assets/expand.svg';
-import Exit from './assets/exit.svg';
+import Trash from './assets/trash.svg';
 import Github from './assets/github.svg';
 import Hamburger from './assets/hamburger.svg';
 import Info from './assets/info.svg';
@@ -298,9 +298,9 @@ export default function Navigation({ navOpen, setNavOpen }: NavigationProps) {

{doc.location === 'local' && ( Exit { event.stopPropagation(); diff --git a/frontend/src/Setting.tsx b/frontend/src/Setting.tsx index f68df65..5f652a8 100644 --- a/frontend/src/Setting.tsx +++ b/frontend/src/Setting.tsx @@ -1,70 +1,112 @@ -import React, { useState } from 'react'; +import React, { useState, useEffect } from 'react'; +import { useSelector, useDispatch } from 'react-redux'; import Arrow2 from './assets/dropdown-arrow.svg'; import ArrowLeft from './assets/arrow-left.svg'; import ArrowRight from './assets/arrow-right.svg'; +import Trash from './assets/trash.svg'; +import { + selectPrompt, + setPrompt, + selectSourceDocs, +} from './preferences/preferenceSlice'; +import { Doc } from './preferences/preferenceApi'; type PromptProps = { - prompts: string[]; - selectedPrompt: string; - onSelectPrompt: (prompt: string) => void; + prompts: { name: string; id: string; type: string }[]; + selectedPrompt: { name: string; id: string }; + onSelectPrompt: (name: string, id: string) => void; onAddPrompt: (name: string) => void; newPromptName: string; onNewPromptNameChange: (name: string) => void; isAddPromptModalOpen: boolean; onToggleAddPromptModal: () => void; - onDeletePrompt: (name: string) => void; + onDeletePrompt: (name: string, id: string) => void; }; const Setting: React.FC = () => { - const tabs = ['General', 'Prompts', 'Documents', 'Widgets']; + const tabs = ['General', 'Prompts', 'Documents']; + //const tabs = ['General', 'Prompts', 'Documents', 'Widgets']; + const [activeTab, setActiveTab] = useState('General'); - const [prompts, setPrompts] = useState(['Prompt 1', 'Prompt 2']); - const [selectedPrompt, setSelectedPrompt] = useState(''); + const [prompts, setPrompts] = useState< + { name: string; id: string; type: string }[] + >([]); + const selectedPrompt = useSelector(selectPrompt); const [newPromptName, setNewPromptName] = useState(''); const [isAddPromptModalOpen, setAddPromptModalOpen] = useState(false); - const [documents, setDocuments] = useState([]); + const documents = useSelector(selectSourceDocs); const [isAddDocumentModalOpen, setAddDocumentModalOpen] = useState(false); const [newDocument, setNewDocument] = useState({ name: '', vectorDate: '', vectorLocation: '', }); - const onDeletePrompt = (name: string) => { - setPrompts(prompts.filter((prompt) => prompt !== name)); - setSelectedPrompt(''); // Clear the selected prompt - }; + const dispatch = useDispatch(); + + const apiHost = import.meta.env.VITE_API_HOST || 'https://docsapi.arc53.com'; const [widgetScreenshot, setWidgetScreenshot] = useState(null); const updateWidgetScreenshot = (screenshot: File | null) => { setWidgetScreenshot(screenshot); }; - // Function to add a new document - const addDocument = () => { - if ( - newDocument.name && - newDocument.vectorDate && - newDocument.vectorLocation - ) { - setDocuments([...documents, newDocument]); - setNewDocument({ - name: '', - vectorDate: '', - vectorLocation: '', - }); - toggleAddDocumentModal(); - } - }; - // Function to toggle the Add Document modal const toggleAddDocumentModal = () => { setAddDocumentModalOpen(!isAddDocumentModalOpen); }; + useEffect(() => { + const fetchPrompts = async () => { + try { + const response = await fetch(`${apiHost}/api/get_prompts`); + if (!response.ok) { + throw new Error('Failed to fetch prompts'); + } + const promptsData = await response.json(); + setPrompts(promptsData); + } catch (error) { + console.error(error); + } + }; + + fetchPrompts(); + }, []); + + const onDeletePrompt = (name: string, id: string) => { + 
setPrompts(prompts.filter((prompt) => prompt.id !== id)); + + fetch(`${apiHost}/api/delete_prompt`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + // send id in body only + body: JSON.stringify({ id: id }), + }) + .then((response) => { + if (!response.ok) { + throw new Error('Failed to delete prompt'); + } + }) + .catch((error) => { + console.error(error); + }); + }; - const handleDeleteDocument = (index: number) => { - const updatedDocuments = [...documents]; - updatedDocuments.splice(index, 1); - setDocuments(updatedDocuments); + const handleDeleteClick = (index: number, doc: Doc) => { + const docPath = 'indexes/' + 'local' + '/' + doc.name; + + fetch(`${apiHost}/api/delete_old?path=${docPath}`, { + method: 'GET', + }) + .then(() => { + // remove the image element from the DOM + const imageElement = document.querySelector( + `#img-${index}`, + ) as HTMLElement; + const parentElement = imageElement.parentNode as HTMLElement; + parentElement.parentNode?.removeChild(parentElement); + }) + .catch((error) => console.error(error)); }; return ( @@ -105,15 +147,6 @@ const Setting: React.FC = () => { {renderActiveTab()} - {isAddDocumentModalOpen && ( - - )} - {/* {activeTab === 'Widgets' && ( { + dispatch(setPrompt({ name: name, id: id })) + } onAddPrompt={addPrompt} newPromptName={''} onNewPromptNameChange={function (name: string): void { @@ -156,10 +191,7 @@ const Setting: React.FC = () => { return ( ); case 'Widgets': @@ -176,7 +208,6 @@ const Setting: React.FC = () => { function addPrompt(name: string) { if (name) { - setPrompts([...prompts, name]); setNewPromptName(''); toggleAddPromptModal(); } @@ -188,8 +219,8 @@ const Setting: React.FC = () => { }; const General: React.FC = () => { - const themes = ['Light', 'Dark']; - const languages = ['English', 'French', 'Hindi']; + const themes = ['Light']; + const languages = ['English']; const [selectedTheme, setSelectedTheme] = useState(themes[0]); const [selectedLanguage, setSelectedLanguage] = useState(languages[0]); @@ -235,30 +266,40 @@ const Prompts: React.FC = ({ setAddPromptModalOpen(false); }; - const handleDeletePrompt = () => { - if (selectedPrompt) { - onDeletePrompt(selectedPrompt); // Remove the selected prompt + const handleSelectPrompt = (name: string) => { + const selected = prompts.find((prompt) => prompt.name === name); + if (selected) { + onSelectPrompt(selected.name, selected.id); + } + }; + + const handleDeletePrompt = (name: string) => { + const selected = prompts.find((prompt) => prompt.name === name); + if (selected) { + onDeletePrompt(selected.name, selected.id); } }; return (
-

Select Prompt

- Active Prompt

+
-
+ {/*
-
+
*/} {isAddPromptModalOpen && ( = ({ onClose={closeAddPromptModal} /> )} -
- -
); }; +function DropdownPrompt({ + options, + selectedValue, + onSelect, + showDelete, + onDelete, +}: { + options: { name: string; id: string; type: string }[]; + selectedValue: string; + onSelect: (value: string) => void; + showDelete?: boolean; + onDelete: (value: string) => void; +}) { + const [isOpen, setIsOpen] = useState(false); + + return ( +
+ + {isOpen && ( +
+ {options.map((option, index) => ( +
+ { + onSelect(option.name); + setIsOpen(false); + }} + className="ml-2 flex-1 overflow-hidden overflow-ellipsis whitespace-nowrap py-3" + > + {option.name} + + {showDelete && option.type === 'private' && ( + + )} +
+ ))} +
+ )} +
+ ); +} + function Dropdown({ options, selectedValue, onSelect, + showDelete, + onDelete, }: { options: string[]; selectedValue: string; onSelect: (value: string) => void; + showDelete?: boolean; // optional + onDelete?: (value: string) => void; // optional }) { const [isOpen, setIsOpen] = useState(false); return ( -
+
{isOpen && ( -
+
{options.map((option, index) => (
{ - onSelect(option); - setIsOpen(false); - }} - className="flex cursor-pointer items-center justify-between border-b-2 py-3 hover:bg-gray-100" + className="flex cursor-pointer items-center justify-between hover:bg-gray-100" > - + { + onSelect(option); + setIsOpen(false); + }} + className="ml-2 flex-1 overflow-hidden overflow-ellipsis whitespace-nowrap py-3" + > {option} + {showDelete && onDelete && ( + + )}
))}
@@ -347,24 +454,24 @@ const AddPromptModal: React.FC = ({ }) => { return (
-
+

Add New Prompt

onNewPromptNameChange(e.target.value)} - className="mb-4 w-full rounded-lg border-2 p-2" + className="mb-4 w-full rounded-3xl border-2 p-2" /> @@ -374,19 +481,13 @@ const AddPromptModal: React.FC = ({ }; type DocumentsProps = { - documents: Document[]; - isAddDocumentModalOpen: boolean; - newDocument: Document; - handleDeleteDocument: (index: number) => void; - toggleAddDocumentModal: () => void; + documents: Doc[] | null; + handleDeleteDocument: (index: number, document: Doc) => void; }; const Documents: React.FC = ({ documents, - isAddDocumentModalOpen, - newDocument, handleDeleteDocument, - toggleAddDocumentModal, }) => { return (
@@ -394,42 +495,51 @@ const Documents: React.FC = ({ {/*

Documents

*/}
- +
- - + + - {documents.map((document, index) => ( - - - - - - - ))} + {documents && + documents.map((document, index) => ( + + + + + + + ))}
Document Name Vector DateVector LocationActionsType
{document.name}{document.vectorDate} - {document.vectorLocation} - - -
{document.name}{document.date} + {document.location === 'remote' + ? 'Pre-loaded' + : 'Private'} + + {document.location !== 'remote' && ( + Delete { + event.stopPropagation(); + handleDeleteDocument(index, document); + }} + /> + )} +
- + */}
{/* {isAddDocumentModalOpen && (
diff --git a/frontend/src/preferences/preferenceApi.ts b/frontend/src/preferences/preferenceApi.ts
index 091eef4..aac06a0 100644
--- a/frontend/src/preferences/preferenceApi.ts
+++ b/frontend/src/preferences/preferenceApi.ts
@@ -6,7 +6,7 @@ export type Doc = {
   version: string;
   description: string;
   fullName: string;
-  dat: string;
+  date: string;
   docLink: string;
   model: string;
 };
diff --git a/frontend/src/preferences/preferenceSlice.ts b/frontend/src/preferences/preferenceSlice.ts
index 263a6df..8b4f231 100644
--- a/frontend/src/preferences/preferenceSlice.ts
+++ b/frontend/src/preferences/preferenceSlice.ts
@@ -8,6 +8,7 @@ import { RootState } from '../store';
 
 interface Preference {
   apiKey: string;
+  prompt: { name: string; id: string };
   selectedDocs: Doc | null;
   sourceDocs: Doc[] | null;
   conversations: { name: string; id: string }[] | null;
@@ -15,6 +16,7 @@
 const initialState: Preference = {
   apiKey: 'xxx',
+  prompt: { name: 'default', id: 'default' },
   selectedDocs: {
     name: 'default',
     language: 'default',
@@ -22,7 +24,7 @@
     version: 'default',
     description: 'default',
     fullName: 'default',
-    dat: 'default',
+    date: 'default',
     docLink: 'default',
     model: 'openai_text-embedding-ada-002',
   } as Doc,
@@ -46,11 +48,19 @@ export const prefSlice = createSlice({
     setConversations: (state, action) => {
       state.conversations = action.payload;
     },
+    setPrompt: (state, action) => {
+      state.prompt = action.payload;
+    },
   },
 });
 
-export const { setApiKey, setSelectedDocs, setSourceDocs, setConversations } =
-  prefSlice.actions;
+export const {
+  setApiKey,
+  setSelectedDocs,
+  setSourceDocs,
+  setConversations,
+  setPrompt,
+} = prefSlice.actions;
 export default prefSlice.reducer;
 
 export const prefListenerMiddleware = createListenerMiddleware();
@@ -84,3 +94,4 @@ export const selectConversations = (state: RootState) =>
   state.preference.conversations;
 export const selectConversationId = (state: RootState) =>
   state.conversation.conversationId;
+export const selectPrompt = (state: RootState) => state.preference.prompt;
diff --git a/frontend/src/store.ts b/frontend/src/store.ts
index c228964..81e7fa5 100644
--- a/frontend/src/store.ts
+++ b/frontend/src/store.ts
@@ -13,6 +13,7 @@ const store = configureStore({
     preference: {
       apiKey: key ?? '',
       selectedDocs: doc !== null ? JSON.parse(doc) : null,
+      prompt: { name: 'default', id: 'default' },
       conversations: null,
       sourceDocs: [
         {
@@ -20,7 +21,7 @@
           language: '',
           name: 'default',
           version: '',
-          dat: '',
+          date: '',
           description: '',
           docLink: '',
           fullName: '',
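Reviewer note: the prompt-management endpoints introduced in application/api/user/routes.py can be exercised with plain fetch calls. Below is a minimal sketch, assuming a Vite frontend where VITE_API_HOST points at the backend (the same pattern Setting.tsx uses); the helper names, types, and error handling are illustrative only and are not part of this diff.

```ts
// Hypothetical helper module; apiHost mirrors the fallback used in Setting.tsx.
const apiHost = import.meta.env.VITE_API_HOST || 'https://docsapi.arc53.com';

type PromptListItem = { id: string; name: string; type: string };

// GET /api/get_prompts returns the built-in prompts plus any user-created ones.
export async function listPrompts(): Promise<PromptListItem[]> {
  const response = await fetch(`${apiHost}/api/get_prompts`);
  if (!response.ok) throw new Error('Failed to fetch prompts');
  return response.json();
}

// POST /api/create_prompt stores a named prompt in the prompts collection.
export async function createPrompt(name: string, prompt: string): Promise<void> {
  const response = await fetch(`${apiHost}/api/create_prompt`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name, prompt }),
  });
  if (!response.ok) throw new Error('Failed to create prompt');
}

// POST /api/delete_prompt expects the prompt id in the JSON body,
// matching the call made from Setting.tsx.
export async function deletePrompt(id: string): Promise<void> {
  const response = await fetch(`${apiHost}/api/delete_prompt`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ id }),
  });
  if (!response.ok) throw new Error('Failed to delete prompt');
}
```

The id returned by /api/get_prompts is what the chat stream expects as prompt_id, so selecting "creative" or "strict" in Settings switches the system prompt used by complete_stream.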