Merge pull request #290 from arc53/feature/hisotry

history
Authored by Pavel 11 months ago, committed by GitHub
commit 1687e6682a

@@ -33,12 +33,14 @@ from langchain.prompts.chat import (
     HumanMessagePromptTemplate,
     AIMessagePromptTemplate,
 )
+from langchain.schema import HumanMessage, AIMessage
 from pymongo import MongoClient
 from werkzeug.utils import secure_filename
 from core.settings import settings
 from error import bad_request
 from worker import ingest_worker
+from bson.objectid import ObjectId
 # os.environ["LANGCHAIN_HANDLER"] = "langchain"
@@ -94,6 +96,7 @@ celery.config_from_object("celeryconfig")
 mongo = MongoClient(app.config["MONGO_URI"])
 db = mongo["docsgpt"]
 vectors_collection = db["vectors"]
+conversations_collection = db["conversations"]
 async def async_generate(chain, question, chat_history):
@@ -159,7 +162,7 @@ def home():
     )
-def complete_stream(question, docsearch, chat_history, api_key):
+def complete_stream(question, docsearch, chat_history, api_key, conversation_id):
     openai.api_key = api_key
     if is_azure_configured():
         logger.debug("in Azure")
@@ -180,11 +183,14 @@ def complete_stream(question, docsearch, chat_history, api_key):
     docs_together = "\n".join([doc.page_content for doc in docs])
     p_chat_combine = chat_combine_template.replace("{summaries}", docs_together)
     messages_combine = [{"role": "system", "content": p_chat_combine}]
+    source_log_docs = []
     for doc in docs:
         if doc.metadata:
             data = json.dumps({"type": "source", "doc": doc.page_content, "metadata": doc.metadata})
+            source_log_docs.append({"title": doc.metadata['title'].split('/')[-1], "text": doc.page_content})
         else:
             data = json.dumps({"type": "source", "doc": doc.page_content})
+            source_log_docs.append({"title": doc.page_content, "text": doc.page_content})
         yield f"data:{data}\n\n"
     if len(chat_history) > 1:
@@ -201,13 +207,43 @@ def complete_stream(question, docsearch, chat_history, api_key):
     messages_combine.append({"role": "user", "content": question})
     completion = openai.ChatCompletion.create(model=gpt_model, engine=settings.AZURE_DEPLOYMENT_NAME,
                                               messages=messages_combine, stream=True, max_tokens=500, temperature=0)
+    reponse_full = ""
     for line in completion:
         if "content" in line["choices"][0]["delta"]:
             # check if the delta contains content
             data = json.dumps({"answer": str(line["choices"][0]["delta"]["content"])})
+            reponse_full += str(line["choices"][0]["delta"]["content"])
             yield f"data: {data}\n\n"
+    # save conversation to database
+    if conversation_id is not None:
+        conversations_collection.update_one(
+            {"_id": ObjectId(conversation_id)},
+            {"$push": {"queries": {"prompt": question, "response": reponse_full, "sources": source_log_docs}}},
+        )
+    else:
+        # create new conversation
+        # generate summary
+        messages_summary = [{"role": "assistant", "content": "Summarise following conversation in no more than 3 "
+                                                             "words, respond ONLY with the summary, use the same "
+                                                             "language as the system \n\nUser: " + question + "\n\n" +
+                                                             "AI: " +
+                                                             reponse_full},
+                            {"role": "user", "content": "Summarise following conversation in no more than 3 words, "
+                                                        "respond ONLY with the summary, use the same language as the "
+                                                        "system"}]
+        completion = openai.ChatCompletion.create(model='gpt-3.5-turbo', engine=settings.AZURE_DEPLOYMENT_NAME,
+                                                  messages=messages_summary, max_tokens=30, temperature=0)
+        conversation_id = conversations_collection.insert_one(
+            {"user": "local",
+             "date": datetime.datetime.utcnow(),
+             "name": completion["choices"][0]["message"]["content"],
+             "queries": [{"prompt": question, "response": reponse_full, "sources": source_log_docs}]}
+        ).inserted_id
     # send data.type = "end" to indicate that the stream has ended as json
+    data = json.dumps({"type": "id", "id": str(conversation_id)})
+    yield f"data: {data}\n\n"
     data = json.dumps({"type": "end"})
     yield f"data: {data}\n\n"
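After this change, every `data:` line that /stream emits carries one of four JSON payloads: a "source" event per retrieved document, incremental answer chunks, a new "id" event with the saved conversation's ObjectId, and a final "end" marker. A minimal TypeScript sketch of those payload shapes and a consumer over them (the type and function names are illustrative, not part of this PR):

// Shapes mirror the json.dumps(...) calls above; the names here are assumptions.
type SourceEvent = { type: 'source'; doc: string; metadata?: Record<string, unknown> };
type AnswerEvent = { answer: string }; // streamed answer fragment
type IdEvent = { type: 'id'; id: string }; // conversation ObjectId as a string
type EndEvent = { type: 'end' }; // stream finished
type StreamEvent = SourceEvent | AnswerEvent | IdEvent | EndEvent;

// Example consumer: parse one `data:` line and branch on the payload kind.
function handleStreamEvent(raw: string): void {
  const data = JSON.parse(raw) as StreamEvent;
  if ('answer' in data) {
    console.log('answer chunk:', data.answer);
  } else if (data.type === 'id') {
    console.log('conversation saved with id', data.id);
  } else if (data.type === 'source') {
    console.log('source document:', data.doc);
  } else {
    console.log('stream ended');
  }
}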
@@ -220,6 +256,7 @@ def stream():
     history = data["history"]
     # history to json object from string
     history = json.loads(history)
+    conversation_id = data["conversation_id"]
     # check if active_docs is set
@@ -239,7 +276,9 @@ def stream():
     # question = "Hi"
     return Response(
-        complete_stream(question, docsearch, chat_history=history, api_key=api_key), mimetype="text/event-stream"
+        complete_stream(question, docsearch,
+                        chat_history=history, api_key=api_key,
+                        conversation_id=conversation_id), mimetype="text/event-stream"
     )
@@ -252,6 +291,10 @@ def api_answer():
     data = request.get_json()
     question = data["question"]
     history = data["history"]
+    if "conversation_id" not in data:
+        conversation_id = None
+    else:
+        conversation_id = data["conversation_id"]
     print("-" * 5)
     if not api_key_set:
         api_key = data["api_key"]
@@ -364,6 +407,38 @@ def api_answer():
             sources_doc.append({'title': doc.page_content, 'text': doc.page_content})
     result['sources'] = sources_doc
+    # generate conversationId
+    if conversation_id is not None:
+        conversations_collection.update_one(
+            {"_id": ObjectId(conversation_id)},
+            {"$push": {"queries": {"prompt": question,
+                                   "response": result["answer"], "sources": result['sources']}}},
+        )
+    else:
+        # create new conversation
+        # generate summary
+        messages_summary = [AIMessage(content="Summarise following conversation in no more than 3 " +
+                                              "words, respond ONLY with the summary, use the same " +
+                                              "language as the system \n\nUser: " + question + "\n\nAI: " +
+                                              result["answer"]),
+                            HumanMessage(content="Summarise following conversation in no more than 3 words, " +
+                                                 "respond ONLY with the summary, use the same language as the " +
+                                                 "system")]
+        # completion = openai.ChatCompletion.create(model='gpt-3.5-turbo', engine=settings.AZURE_DEPLOYMENT_NAME,
+        #                                           messages=messages_summary, max_tokens=30, temperature=0)
+        completion = llm.predict_messages(messages_summary)
+        conversation_id = conversations_collection.insert_one(
+            {"user": "local",
+             "date": datetime.datetime.utcnow(),
+             "name": completion.content,
+             "queries": [{"prompt": question, "response": result["answer"], "sources": result['sources']}]}
+        ).inserted_id
+    result["conversation_id"] = str(conversation_id)
     # mock result
     # result = {
     #     "answer": "The answer is 42",
@@ -591,6 +666,39 @@ def delete_old():
     return {"status": "ok"}
+
+@app.route("/api/get_conversations", methods=["get"])
+def get_conversations():
+    # provides a list of conversations
+    conversations = conversations_collection.find().sort("date", -1)
+    list_conversations = []
+    for conversation in conversations:
+        list_conversations.append({"id": str(conversation["_id"]), "name": conversation["name"]})
+    # list_conversations = [{"id": "default", "name": "default"}, {"id": "jeff", "name": "jeff"}]
+    return jsonify(list_conversations)
+
+@app.route("/api/get_single_conversation", methods=["get"])
+def get_single_conversation():
+    # provides data for a conversation
+    conversation_id = request.args.get("id")
+    conversation = conversations_collection.find_one({"_id": ObjectId(conversation_id)})
+    return jsonify(conversation['queries'])
+
+@app.route("/api/delete_conversation", methods=["POST"])
+def delete_conversation():
+    # deletes a conversation from the database
+    conversation_id = request.args.get("id")
+    # write to mongodb
+    conversations_collection.delete_one(
+        {
+            "_id": ObjectId(conversation_id),
+        }
+    )
+    return {"status": "ok"}
 # handling CORS
 @app.after_request
 def after_request(response):

@@ -19,10 +19,17 @@ import {
   selectSelectedDocsStatus,
   selectSourceDocs,
   setSelectedDocs,
+  selectConversations,
+  setConversations,
+  selectConversationId,
 } from './preferences/preferenceSlice';
+import {
+  setConversation,
+  updateConversationId,
+} from './conversation/conversationSlice';
 import { useOutsideAlerter } from './hooks';
 import Upload from './upload/Upload';
-import { Doc } from './preferences/preferenceApi';
+import { Doc, getConversations } from './preferences/preferenceApi';
 export default function Navigation({
   navState,
@@ -34,6 +41,8 @@ export default function Navigation({
   const dispatch = useDispatch();
   const docs = useSelector(selectSourceDocs);
   const selectedDocs = useSelector(selectSelectedDocs);
+  const conversations = useSelector(selectConversations);
+  const conversationId = useSelector(selectConversationId);
   const [isDocsListOpen, setIsDocsListOpen] = useState(false);
@@ -51,6 +60,33 @@ export default function Navigation({
   const navRef = useRef(null);
   const apiHost = import.meta.env.VITE_API_HOST || 'https://docsapi.arc53.com';
+  useEffect(() => {
+    if (!conversations) {
+      getConversations()
+        .then((fetchedConversations) => {
+          dispatch(setConversations(fetchedConversations));
+        })
+        .catch((error) => {
+          console.error('Failed to fetch conversations: ', error);
+        });
+    }
+  }, [conversations, dispatch]);
+
+  const handleDeleteConversation = (id: string) => {
+    fetch(`${apiHost}/api/delete_conversation?id=${id}`, {
+      method: 'POST',
+    })
+      .then(() => {
+        // remove the image element from the DOM
+        const imageElement = document.querySelector(
+          `#img-${id}`,
+        ) as HTMLElement;
+        const parentElement = imageElement.parentNode as HTMLElement;
+        parentElement.parentNode?.removeChild(parentElement);
+      })
+      .catch((error) => console.error(error));
+  };
   const handleDeleteClick = (index: number, doc: Doc) => {
     const docPath = 'indexes/' + 'local' + '/' + doc.name;
@@ -67,6 +103,22 @@ export default function Navigation({
       })
       .catch((error) => console.error(error));
   };
+  const handleConversationClick = (index: string) => {
+    // fetch the conversation from the server and setConversation in the store
+    fetch(`${apiHost}/api/get_single_conversation?id=${index}`, {
+      method: 'GET',
+    })
+      .then((response) => response.json())
+      .then((data) => {
+        dispatch(setConversation(data));
+        dispatch(
+          updateConversationId({
+            query: { conversationId: index },
+          }),
+        );
+      });
+  };
   useOutsideAlerter(
     navRef,
     () => {
@ -121,15 +173,56 @@ export default function Navigation({
</div> </div>
<NavLink <NavLink
to={'/'} to={'/'}
onClick={() => {
dispatch(setConversation([]));
dispatch(updateConversationId({ query: { conversationId: null } }));
}}
className={({ isActive }) => className={({ isActive }) =>
`${ `${
isActive ? 'bg-gray-3000' : '' isActive && conversationId === null ? 'bg-gray-3000' : ''
} my-auto mx-4 mt-4 flex h-12 cursor-pointer gap-4 rounded-md hover:bg-gray-100` } my-auto mx-4 mt-4 flex h-12 cursor-pointer gap-4 rounded-md hover:bg-gray-100`
} }
> >
<img src={Message} className="ml-2 w-5"></img> <img src={Message} className="ml-2 w-5"></img>
<p className="my-auto text-eerie-black">Chat</p> <p className="my-auto text-eerie-black">New Chat</p>
</NavLink> </NavLink>
<div className="conversations-container max-h-[25rem] overflow-y-auto">
{conversations
? conversations.map((conversation) => {
return (
<div
key={conversation.id}
onClick={() => {
handleConversationClick(conversation.id);
}}
className={`my-auto mx-4 mt-4 flex h-12 cursor-pointer items-center justify-between gap-4 rounded-md hover:bg-gray-100 ${
conversationId === conversation.id ? 'bg-gray-100' : ''
}`}
>
<div className="flex gap-4">
<img src={Message} className="ml-2 w-5"></img>
<p className="my-auto text-eerie-black">
{conversation.name}
</p>
</div>
{conversationId === conversation.id ? (
<img
src={Exit}
alt="Exit"
className="mr-4 h-3 w-3 cursor-pointer hover:opacity-50"
id={`img-${conversation.id}`}
onClick={(event) => {
event.stopPropagation();
handleDeleteConversation(conversation.id);
}}
/>
) : null}
</div>
);
})
: null}
</div>
<div className="flex-grow border-b-2 border-gray-100"></div> <div className="flex-grow border-b-2 border-gray-100"></div>
<div className="flex flex-col-reverse border-b-2"> <div className="flex flex-col-reverse border-b-2">

@@ -8,7 +8,24 @@ export function fetchAnswerApi(
   apiKey: string,
   selectedDocs: Doc,
   history: Array<any> = [],
-): Promise<Answer> {
+  conversationId: string | null,
+): Promise<
+  | {
+      result: any;
+      answer: any;
+      sources: any;
+      conversationId: any;
+      query: string;
+    }
+  | {
+      result: any;
+      answer: any;
+      sources: any;
+      query: string;
+      conversationId: any;
+      title: any;
+    }
+> {
   let namePath = selectedDocs.name;
   if (selectedDocs.language === namePath) {
     namePath = '.project';
@@ -44,6 +61,7 @@ export function fetchAnswerApi(
       embeddings_key: apiKey,
       history: history,
       active_docs: docPath,
+      conversation_id: conversationId,
     }),
   })
     .then((response) => {
@@ -55,7 +73,13 @@ export function fetchAnswerApi(
     })
     .then((data) => {
       const result = data.answer;
-      return { answer: result, query: question, result, sources: data.sources };
+      return {
+        answer: result,
+        query: question,
+        result,
+        sources: data.sources,
+        conversationId: data.conversation_id,
+      };
     });
 }
@@ -64,6 +88,7 @@ export function fetchAnswerSteaming(
   apiKey: string,
   selectedDocs: Doc,
   history: Array<any> = [],
+  conversationId: string | null,
   onEvent: (event: MessageEvent) => void,
 ): Promise<Answer> {
   let namePath = selectedDocs.name;
@@ -97,8 +122,9 @@ export function fetchAnswerSteaming(
       embeddings_key: apiKey,
       active_docs: docPath,
       history: JSON.stringify(history),
+      conversation_id: conversationId,
     };
     fetch(apiHost + '/stream', {
       method: 'POST',
       headers: {
@@ -107,48 +133,51 @@ export function fetchAnswerSteaming(
       body: JSON.stringify(body),
     })
      .then((response) => {
-        if (!response.body) throw Error("No response body");
+        if (!response.body) throw Error('No response body');
        const reader = response.body.getReader();
        const decoder = new TextDecoder('utf-8');
-        var counterrr = 0
+        let counterrr = 0;
-        const processStream = ({ done, value }: ReadableStreamReadResult<Uint8Array>) => {
+        const processStream = ({
+          done,
+          value,
+        }: ReadableStreamReadResult<Uint8Array>) => {
          if (done) {
            console.log(counterrr);
            return;
          }
          counterrr += 1;
          const chunk = decoder.decode(value);
-          const lines = chunk.split("\n");
+          const lines = chunk.split('\n');
          for (let line of lines) {
-            if (line.trim() == "") {
+            if (line.trim() == '') {
              continue;
            }
            if (line.startsWith('data:')) {
              line = line.substring(5);
            }
-            const messageEvent: MessageEvent = new MessageEvent("message", {
+            const messageEvent: MessageEvent = new MessageEvent('message', {
              data: line,
            });
            onEvent(messageEvent); // handle each message
          }
          reader.read().then(processStream).catch(reject);
-        }
+        };
        reader.read().then(processStream).catch(reject);
      })
      .catch((error) => {
        console.error('Connection failed:', error);
        reject(error);
      });
  });
}
export function sendFeedback(
export function sendFeedback( export function sendFeedback(

@@ -10,6 +10,7 @@ export interface Message {
 export interface ConversationState {
   queries: Query[];
   status: Status;
+  conversationId: string | null;
 }
 export interface Answer {
@@ -17,6 +18,8 @@ export interface Answer {
   query: string;
   result: string;
   sources: { title: string; text: string }[];
+  conversationId: string | null;
+  title: string | null;
 }
 export interface Query {
@@ -25,4 +28,6 @@ export interface Query {
   feedback?: FEEDBACK;
   error?: string;
   sources?: { title: string; text: string }[];
+  conversationId?: string | null;
+  title?: string | null;
 }
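To make the widened Answer shape concrete, here is a hedged example value using only the fields visible above; the strings and the id are invented for illustration:

const exampleAnswer: Answer = {
  answer: 'DocsGPT indexes your documentation and answers questions about it.',
  query: 'What does DocsGPT do?',
  result: 'DocsGPT indexes your documentation and answers questions about it.',
  sources: [{ title: 'README.md', text: 'DocsGPT is an open-source assistant...' }],
  conversationId: '64a1f0c2e4b0a1b2c3d4e5f6', // ObjectId string returned as conversation_id
  title: null,
};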

@@ -2,10 +2,13 @@ import { createAsyncThunk, createSlice, PayloadAction } from '@reduxjs/toolkit';
 import store from '../store';
 import { fetchAnswerApi, fetchAnswerSteaming } from './conversationApi';
 import { Answer, ConversationState, Query, Status } from './conversationModels';
+import { getConversations } from '../preferences/preferenceApi';
+import { setConversations } from '../preferences/preferenceSlice';
 const initialState: ConversationState = {
   queries: [],
   status: 'idle',
+  conversationId: null,
 };
 const API_STREAMING = import.meta.env.VITE_API_STREAMING === 'true';
@@ -21,6 +24,7 @@ export const fetchAnswer = createAsyncThunk<Answer, { question: string }>(
         state.preference.apiKey,
         state.preference.selectedDocs!,
         state.conversation.queries,
+        state.conversation.conversationId,
         (event) => {
           const data = JSON.parse(event.data);
@@ -28,6 +32,13 @@ export const fetchAnswer = createAsyncThunk<Answer, { question: string }>(
           if (data.type === 'end') {
             // set status to 'idle'
             dispatch(conversationSlice.actions.setStatus('idle'));
+            getConversations()
+              .then((fetchedConversations) => {
+                dispatch(setConversations(fetchedConversations));
+              })
+              .catch((error) => {
+                console.error('Failed to fetch conversations: ', error);
+              });
           } else if (data.type === 'source') {
             // check if data.metadata exists
             let result;
@@ -46,6 +57,12 @@ export const fetchAnswer = createAsyncThunk<Answer, { question: string }>(
                 query: { sources: [result] },
               }),
             );
+          } else if (data.type === 'id') {
+            dispatch(
+              updateConversationId({
+                query: { conversationId: data.id },
+              }),
+            );
           } else {
             const result = data.answer;
             dispatch(
@@ -63,10 +80,11 @@ export const fetchAnswer = createAsyncThunk<Answer, { question: string }>(
         state.preference.apiKey,
         state.preference.selectedDocs!,
         state.conversation.queries,
+        state.conversation.conversationId,
       );
       if (answer) {
         let sourcesPrepped = [];
-        sourcesPrepped = answer.sources.map((source) => {
+        sourcesPrepped = answer.sources.map((source: { title: string }) => {
          if (source && source.title) {
            const titleParts = source.title.split('/');
            return {
@@ -83,11 +101,30 @@ export const fetchAnswer = createAsyncThunk<Answer, { question: string }>(
            query: { response: answer.answer, sources: sourcesPrepped },
          }),
        );
+        dispatch(
+          updateConversationId({
+            query: { conversationId: answer.conversationId },
+          }),
+        );
        dispatch(conversationSlice.actions.setStatus('idle'));
+        getConversations()
+          .then((fetchedConversations) => {
+            dispatch(setConversations(fetchedConversations));
+          })
+          .catch((error) => {
+            console.error('Failed to fetch conversations: ', error);
+          });
      }
    }
  }
-  return { answer: '', query: question, result: '', sources: [] };
+  return {
+    conversationId: null,
+    title: null,
+    answer: '',
+    query: question,
+    result: '',
+    sources: [],
+  };
  },
);
@@ -98,6 +135,9 @@ export const conversationSlice = createSlice({
     addQuery(state, action: PayloadAction<Query>) {
       state.queries.push(action.payload);
     },
+    setConversation(state, action: PayloadAction<Query[]>) {
+      state.queries = action.payload;
+    },
     updateStreamingQuery(
       state,
       action: PayloadAction<{ index: number; query: Partial<Query> }>,
@@ -113,6 +153,12 @@ export const conversationSlice = createSlice({
        };
      }
    },
+    updateConversationId(
+      state,
+      action: PayloadAction<{ query: Partial<Query> }>,
+    ) {
+      state.conversationId = action.payload.query.conversationId ?? null;
+    },
    updateStreamingSource(
      state,
      action: PayloadAction<{ index: number; query: Partial<Query> }>,
@@ -161,6 +207,8 @@ export const {
   addQuery,
   updateQuery,
   updateStreamingQuery,
+  updateConversationId,
   updateStreamingSource,
+  setConversation,
 } = conversationSlice.actions;
 export default conversationSlice.reducer;

@@ -33,6 +33,29 @@ export async function getDocs(): Promise<Doc[] | null> {
   }
 }
+export async function getConversations(): Promise<
+  { name: string; id: string }[] | null
+> {
+  try {
+    const apiHost =
+      import.meta.env.VITE_API_HOST || 'https://docsapi.arc53.com';
+    const response = await fetch(apiHost + '/api/get_conversations');
+    const data = await response.json();
+    const conversations: { name: string; id: string }[] = [];
+    data.forEach((conversation: object) => {
+      conversations.push(conversation as { name: string; id: string });
+    });
+    return conversations;
+  } catch (error) {
+    console.log(error);
+    return null;
+  }
+}
 export function getLocalApiKey(): string | null {
   const key = localStorage.getItem('DocsGPTApiKey');
   return key;

@@ -10,6 +10,7 @@ interface Preference {
   apiKey: string;
   selectedDocs: Doc | null;
   sourceDocs: Doc[] | null;
+  conversations: { name: string; id: string }[] | null;
 }
 const initialState: Preference = {
@@ -26,6 +27,7 @@ const initialState: Preference = {
     model: 'openai_text-embedding-ada-002',
   } as Doc,
   sourceDocs: null,
+  conversations: null,
 };
 export const prefSlice = createSlice({
@@ -41,10 +43,14 @@ export const prefSlice = createSlice({
     setSourceDocs: (state, action) => {
       state.sourceDocs = action.payload;
     },
+    setConversations: (state, action) => {
+      state.conversations = action.payload;
+    },
   },
 });
-export const { setApiKey, setSelectedDocs, setSourceDocs } = prefSlice.actions;
+export const { setApiKey, setSelectedDocs, setSourceDocs, setConversations } =
+  prefSlice.actions;
 export default prefSlice.reducer;
 export const prefListenerMiddleware = createListenerMiddleware();
@@ -74,3 +80,7 @@ export const selectSourceDocs = (state: RootState) =>
   state.preference.sourceDocs;
 export const selectSelectedDocs = (state: RootState) =>
   state.preference.selectedDocs;
+export const selectConversations = (state: RootState) =>
+  state.preference.conversations;
+export const selectConversationId = (state: RootState) =>
+  state.conversation.conversationId;

@@ -13,6 +13,7 @@ const store = configureStore({
     preference: {
       apiKey: key ?? '',
       selectedDocs: doc !== null ? JSON.parse(doc) : null,
+      conversations: null,
       sourceDocs: [
         {
           location: '',
