Merge branch 'main' into changing_discord_github_icons

Alex 2023-10-05 16:33:18 +01:00 committed by GitHub
commit aa9a024ee1
25 changed files with 187 additions and 140 deletions

View File

@ -1,8 +1,7 @@
OPENAI_API_KEY=<LLM api key (for example, open ai key)>
SELF_HOSTED_MODEL=false
API_KEY=<LLM api key (for example, open ai key)>
VITE_API_STREAMING=true
#For Azure
#For Azure (you can delete it if you don't use Azure)
OPENAI_API_BASE=
OPENAI_API_VERSION=
AZURE_DEPLOYMENT_NAME=
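
These values are typically read from the environment at startup. Below is a minimal, hedged Python sketch of loading them with `os.getenv`; the `load_llm_settings` helper is hypothetical and is not the project's actual settings module (see application/core/settings.py for the real options).

```python
import os

def load_llm_settings() -> dict:
    """Hypothetical helper: read the LLM and optional Azure values from the environment."""
    return {
        # Generic LLM key (for example, an OpenAI key).
        "api_key": os.getenv("API_KEY", ""),
        "self_hosted_model": os.getenv("SELF_HOSTED_MODEL", "false").lower() == "true",
        # Azure-specific values; they can stay empty when Azure is not used.
        "openai_api_base": os.getenv("OPENAI_API_BASE", ""),
        "openai_api_version": os.getenv("OPENAI_API_VERSION", ""),
        "azure_deployment_name": os.getenv("AZURE_DEPLOYMENT_NAME", ""),
    }

if __name__ == "__main__":
    print(load_llm_settings())
```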

View File

@ -38,7 +38,7 @@ When deploying your DocsGPT to a live environment, we're eager to provide person
You can find our [Roadmap](https://github.com/orgs/arc53/projects/2) here. Please don't hesitate to contribute or create issues, it helps us make DocsGPT better!
## Our open source models optimised for DocsGPT:
## Our Open-Source models optimised for DocsGPT:
| Name | Base Model | Requirements (or similar) |
|-------------------|------------|----------------------------------------------------------|
@ -120,16 +120,11 @@ docker compose -f docker-compose-dev.yaml up -d
Make sure you have Python 3.10 or 3.11 installed.
1. Export required environment variables
```commandline
export CELERY_BROKER_URL=redis://localhost:6379/0
export CELERY_RESULT_BACKEND=redis://localhost:6379/1
export MONGO_URI=mongodb://localhost:27017/docsgpt
export FLASK_APP=application/app.py
export FLASK_DEBUG=true
```
2. Prepare .env file
Copy `.env_sample` and create `.env` with your OpenAI API token
1. Export required environment variables or prepare a .env file in the application folder
Prepare .env file
Copy `.env_sample` and create `.env` with your OpenAI API token for the API_KEY and EMBEDDINGS_KEY fields
(check out application/core/settings.py if you want to see more config options)
3. (optional) Create a Python virtual environment
```commandline
python -m venv venv
@ -151,6 +146,11 @@ Make sure you have Node version 16 or higher.
3. Run the app
`npm run dev`
## All Thanks To Our Contributors
<a href="https://github.com/arc53/DocsGPT/graphs/contributors">
<img src="https://contrib.rocks/image?repo=arc53/DocsGPT" />
</a>
Built with [🦜️🔗 LangChain](https://github.com/hwchase17/langchain)
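
To make the exported development variables above a little more concrete, here is a hedged Python sketch of how a Celery worker and a MongoDB client are commonly wired from those same variables; the object names (`celery_app`, `mongo`) are assumptions for the example, not the repository's actual code.

```python
import os

from celery import Celery          # pip install celery
from pymongo import MongoClient    # pip install pymongo

# Broker/backend and database URIs match the exported development variables.
celery_app = Celery(
    "docsgpt_worker",
    broker=os.getenv("CELERY_BROKER_URL", "redis://localhost:6379/0"),
    backend=os.getenv("CELERY_RESULT_BACKEND", "redis://localhost:6379/1"),
)

mongo = MongoClient(os.getenv("MONGO_URI", "mongodb://localhost:27017/docsgpt"))
db = mongo.get_default_database()  # "docsgpt", taken from the URI path

if __name__ == "__main__":
    print("Celery broker:", celery_app.conf.broker_url)
    print("Mongo database:", db.name)
```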

View File

@ -1,9 +1,8 @@
API_KEY=your_api_key
EMBEDDINGS_KEY=your_api_key
CELERY_BROKER_URL=redis://localhost:6379/0
CELERY_RESULT_BACKEND=redis://localhost:6379/1
MONGO_URI=mongodb://localhost:27017/docsgpt
API_URL=http://localhost:7091
FLASK_APP=application/app.py
FLASK_DEBUG=true
#For OPENAI on Azure
OPENAI_API_BASE=

View File

@ -84,13 +84,14 @@ def delete_old():
path = request.args.get("path")
dirs = path.split("/")
dirs_clean = []
for i in range(1, len(dirs)):
for i in range(0, len(dirs)):
dirs_clean.append(secure_filename(dirs[i]))
# check that path starts with indexes or vectors
if dirs[0] not in ["indexes", "vectors"]:
if dirs_clean[0] not in ["indexes", "vectors"]:
return {"status": "error"}
path_clean = "/".join(dirs)
vectors_collection.delete_one({"location": path})
path_clean = "/".join(dirs_clean)
vectors_collection.delete_one({"name": dirs_clean[-1], 'user': dirs_clean[-2]})
if settings.VECTOR_STORE == "faiss":
try:
shutil.rmtree(os.path.join(current_dir, path_clean))
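
The change above runs every path segment through `secure_filename` and validates the first sanitized segment, so a crafted path can no longer slip past the `indexes`/`vectors` check. Here is a small standalone sketch of the same sanitization pattern, assuming Werkzeug is installed; `sanitize_delete_path` is an illustrative helper, not the actual route code.

```python
from werkzeug.utils import secure_filename

def sanitize_delete_path(path: str) -> str | None:
    """Sanitize a user-supplied 'indexes/...' or 'vectors/...' path; return None if it is not allowed."""
    dirs_clean = [secure_filename(part) for part in path.split("/")]
    # The first sanitized segment must be one of the two storage roots.
    if not dirs_clean or dirs_clean[0] not in ("indexes", "vectors"):
        return None
    # Join the sanitized segments back into a relative path.
    return "/".join(dirs_clean)

if __name__ == "__main__":
    print(sanitize_delete_path("indexes/local/my-docs"))  # indexes/local/my-docs
    print(sanitize_delete_path("../etc/passwd"))          # None (rejected)
```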

View File

@ -2,13 +2,26 @@ from application.llm.base import BaseLLM
class HuggingFaceLLM(BaseLLM):
def __init__(self, api_key, llm_name='Arc53/DocsGPT-7B'):
def __init__(self, api_key, llm_name='Arc53/DocsGPT-7B',q=False):
global hf
from langchain.llms import HuggingFacePipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
tokenizer = AutoTokenizer.from_pretrained(llm_name)
model = AutoModelForCausalLM.from_pretrained(llm_name)
if q:
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, BitsAndBytesConfig
tokenizer = AutoTokenizer.from_pretrained(llm_name)
bnb_config = BitsAndBytesConfig(
load_in_4bit=True,
bnb_4bit_use_double_quant=True,
bnb_4bit_quant_type="nf4",
bnb_4bit_compute_dtype=torch.bfloat16
)
model = AutoModelForCausalLM.from_pretrained(llm_name,quantization_config=bnb_config)
else:
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
tokenizer = AutoTokenizer.from_pretrained(llm_name)
model = AutoModelForCausalLM.from_pretrained(llm_name)
pipe = pipeline(
"text-generation", model=model,
tokenizer=tokenizer, max_new_tokens=2000,
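
The new `q` flag switches the Hugging Face model load to 4-bit NF4 quantization via `BitsAndBytesConfig`, which cuts GPU memory use at a small accuracy cost. The sketch below mirrors that branch as a standalone script; it assumes a CUDA-capable GPU and the `torch`, `transformers`, `accelerate`, and `bitsandbytes` packages, and the prompt format is only an example.

```python
# Minimal sketch of loading a causal LM in 4-bit, mirroring the q=True branch above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, pipeline

llm_name = "Arc53/DocsGPT-7B"

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # store weights in 4-bit NF4
    bnb_4bit_use_double_quant=True,         # also quantize the quantization constants
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,  # compute in bf16 for speed/accuracy
)

tokenizer = AutoTokenizer.from_pretrained(llm_name)
model = AutoModelForCausalLM.from_pretrained(llm_name, quantization_config=bnb_config)

generate = pipeline(
    "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=200
)
print(generate("### Instruction\nWhat is DocsGPT?\n### Answer\n")[0]["generated_text"])
```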

View File

@ -41,7 +41,7 @@ Jinja2==3.1.2
jmespath==1.0.1
joblib==1.2.0
kombu==5.2.4
langchain==0.0.263
langchain==0.0.308
loguru==0.6.0
lxml==4.9.2
MarkupSafe==2.1.2
@ -59,7 +59,7 @@ numpy==1.24.2
openai==0.27.8
packaging==23.0
pathos==0.3.0
Pillow==9.4.0
Pillow==10.0.1
pox==0.3.2
ppft==1.7.6.6
prompt-toolkit==3.0.38

View File

@ -12,9 +12,9 @@ Here's a step-by-step guide on how to setup an Amazon Lightsail instance to host
### 3. Create your instance
The first step is to select the "Instance location". In most cases there's no need to switch locations as the default one will work well.
The first step is to select the "Instance location". In most cases, there's no need to switch locations as the default one will work well.
After that it is time to pick your Instance Image. We recommend using "Linux/Unix" as the image and "Ubuntu 20.04 LTS" for Operating System.
After that, it is time to pick your Instance Image. We recommend using "Linux/Unix" as the image and "Ubuntu 20.04 LTS" as the Operating System.
As for instance plan, it'll vary depending on your unique demands, but a "1 GB, 1vCPU, 40GB SSD and 2TB transfer" setup should cover most scenarios.
@ -30,13 +30,13 @@ PS: Once you create your instance, it'll likely take a few minutes for the setup
- 40GB SSD Hard Drive
- 2TB transfer
### Connecting to your the newly created instance
### Connecting to your newly created instance
Your instance will be ready for use a few minutes after being created. To access, just open it up and click on "Connect using SSH".
Your instance will be ready for use a few minutes after being created. To access it, just open it up and click on "Connect using SSH".
#### Clone the repository
A terminal window will pop up, and the first step will be to clone DocsGPT git repository.
A terminal window will pop up, and the first step will be to clone the DocsGPT git repository.
`git clone https://github.com/arc53/DocsGPT.git`
@ -48,7 +48,7 @@ Once it has finished cloning the repository, it is time to download the package
#### Install Docker and Docker Compose
DocsGPT backend and worker use python, Frontend is written on React and the whole application is containerized using Docker. To install Docker and Docker Compose, enter the following commands:
DocsGPT's backend and worker use Python, the frontend is written in React, and the whole application is containerized using Docker. To install Docker and Docker Compose, enter the following commands:
`sudo apt install docker.io`
@ -58,7 +58,7 @@ And now install docker-compose:
#### Access the DocsGPT folder
Enter the following command to access the folder in which DocsGPT docker-compose file is.
Enter the following command to access the folder in which the DocsGPT docker-compose file is present.
`cd DocsGPT/`
@ -76,9 +76,9 @@ VITE_API_STREAMING=true
SELF_HOSTED_MODEL=false
```
To save the file, press CTRL+X, then Y and then ENTER.
To save the file, press CTRL+X, then Y, and then ENTER.
Next we need to set a correct IP for our Backend. To do so, open the docker-compose.yml file:
Next, we need to set a correct IP for our Backend. To do so, open the docker-compose.yml file:
`nano docker-compose.yml`
@ -93,17 +93,17 @@ You're almost there! Now that all the necessary bits and pieces have been instal
`sudo docker-compose up -d`
If you launch it for the first time it will take a few minutes to download all the necessary dependencies and build.
Launching it for the first time will take a few minutes to download all the necessary dependencies and build.
Once this is done you can go ahead and close the terminal window.
#### Enabling ports
Before you being able to access your live instance, you must first enable the port which it is using.
Before you are able to access your live instance, you must first enable the port that it is using.
Open your Lightsail instance and head to "Networking".
Then click on "Add rule" under "IPv4 Firewall", enter 5173 as your your port and hit "Create".
Then click on "Add rule" under "IPv4 Firewall", enter 5173 as your port, and hit "Create".
Repeat the process for port 7091.
#### Access your instance
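
As an optional sanity check before opening the app in a browser, the short Python sketch below verifies that the two ports enabled above (5173 and 7091) are reachable; the IP address shown is only a placeholder for your instance's public IP.

```python
import socket

INSTANCE_IP = "203.0.113.10"   # placeholder: use your Lightsail instance's public IP
PORTS = (5173, 7091)           # frontend and backend ports opened in the firewall above

for port in PORTS:
    try:
        with socket.create_connection((INSTANCE_IP, port), timeout=5):
            print(f"port {port}: reachable")
    except OSError as exc:
        print(f"port {port}: not reachable ({exc})")
```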

View File

@ -1,16 +1,16 @@
## Launching Web App
Note: Make sure you have docker installed
Note: Make sure you have Docker installed
On macOS or Linux, just run:
`./setup.sh`
It will install all the dependencies and give you an option to download local model or use OpenAI
It will install all the dependencies and give you an option to download the local model or use OpenAI
Otherwise refer to this Guide:
Otherwise, refer to this Guide:
1. Open download this repository with `git clone https://github.com/arc53/DocsGPT.git`
2. Create .env file in your root directory and set your `API_KEY` with your openai api key
1. Open and download this repository with `git clone https://github.com/arc53/DocsGPT.git`
2. Create a .env file in your root directory and set `API_KEY` to your OpenAI API key
3. Run `docker-compose build && docker-compose up`
4. Navigate to `http://localhost:5173/`

View File

@ -29,4 +29,4 @@ That's it!
### Hosting everything locally and privately (for using our optimised open-source models)
If you are working with important data and don't want anything to leave your premises:
Make sure you set SELF_HOSTED_MODEL as true in you .env variable and for your LLM_NAME you can use anything that's on Huggingface
Make sure you set SELF_HOSTED_MODEL to true in your .env file, and for LLM_NAME you can use anything that's available on Hugging Face
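
For illustration only (this is not DocsGPT's actual factory code), a hedged Python sketch of dispatching on those two settings might look like the following; the `make_llm` helper is hypothetical, and the OpenAI call uses the pre-1.0 `openai` client pinned in requirements.txt, with an example model name.

```python
import os

def make_llm():
    """Hypothetical dispatch on the .env settings described above; returns a callable prompt -> text."""
    self_hosted = os.getenv("SELF_HOSTED_MODEL", "false").lower() == "true"

    if self_hosted:
        # LLM_NAME can be any causal LM hosted on Hugging Face, e.g. Arc53/DocsGPT-7B.
        from transformers import pipeline
        generator = pipeline("text-generation", model=os.getenv("LLM_NAME", "Arc53/DocsGPT-7B"))
        return lambda prompt: generator(prompt, max_new_tokens=200)[0]["generated_text"]

    # Otherwise fall back to the hosted OpenAI API (pre-1.0 client).
    import openai
    openai.api_key = os.getenv("API_KEY", "")
    return lambda prompt: openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # example model name
        messages=[{"role": "user", "content": prompt}],
    )["choices"][0]["message"]["content"]

if __name__ == "__main__":
    llm = make_llm()
    print(llm("What does DocsGPT do?"))
```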

View File

@ -1,9 +1,12 @@
{
"name": "doc-ext",
"version": "0.0.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"version": "0.0.1",
"license": "MIT",
"devDependencies": {
"tailwindcss": "^3.2.4"
}
@ -407,10 +410,16 @@
}
},
"node_modules/nanoid": {
"version": "3.3.4",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz",
"integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==",
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz",
"integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"bin": {
"nanoid": "bin/nanoid.cjs"
},
@ -470,9 +479,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.21",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.21.tgz",
"integrity": "sha512-tP7u/Sn/dVxK2NnruI4H9BG+x+Wxz6oeZ1cJ8P6G/PZY0IKk4k/63TDsQf2kQq3+qoJeLm2kIBUNlZe3zgb4Zg==",
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"dev": true,
"funding": [
{
@ -482,10 +491,14 @@
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"dependencies": {
"nanoid": "^3.3.4",
"nanoid": "^3.3.6",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
},
@ -1094,9 +1107,9 @@
"dev": true
},
"nanoid": {
"version": "3.3.4",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz",
"integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==",
"version": "3.3.6",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz",
"integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==",
"dev": true
},
"normalize-path": {
@ -1136,12 +1149,12 @@
"dev": true
},
"postcss": {
"version": "8.4.21",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.21.tgz",
"integrity": "sha512-tP7u/Sn/dVxK2NnruI4H9BG+x+Wxz6oeZ1cJ8P6G/PZY0IKk4k/63TDsQf2kQq3+qoJeLm2kIBUNlZe3zgb4Zg==",
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"dev": true,
"requires": {
"nanoid": "^3.3.4",
"nanoid": "^3.3.6",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,12 +1,12 @@
{
"name": "docsgpt",
"version": "0.2.3",
"version": "0.2.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "docsgpt",
"version": "0.2.3",
"version": "0.2.4",
"license": "Apache-2.0",
"dependencies": {
"postcss-cli": "^10.1.0",
@ -19,7 +19,7 @@
"@types/react-dom": "^18.0.9",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.20",
"postcss": "^8.4.31",
"typescript": "^4.9.3",
"vite": "^4.0.0",
"vite-plugin-dts": "^1.7.1"
@ -1784,9 +1784,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.29",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz",
"integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==",
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"funding": [
{
"type": "opencollective",

View File

@ -38,7 +38,7 @@
"@types/react-dom": "^18.0.9",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.13",
"postcss": "^8.4.20",
"postcss": "^8.4.31",
"typescript": "^4.9.3",
"vite": "^4.0.0",
"vite-plugin-dts": "^1.7.1"

View File

@ -620,9 +620,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.23",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.23.tgz",
"integrity": "sha512-bQ3qMcpF6A/YjR55xtoTr0jGOlnPOKAIMdOWiv0EIT6HVPEaJiJB4NLljSbiHoC2RX7DN5Uvjtpbg1NPdwv1oA==",
"version": "8.4.31",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz",
"integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==",
"dev": true,
"funding": [
{

View File

@ -2,24 +2,19 @@ import { Routes, Route } from 'react-router-dom';
import Navigation from './Navigation';
import Conversation from './conversation/Conversation';
import About from './About';
import { useState } from 'react';
import { ActiveState } from './models/misc';
import { inject } from '@vercel/analytics';
import { useMediaQuery } from './hooks';
inject();
export default function App() {
//TODO : below media query is disjoint from tailwind. Please wire it together.
const [navState, setNavState] = useState<ActiveState>(
window.matchMedia('(min-width: 768px)').matches ? 'ACTIVE' : 'INACTIVE',
);
const { isMobile } = useMediaQuery();
return (
<div className="min-h-full min-w-full">
<Navigation navState={navState} setNavState={setNavState} />
<Navigation />
<div
className={`transition-all duration-200 ${
navState === 'ACTIVE' ? 'ml-0 md:ml-72 lg:ml-60' : 'ml-0 md:ml-16'
!isMobile ? 'ml-0 md:ml-72 lg:ml-60' : 'ml-0 md:ml-16'
}`}
>
<Routes>

View File

@ -10,8 +10,8 @@ export default function Hero({ className = '' }: { className?: string }) {
</p>
<p className="mb-3 text-center leading-6 text-black-1000">
Enter a query related to the information in the documentation you
selected to receive and we will provide you with the most relevant
answers.
selected to receive
<br /> and we will provide you with the most relevant answers.
</p>
<p className="mb-3 text-center leading-6 text-black-1000">
Start by entering your query in the input field below and we will do the

View File

@ -28,23 +28,19 @@ import {
setConversation,
updateConversationId,
} from './conversation/conversationSlice';
import { useOutsideAlerter } from './hooks';
import { useMediaQuery, useOutsideAlerter } from './hooks';
import Upload from './upload/Upload';
import { Doc, getConversations } from './preferences/preferenceApi';
import SelectDocsModal from './preferences/SelectDocsModal';
export default function Navigation({
navState,
setNavState,
}: {
navState: ActiveState;
setNavState: React.Dispatch<React.SetStateAction<ActiveState>>;
}) {
export default function Navigation() {
const dispatch = useDispatch();
const docs = useSelector(selectSourceDocs);
const selectedDocs = useSelector(selectSelectedDocs);
const conversations = useSelector(selectConversations);
const conversationId = useSelector(selectConversationId);
const { isMobile } = useMediaQuery();
const [navOpen, setNavOpen] = useState(!isMobile);
const [isDocsListOpen, setIsDocsListOpen] = useState(false);
@ -126,51 +122,46 @@ export default function Navigation({
useOutsideAlerter(
navRef,
() => {
if (
window.matchMedia('(max-width: 768px)').matches &&
navState === 'ACTIVE' &&
apiKeyModalState === 'INACTIVE'
) {
setNavState('INACTIVE');
if (isMobile && navOpen && apiKeyModalState === 'INACTIVE') {
setNavOpen(false);
setIsDocsListOpen(false);
}
},
[navState, isDocsListOpen, apiKeyModalState],
[navOpen, isDocsListOpen, apiKeyModalState],
);
/*
Needed to fix bug where if mobile nav was closed and then window was resized to desktop, nav would still be closed but the button to open would be gone, as per #1 on issue #146
*/
useEffect(() => {
window.addEventListener('resize', () => {
if (window.matchMedia('(min-width: 768px)').matches) {
setNavState('ACTIVE');
} else {
setNavState('INACTIVE');
}
});
}, []);
if (isMobile) {
setNavOpen(false);
return;
}
setNavOpen(true);
}, [isMobile]);
return (
<>
<div
ref={navRef}
className={`${
navState === 'INACTIVE' && '-ml-96 md:-ml-[14rem]'
!navOpen && '-ml-96 md:-ml-[14rem]'
} duration-20 fixed z-20 flex h-full w-72 flex-col border-r-2 bg-gray-50 transition-all`}
>
<div className={'visible h-16 w-full border-b-2 md:hidden'}>
<button
className="float-right mr-5 mt-5 h-5 w-5"
onClick={() =>
setNavState(navState === 'ACTIVE' ? 'INACTIVE' : 'ACTIVE')
}
onClick={() => {
setNavOpen(!navOpen);
}}
>
<img
src={Arrow1}
alt="menu toggle"
className={`${
navState === 'INACTIVE' ? 'rotate-180' : 'rotate-0'
!navOpen ? 'rotate-180' : 'rotate-0'
} m-auto w-3 transition-all duration-200`}
/>
</button>
@ -179,7 +170,11 @@ export default function Navigation({
to={'/'}
onClick={() => {
dispatch(setConversation([]));
dispatch(updateConversationId({ query: { conversationId: null } }));
dispatch(
updateConversationId({
query: { conversationId: null },
}),
);
}}
className={({ isActive }) =>
`${
@ -340,7 +335,6 @@ export default function Navigation({
<img src={Discord} alt="link" className="ml-2 w-5" />
<p className="my-auto text-eerie-black">Visit our Discord</p>
</a>
<a
href="https://github.com/arc53/DocsGPT"
target="_blank"
@ -356,7 +350,7 @@ export default function Navigation({
<div className="fixed h-16 w-full border-b-2 bg-gray-50 md:hidden">
<button
className="mt-5 ml-6 h-6 w-6 md:hidden"
onClick={() => setNavState('ACTIVE')}
onClick={() => setNavOpen(true)}
>
<img src={Hamburger} alt="menu toggle" className="w-7" />
</button>

View File

@ -61,6 +61,8 @@ export default function Conversation() {
};
const handleQuestion = (question: string) => {
question = question.trim();
if (question === '') return;
dispatch(addQuery({ prompt: question }));
dispatch(fetchAnswer({ question }));
};
@ -149,6 +151,7 @@ export default function Conversation() {
<div className="flex w-full">
<div
ref={inputRef}
placeholder="Type your message here..."
contentEditable
onPaste={handlePaste}
className={`border-000000 overflow-x-hidden; max-h-24 min-h-[2.6rem] w-full overflow-y-auto whitespace-pre-wrap rounded-xl border bg-white py-2 pl-4 pr-9 leading-7 opacity-100 focus:outline-none`}

View File

@ -43,7 +43,7 @@ const ConversationBubble = forwardRef<
<div ref={ref} className={`flex flex-row-reverse self-end ${className}`}>
<Avatar className="mt-2 text-2xl" avatar="🧑‍💻"></Avatar>
<div className="mr-2 ml-10 flex items-center rounded-3xl bg-blue-1000 p-3.5 text-white">
<ReactMarkdown className="whitespace-pre-wrap break-words">
<ReactMarkdown className="whitespace-pre-wrap break-all">
{message}
</ReactMarkdown>
</div>

View File

@ -1,19 +0,0 @@
import { useEffect, RefObject } from 'react';
export function useOutsideAlerter<T extends HTMLElement>(
ref: RefObject<T>,
handler: () => void,
additionalDeps: unknown[],
) {
useEffect(() => {
function handleClickOutside(this: Document, event: MouseEvent) {
if (ref.current && !ref.current.contains(event.target as Node)) {
handler();
}
}
document.addEventListener('mousedown', handleClickOutside);
return () => {
document.removeEventListener('mousedown', handleClickOutside);
};
}, [ref, ...additionalDeps]);
}

View File

@ -0,0 +1,45 @@
import { useEffect, RefObject, useState } from 'react';
export function useOutsideAlerter<T extends HTMLElement>(
ref: RefObject<T>,
handler: () => void,
additionalDeps: unknown[],
) {
useEffect(() => {
function handleClickOutside(this: Document, event: MouseEvent) {
if (ref.current && !ref.current.contains(event.target as Node)) {
handler();
}
}
document.addEventListener('mousedown', handleClickOutside);
return () => {
document.removeEventListener('mousedown', handleClickOutside);
};
}, [ref, ...additionalDeps]);
}
// Use isMobile for checking if the width is in the expected mobile range (less than 768px)
// use isDesktop for effects you explicitly only want when width is wider than 960px.
export function useMediaQuery() {
const mobileQuery = '(max-width: 768px)';
const desktopQuery = '(min-width: 960px)';
const [isMobile, setIsMobile] = useState(false);
const [isDesktop, setIsDesktop] = useState(false);
useEffect(() => {
const mobileMedia = window.matchMedia(mobileQuery);
const desktopMedia = window.matchMedia(desktopQuery);
const updateMediaQueries = () => {
setIsMobile(mobileMedia.matches);
setIsDesktop(desktopMedia.matches);
};
updateMediaQueries();
const listener = () => updateMediaQueries();
window.addEventListener('resize', listener);
return () => {
window.removeEventListener('resize', listener);
};
}, [mobileQuery, desktopQuery]);
return { isMobile, isDesktop };
}

View File

@ -358,3 +358,9 @@ template {
[hidden] {
display: none;
}
[contentEditable]:empty:before {
content: attr(placeholder);
color: #9ca3af;
opacity: 1;
}

View File

@ -2,7 +2,7 @@ import { useRef, useState } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { ActiveState } from '../models/misc';
import { selectApiKey, setApiKey } from './preferenceSlice';
import { useOutsideAlerter } from './../hooks';
import { useMediaQuery, useOutsideAlerter } from './../hooks';
import Modal from '../Modal';
export default function APIKeyModal({
@ -19,14 +19,12 @@ export default function APIKeyModal({
const [key, setKey] = useState(apiKey);
const [isError, setIsError] = useState(false);
const modalRef = useRef(null);
const { isMobile } = useMediaQuery();
useOutsideAlerter(
modalRef,
() => {
if (
window.matchMedia('(max-width: 768px)').matches &&
modalState === 'ACTIVE'
) {
if (isMobile && modalState === 'ACTIVE') {
setModalState('INACTIVE');
}
},

View File

@ -47,7 +47,7 @@ javalang==0.13.0
Jinja2==3.1.2
jmespath==1.0.1
joblib==1.3.1
langchain==0.0.252
langchain==0.0.308
lxml==4.9.3
manifest-ml==0.1.8
MarkupSafe==2.1.3