mirror of https://github.com/arc53/DocsGPT
synced 2024-11-03 23:15:37 +00:00
commit 5e5e2b8aee
.gitignore (vendored)
@@ -75,6 +75,7 @@ target/
 
 # Jupyter Notebook
 .ipynb_checkpoints
+**/*.ipynb
 
 # IPython
 profile_default/
application/parser/remote/reddit_loader.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+from application.parser.remote.base import BaseRemote
+from langchain_community.document_loaders import RedditPostsLoader
+
+
+class RedditPostsLoaderRemote(BaseRemote):
+    def load_data(self, inputs):
+        data = eval(inputs)
+        client_id = data.get("client_id")
+        client_secret = data.get("client_secret")
+        user_agent = data.get("user_agent")
+        categories = data.get("categories", ["new", "hot"])
+        mode = data.get("mode", "subreddit")
+        search_queries = data.get("search_queries")
+        number_posts = data.get("number_posts", 10)
+        self.loader = RedditPostsLoader(
+            client_id=client_id,
+            client_secret=client_secret,
+            user_agent=user_agent,
+            categories=categories,
+            mode=mode,
+            search_queries=search_queries,
+            number_posts=number_posts,
+        )
+        documents = self.loader.load()
+        print(f"Loaded {len(documents)} documents from Reddit")
+        return documents
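For orientation, a minimal usage sketch of the new loader (not part of the commit; it assumes praw is installed and the placeholder credentials are replaced with real ones). load_data receives one string, turns it into a dict with eval, and forwards the fields to LangChain's RedditPostsLoader.

```python
# Hypothetical invocation of RedditPostsLoaderRemote; all values are placeholders.
from application.parser.remote.reddit_loader import RedditPostsLoaderRemote

payload = (
    "{'client_id': 'YOUR_CLIENT_ID', 'client_secret': 'YOUR_CLIENT_SECRET', "
    "'user_agent': 'docsgpt-ingest', 'search_queries': ['docsgpt'], "
    "'number_posts': 5}"
)
reddit_loader = RedditPostsLoaderRemote()
documents = reddit_loader.load_data(payload)  # list of LangChain Document objects
```

Because the payload is parsed with eval, it must also be a valid Python literal; json.loads would be a stricter and safer parser for the JSON string the frontend sends.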
@@ -1,13 +1,15 @@
 from application.parser.remote.sitemap_loader import SitemapLoader
 from application.parser.remote.crawler_loader import CrawlerLoader
 from application.parser.remote.web_loader import WebLoader
+from application.parser.remote.reddit_loader import RedditPostsLoaderRemote
 
 
 class RemoteCreator:
     loaders = {
-        'url': WebLoader,
-        'sitemap': SitemapLoader,
-        'crawler': CrawlerLoader
+        "url": WebLoader,
+        "sitemap": SitemapLoader,
+        "crawler": CrawlerLoader,
+        "reddit": RedditPostsLoaderRemote,
     }
 
     @classmethod
@@ -15,4 +17,4 @@ class RemoteCreator:
         loader_class = cls.loaders.get(type.lower())
         if not loader_class:
             raise ValueError(f"No LLM class found for type {type}")
-        return loader_class(*args, **kwargs)
+        return loader_class(*args, **kwargs)
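The two hunks above extend the RemoteCreator factory: the new "reddit" key is registered in the loaders map, and create_loader resolves a lower-cased type string to a loader class. A minimal sketch of how the worker side resolves it, reusing the illustrative payload from above:

```python
# Sketch only: resolve the Reddit loader through the factory and run it.
remote_loader = RemoteCreator.create_loader("reddit")  # returns RedditPostsLoaderRemote()
raw_docs = remote_loader.load_data(payload)
```

An unknown type falls through to the ValueError branch; note that its message still says "No LLM class found", even though this factory returns remote loaders rather than LLM wrappers.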
@@ -15,23 +15,27 @@ from application.parser.schema.base import Document
 from application.parser.token_func import group_split
 
 try:
-    nltk.download('punkt', quiet=True)
-    nltk.download('averaged_perceptron_tagger', quiet=True)
+    nltk.download("punkt", quiet=True)
+    nltk.download("averaged_perceptron_tagger", quiet=True)
 except FileExistsError:
     pass
 
 
 # Define a function to extract metadata from a given filename.
 def metadata_from_filename(title):
-    store = '/'.join(title.split('/')[1:3])
-    return {'title': title, 'store': store}
+    store = "/".join(title.split("/")[1:3])
+    return {"title": title, "store": store}
 
 
 # Define a function to generate a random string of a given length.
 def generate_random_string(length):
-    return ''.join([string.ascii_letters[i % 52] for i in range(length)])
+    return "".join([string.ascii_letters[i % 52] for i in range(length)])
 
-current_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+current_dir = os.path.dirname(
+    os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+)
 
 
 # Define the main function for ingesting and processing documents.
 def ingest_worker(self, directory, formats, name_job, filename, user):
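This hunk is mostly quote and line-wrapping normalization around the ingest helpers. As a quick illustration (the path below is made up), metadata_from_filename keeps the full path as the title and uses the second and third path segments as the store:

```python
metadata_from_filename("inputs/local/my-job/report.pdf")
# -> {"title": "inputs/local/my-job/report.pdf", "store": "local/my-job"}
```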
@@ -62,38 +66,52 @@ def ingest_worker(self, directory, formats, name_job, filename, user):
     token_check = True
     min_tokens = 150
     max_tokens = 1250
-    full_path = directory + '/' + user + '/' + name_job
+    full_path = directory + "/" + user + "/" + name_job
     import sys
 
     print(full_path, file=sys.stderr)
     # check if API_URL env variable is set
-    file_data = {'name': name_job, 'file': filename, 'user': user}
-    response = requests.get(urljoin(settings.API_URL, "/api/download"), params=file_data)
+    file_data = {"name": name_job, "file": filename, "user": user}
+    response = requests.get(
+        urljoin(settings.API_URL, "/api/download"), params=file_data
+    )
     # check if file is in the response
     print(response, file=sys.stderr)
     file = response.content
 
     if not os.path.exists(full_path):
         os.makedirs(full_path)
-    with open(full_path + '/' + filename, 'wb') as f:
+    with open(full_path + "/" + filename, "wb") as f:
         f.write(file)
 
     # check if file is .zip and extract it
-    if filename.endswith('.zip'):
-        with zipfile.ZipFile(full_path + '/' + filename, 'r') as zip_ref:
+    if filename.endswith(".zip"):
+        with zipfile.ZipFile(full_path + "/" + filename, "r") as zip_ref:
             zip_ref.extractall(full_path)
-        os.remove(full_path + '/' + filename)
+        os.remove(full_path + "/" + filename)
 
-    self.update_state(state='PROGRESS', meta={'current': 1})
+    self.update_state(state="PROGRESS", meta={"current": 1})
 
-    raw_docs = SimpleDirectoryReader(input_dir=full_path, input_files=input_files, recursive=recursive,
-                                     required_exts=formats, num_files_limit=limit,
-                                     exclude_hidden=exclude, file_metadata=metadata_from_filename).load_data()
-    raw_docs = group_split(documents=raw_docs, min_tokens=min_tokens, max_tokens=max_tokens, token_check=token_check)
+    raw_docs = SimpleDirectoryReader(
+        input_dir=full_path,
+        input_files=input_files,
+        recursive=recursive,
+        required_exts=formats,
+        num_files_limit=limit,
+        exclude_hidden=exclude,
+        file_metadata=metadata_from_filename,
+    ).load_data()
+    raw_docs = group_split(
+        documents=raw_docs,
+        min_tokens=min_tokens,
+        max_tokens=max_tokens,
+        token_check=token_check,
+    )
 
     docs = [Document.to_langchain_format(raw_doc) for raw_doc in raw_docs]
 
     call_openai_api(docs, full_path, self)
-    self.update_state(state='PROGRESS', meta={'current': 100})
+    self.update_state(state="PROGRESS", meta={"current": 100})
 
     if sample:
         for i in range(min(5, len(raw_docs))):
@@ -101,70 +119,80 @@ def ingest_worker(self, directory, formats, name_job, filename, user):
 
     # get files from outputs/inputs/index.faiss and outputs/inputs/index.pkl
     # and send them to the server (provide user and name in form)
-    file_data = {'name': name_job, 'user': user}
+    file_data = {"name": name_job, "user": user}
     if settings.VECTOR_STORE == "faiss":
-        files = {'file_faiss': open(full_path + '/index.faiss', 'rb'),
-                 'file_pkl': open(full_path + '/index.pkl', 'rb')}
-        response = requests.post(urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data)
-        response = requests.get(urljoin(settings.API_URL, "/api/delete_old?path=" + full_path))
+        files = {
+            "file_faiss": open(full_path + "/index.faiss", "rb"),
+            "file_pkl": open(full_path + "/index.pkl", "rb"),
+        }
+        response = requests.post(
+            urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data
+        )
+        response = requests.get(
+            urljoin(settings.API_URL, "/api/delete_old?path=" + full_path)
+        )
     else:
-        response = requests.post(urljoin(settings.API_URL, "/api/upload_index"), data=file_data)
+        response = requests.post(
+            urljoin(settings.API_URL, "/api/upload_index"), data=file_data
+        )
 
     # delete local
     shutil.rmtree(full_path)
 
     return {
-        'directory': directory,
-        'formats': formats,
-        'name_job': name_job,
-        'filename': filename,
-        'user': user,
-        'limited': False
+        "directory": directory,
+        "formats": formats,
+        "name_job": name_job,
+        "filename": filename,
+        "user": user,
+        "limited": False,
     }
 
-def remote_worker(self, source_data, name_job, user, directory = 'temp', loader = 'url'):
+
+def remote_worker(self, source_data, name_job, user, loader, directory="temp"):
     # sample = False
     token_check = True
     min_tokens = 150
     max_tokens = 1250
-    full_path = directory + '/' + user + '/' + name_job
+    full_path = directory + "/" + user + "/" + name_job
 
     if not os.path.exists(full_path):
         os.makedirs(full_path)
 
-    self.update_state(state='PROGRESS', meta={'current': 1})
+    self.update_state(state="PROGRESS", meta={"current": 1})
 
     # source_data {"data": [url]} for url type task just urls
 
     # Use RemoteCreator to load data from URL
     remote_loader = RemoteCreator.create_loader(loader)
     raw_docs = remote_loader.load_data(source_data)
 
-    docs = group_split(documents=raw_docs, min_tokens=min_tokens, max_tokens=max_tokens, token_check=token_check)
+    docs = group_split(
+        documents=raw_docs,
+        min_tokens=min_tokens,
+        max_tokens=max_tokens,
+        token_check=token_check,
+    )
 
-    #docs = [Document.to_langchain_format(raw_doc) for raw_doc in raw_docs]
+    # docs = [Document.to_langchain_format(raw_doc) for raw_doc in raw_docs]
 
     call_openai_api(docs, full_path, self)
-    self.update_state(state='PROGRESS', meta={'current': 100})
+    self.update_state(state="PROGRESS", meta={"current": 100})
 
     # Proceed with uploading and cleaning as in the original function
-    file_data = {'name': name_job, 'user': user}
+    file_data = {"name": name_job, "user": user}
     if settings.VECTOR_STORE == "faiss":
-        files = {'file_faiss': open(full_path + '/index.faiss', 'rb'),
-                 'file_pkl': open(full_path + '/index.pkl', 'rb')}
-        requests.post(urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data)
+        files = {
+            "file_faiss": open(full_path + "/index.faiss", "rb"),
+            "file_pkl": open(full_path + "/index.pkl", "rb"),
+        }
+        requests.post(
+            urljoin(settings.API_URL, "/api/upload_index"), files=files, data=file_data
+        )
         requests.get(urljoin(settings.API_URL, "/api/delete_old?path=" + full_path))
     else:
         requests.post(urljoin(settings.API_URL, "/api/upload_index"), data=file_data)
 
     shutil.rmtree(full_path)
 
-    return {
-        'urls': source_data,
-        'name_job': name_job,
-        'user': user,
-        'limited': False
-    }
+    return {"urls": source_data, "name_job": name_job, "user": user, "limited": False}
@@ -35,10 +35,10 @@ function Dropdown({
           isOpen
             ? typeof selectedValue === 'string'
               ? 'rounded-t-xl'
-              : 'rounded-t-2xl'
+              : 'rounded-t-3xl'
             : typeof selectedValue === 'string'
               ? 'rounded-xl'
-              : 'rounded-full'
+              : 'rounded-3xl'
         }`}
       >
         {typeof selectedValue === 'string' ? (
@@ -17,10 +17,18 @@ export default function Upload({
   const [docName, setDocName] = useState('');
   const [urlName, setUrlName] = useState('');
   const [url, setUrl] = useState('');
+  const [redditData, setRedditData] = useState({
+    client_id: '',
+    client_secret: '',
+    user_agent: '',
+    search_queries: [''],
+    number_posts: 10,
+  });
   const urlOptions: { label: string; value: string }[] = [
     { label: 'Crawler', value: 'crawler' },
     // { label: 'Sitemap', value: 'sitemap' },
     { label: 'Link', value: 'url' },
+    { label: 'Reddit', value: 'reddit' },
   ];
   const [urlType, setUrlType] = useState<{ label: string; value: string }>({
     label: 'Link',
@@ -163,7 +171,6 @@ export default function Upload({
   };
-
   const uploadRemote = () => {
     console.log('here');
     const formData = new FormData();
     formData.append('name', urlName);
     formData.append('user', 'local');
@@ -171,6 +178,13 @@ export default function Upload({
       formData.append('source', urlType?.value);
     }
     formData.append('data', url);
+    if (
+      redditData.client_id.length > 0 &&
+      redditData.client_secret.length > 0
+    ) {
+      formData.set('name', 'other');
+      formData.set('data', JSON.stringify(redditData));
+    }
     const apiHost = import.meta.env.VITE_API_HOST;
     const xhr = new XMLHttpRequest();
     xhr.upload.addEventListener('progress', (event) => {
@@ -202,6 +216,19 @@ export default function Upload({
       ['.docx'],
     },
   });
+  const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
+    const { name, value } = e.target;
+    if (name === 'search_queries' && value.length > 0) {
+      setRedditData({
+        ...redditData,
+        [name]: value.split(',').map((item) => item.trim()),
+      });
+    } else
+      setRedditData({
+        ...redditData,
+        [name]: value,
+      });
+  };
   let view;
   if (progress?.type === 'UPLOAD') {
     view = <UploadProgress></UploadProgress>;
@@ -281,30 +308,102 @@ export default function Upload({
                 setUrlType(value)
               }
             />
-            <input
-              placeholder="Enter name"
-              type="text"
-              className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
-              value={urlName}
-              onChange={(e) => setUrlName(e.target.value)}
-            ></input>
-            <div className="relative bottom-12 left-2 mt-[-18.39px]">
-              <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
-                Name
-              </span>
-            </div>
-            <input
-              placeholder="URL Link"
-              type="text"
-              className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
-              value={url}
-              onChange={(e) => setUrl(e.target.value)}
-            ></input>
-            <div className="relative bottom-12 left-2 mt-[-18.39px]">
-              <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
-                Link
-              </span>
-            </div>
+            {urlType.label !== 'Reddit' ? (
+              <>
+                <input
+                  placeholder="Enter name"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  value={urlName}
+                  onChange={(e) => setUrlName(e.target.value)}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Name
+                  </span>
+                </div>
+                <input
+                  placeholder="URL Link"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  value={url}
+                  onChange={(e) => setUrl(e.target.value)}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Link
+                  </span>
+                </div>
+              </>
+            ) : (
+              <>
+                <input
+                  placeholder="Enter client ID"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  name="client_id"
+                  value={redditData.client_id}
+                  onChange={handleChange}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Client ID
+                  </span>
+                </div>
+                <input
+                  placeholder="Enter client secret"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  name="client_secret"
+                  value={redditData.client_secret}
+                  onChange={handleChange}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Client secret
+                  </span>
+                </div>
+                <input
+                  placeholder="Enter user agent"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  name="user_agent"
+                  value={redditData.user_agent}
+                  onChange={handleChange}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    User agent
+                  </span>
+                </div>
+                <input
+                  placeholder="Enter search queries"
+                  type="text"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  name="search_queries"
+                  value={redditData.search_queries}
+                  onChange={handleChange}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Search queries
+                  </span>
+                </div>
+                <input
+                  placeholder="Enter number of posts"
+                  type="number"
+                  className="h-10 w-full rounded-full border-2 border-silver px-3 outline-none dark:bg-transparent dark:text-silver"
+                  name="number_posts"
+                  value={redditData.number_posts}
+                  onChange={handleChange}
+                ></input>
+                <div className="relative bottom-12 left-2 mt-[-18.39px]">
+                  <span className="bg-white px-2 text-xs text-silver dark:bg-outer-space dark:text-silver">
+                    Number of posts
+                  </span>
+                </div>
+              </>
+            )}
           </>
         )}
         <div className="flex flex-row-reverse">