Mirror of https://github.com/Mintplex-Labs/anything-llm.git, synced 2025-05-02 17:07:13 +00:00
Split large PDFs into a subfolder in documents (#176)
Append a time value to the folder name to prevent collisions on duplicate uploads.
Parent: 6e8d81c01e
Commit: b42493c6de
2 changed files with 5 additions and 4 deletions
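In short, each converted PDF now gets its own destination folder under server/storage/documents, named from the slugified filename plus a Unix timestamp. A minimal sketch of the naming scheme, assuming a hypothetical upload called "My Large Report" (the path prefix and timestamp suffix match the diff below):

    import time
    from slugify import slugify  # python-slugify, the same dependency the converter already imports

    filename = "My Large Report"  # hypothetical example name
    destination = f"../server/storage/documents/{slugify(filename)}-{int(time.time())}"
    print(destination)  # e.g. ../server/storage/documents/my-large-report-1693526400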
@@ -1,4 +1,4 @@
-import os
+import os, time
 from langchain.document_loaders import PyPDFLoader
 from slugify import slugify
 from ..utils import guid, file_creation_time, write_to_server_documents, move_source
@@ -11,6 +11,7 @@ def as_pdf(**kwargs):
   ext = kwargs.get('ext', '.txt')
   remove = kwargs.get('remove_on_complete', False)
   fullpath = f"{parent_dir}/{filename}{ext}"
+  destination = f"../server/storage/documents/{slugify(filename)}-{int(time.time())}"
 
   loader = PyPDFLoader(fullpath)
   pages = loader.load_and_split()
@@ -31,7 +32,7 @@ def as_pdf(**kwargs):
       'pageContent': content,
       'token_count_estimate': len(tokenize(content))
     }
-    write_to_server_documents(data, f"{slugify(filename)}-pg{pg_num}-{data.get('id')}")
+    write_to_server_documents(data, f"{slugify(filename)}-pg{pg_num}-{data.get('id')}", destination)
 
   move_source(parent_dir, f"{filename}{ext}", remove=remove)
   print(f"[SUCCESS]: {filename}{ext} converted & ready for embedding.\n")
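Each page of the PDF is still written as its own JSON document, but now into the per-document folder instead of the shared custom-documents folder. Roughly, the resulting layout looks like this (timestamp, page count, and id values are hypothetical):

    ../server/storage/documents/my-large-report-1693526400/
        my-large-report-pg1-<id>.json
        my-large-report-pg2-<id>.json
        ...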
@@ -28,8 +28,8 @@ def move_source(working_dir='hotdir', new_destination_filename='', failed=False,
     os.replace(f"{working_dir}/{new_destination_filename}", f"{destination}/{new_destination_filename}")
   return
 
-def write_to_server_documents(data, filename):
-  destination = f"../server/storage/documents/custom-documents"
+def write_to_server_documents(data, filename, override_destination = None):
+  destination = f"../server/storage/documents/custom-documents" if override_destination == None else override_destination
   if os.path.exists(destination) == False: os.makedirs(destination)
   with open(f"{destination}/{filename}.json", 'w', encoding='utf-8') as file:
     json.dump(data, file, ensure_ascii=True, indent=4)
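For reference, a minimal self-contained sketch of the helper as it reads after this change, assembled from the hunk above (comparisons tightened to the idiomatic "is None" and "not", which the diff spells as "== None" and "== False"):

    import os, json

    def write_to_server_documents(data, filename, override_destination=None):
      # Default to the shared custom-documents folder unless the caller passes a
      # destination, e.g. the timestamped per-PDF subfolder created in as_pdf.
      destination = "../server/storage/documents/custom-documents" if override_destination is None else override_destination
      if not os.path.exists(destination):
        os.makedirs(destination)
      with open(f"{destination}/{filename}.json", 'w', encoding='utf-8') as file:
        json.dump(data, file, ensure_ascii=True, indent=4)

Any caller that omits the third argument keeps the previous custom-documents behavior, so existing call sites are unaffected.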