Split entries by max tokens while converting Beancount entries to JSONL

Debanjum Singh Solanky 2022-12-26 15:14:32 -03:00
parent f209e30a3b
commit 17fa123b4e


@@ -35,6 +35,12 @@ class BeancountToJsonl(TextToJsonl):
 end = time.time()
 logger.debug(f"Parse transactions from Beancount files into dictionaries: {end - start} seconds")
+# Split entries by max tokens supported by model
+start = time.time()
+current_entries = self.split_entries_by_max_tokens(current_entries, max_tokens=256)
+end = time.time()
+logger.debug(f"Split entries by max token size supported by model: {end - start} seconds")
 # Identify, mark and merge any new entries with previous entries
 start = time.time()
 if not previous_entries:
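
The hunk above adds a split_entries_by_max_tokens pass between parsing and merging, so no single entry exceeds the 256-token limit supported by the model. The method body itself is not part of this hunk; the following is only a minimal sketch of the idea, assuming entries are dictionaries with a "compiled" text field and approximating tokens as whitespace-separated words (the actual tokenizer and entry schema in the repository may differ):

    def split_entries_by_max_tokens(entries, max_tokens=256):
        # Break each entry's compiled text into chunks of at most max_tokens words,
        # emitting one entry per chunk so no chunk exceeds the model's token limit.
        chunked_entries = []
        for entry in entries:
            words = entry["compiled"].split()
            for start in range(0, len(words), max_tokens):
                chunk = " ".join(words[start : start + max_tokens])
                chunked_entries.append({**entry, "compiled": chunk})
        return chunked_entries

Entries shorter than max_tokens pass through as a single chunk, so the split is effectively a no-op for typical short Beancount transactions.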