diff --git a/src/configure.py b/src/configure.py
index 81952280..45e1661e 100644
--- a/src/configure.py
+++ b/src/configure.py
@@ -34,7 +34,9 @@ def configure_server(args, required=False):
     state.config = args.config
 
     # Initialize the search model from Config
+    state.search_index_lock.acquire()
     state.model = configure_search(state.model, state.config, args.regenerate)
+    state.search_index_lock.release()
 
     # Initialize Processor from Config
     state.processor_config = configure_processor(args.config.processor)
@@ -42,7 +44,9 @@
 
 @schedule.repeat(schedule.every(1).hour)
 def update_search_index():
+    state.search_index_lock.acquire()
     state.model = configure_search(state.model, state.config, regenerate=False)
+    state.search_index_lock.release()
     logger.info("Search Index updated via Scheduler")
 
 
diff --git a/src/routers/api.py b/src/routers/api.py
index 313e48c9..f92a6f1e 100644
--- a/src/routers/api.py
+++ b/src/routers/api.py
@@ -125,7 +125,9 @@ def search(q: str, n: Optional[int] = 5, t: Optional[SearchType] = None, r: Opti
 
 @api.get('/update')
 def update(t: Optional[SearchType] = None, force: Optional[bool] = False):
+    state.search_index_lock.acquire()
     state.model = configure_search(state.model, state.config, regenerate=force, t=t)
+    state.search_index_lock.release()
     logger.info("Search Index updated via API call")
     return {'status': 'ok', 'message': 'index updated'}
 
diff --git a/src/utils/state.py b/src/utils/state.py
index 283d2b5a..0e323b89 100644
--- a/src/utils/state.py
+++ b/src/utils/state.py
@@ -1,4 +1,5 @@
 # Standard Packages
+import threading
 from packaging import version
 
 # External Packages
@@ -20,6 +21,7 @@ host: str = None
 port: int = None
 cli_args: list[str] = None
 query_cache = LRU()
+search_index_lock = threading.Lock()
 
 if torch.cuda.is_available():
     # Use CUDA GPU
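
The change guards every write to `state.model` with the new module-level `state.search_index_lock`, so server startup, the hourly scheduler job, and the `/update` endpoint cannot regenerate the search index concurrently. The explicit `acquire()`/`release()` pairs do leave the lock held if `configure_search` raises. As a minimal sketch, assuming the same `state` module and `configure_search` function touched in this diff (the helper name `locked_index_update` and its signature are hypothetical, not part of the change), the same guard can be written as a `with` block, which releases the lock on any exit path:

```python
# Sketch only: the same lock expressed as a context manager, so it is
# released even if configure_search() raises. `state` and `configure_search`
# come from the modules in this diff; `locked_index_update` is hypothetical.
import logging

from src.configure import configure_search
from src.utils import state

logger = logging.getLogger(__name__)


def locked_index_update(regenerate: bool = False, t=None):
    # `with` acquires search_index_lock and guarantees its release on
    # return or exception, unlike a bare acquire()/release() pair.
    with state.search_index_lock:
        state.model = configure_search(state.model, state.config, regenerate=regenerate, t=t)
    logger.info("Search Index updated")
```

Either form serializes the scheduler job, the `/update` route, and startup configuration on the single lock defined in `src/utils/state.py`; the `with` form only differs when `configure_search` throws.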