Update CLIP model load path

Debanjum Singh Solanky 2021-10-02 16:50:06 -07:00
parent 31e21a6e4e
commit f59e321419


@@ -18,7 +18,7 @@ from src.utils.config import ImageSearchModel, ImageSearchConfig
 def initialize_model():
     # Initialize Model
     torch.set_num_threads(4)
-    encoder = SentenceTransformer('clip-ViT-B-32') #Load the CLIP model
+    encoder = SentenceTransformer('sentence-transformers/clip-ViT-B-32') #Load the CLIP model
     return encoder
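
For reference, a minimal sketch of how the encoder returned with the updated load path could be used to embed an image and a text query with sentence-transformers. The image path and query string below are placeholder examples, not part of the repository.

# Sketch: load the CLIP encoder via the updated model path and compare an image with a text query.
from PIL import Image
from sentence_transformers import SentenceTransformer, util

encoder = SentenceTransformer('sentence-transformers/clip-ViT-B-32')

# Placeholder inputs for illustration only
image_embedding = encoder.encode(Image.open('photo.jpg'), convert_to_tensor=True)
query_embedding = encoder.encode('a dog playing in the park', convert_to_tensor=True)

# Cosine similarity between the text query and the image embedding
similarity = util.cos_sim(query_embedding, image_embedding)
print(similarity)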