Skip to content

Commit

Permalink
Log the embeddings cache location at debug level
Browse files Browse the repository at this point in the history
  • Loading branch information
yonromai committed Oct 12, 2023
1 parent d1b8c0e commit 8b2121d
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions nxontology_ml/text_embeddings/embeddings_model.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import pickle
import re
from collections import Counter
Expand Down Expand Up @@ -117,11 +118,11 @@ def from_pretrained(
Note: pretrained_model_name should be an encoder only model (e.g. BERT)
"""
# FIXME: should we add truncation of input??
cache_filename = (cache_path or _cache_path(pretrained_model_name)).as_posix()
logging.debug(f"Caching embeddings into: {cache_filename}")
return cls(
lazy_model=lazy_model or _LazyAutoModel(pretrained_model_name),
pooler_attr=_model_poolers[pretrained_model_name],
cache=LazyLSM(
filename=(cache_path or _cache_path(pretrained_model_name)).as_posix()
),
cache=LazyLSM(filename=cache_filename),
counter=counter or Counter(),
)

0 comments on commit 8b2121d

Please sign in to comment.