Browse Source

Fix: Add an INFO-level log when falling back to GPT2Tokenizer (#12508)

tags/0.15.1
Gen Sato 9 months ago
parent
commit
dbe7a7c4fd
No account linked to committer's email address

+ 4
- 0
api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenzier.py View File

@@ -1,6 +1,9 @@
import logging
from threading import Lock
from typing import Any

logger = logging.getLogger(__name__)

_tokenizer: Any = None
_lock = Lock()

@@ -43,5 +46,6 @@ class GPT2Tokenizer:
base_path = abspath(__file__)
gpt2_tokenizer_path = join(dirname(base_path), "gpt2")
_tokenizer = TransformerGPT2Tokenizer.from_pretrained(gpt2_tokenizer_path)
logger.info("Fallback to Transformers' GPT-2 tokenizer from tiktoken")

return _tokenizer

Loading…
Cancel
Save