Browse Source

fix: bedrock get_num_tokens prompt_messages parameter name err (#4932)

tags/0.6.10
takatost 1 year ago
parent
commit
f44d1e62d2
No account linked to committer's email address
1 changed file with 5 additions and 6 deletions
  1. 5
    6
      api/core/model_runtime/model_providers/bedrock/llm/llm.py

+ 5
- 6
api/core/model_runtime/model_providers/bedrock/llm/llm.py View File

@@ -358,26 +358,25 @@ class BedrockLargeLanguageModel(LargeLanguageModel):

return message_dict

def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage] | str,
                   tools: Optional[list[PromptMessageTool]] = None) -> int:
    """
    Get number of tokens for given prompt messages.

    :param model: model name
    :param credentials: model credentials
    :param prompt_messages: prompt messages or message string
    :param tools: tools for tool calling
    :return: number of tokens in the rendered prompt (GPT-2 tokenizer approximation)
    """
    # Bedrock model IDs are "<provider>.<model_name>" — presumably e.g.
    # "anthropic.claude-v2"; a bare model id without a '.' would raise
    # IndexError here (TODO confirm upstream always passes a qualified id).
    prefix = model.split('.')[0]
    model_name = model.split('.')[1]

    # A raw string is counted as-is; message lists are first rendered into a
    # single provider-specific prompt string.
    if isinstance(prompt_messages, str):
        prompt = prompt_messages
    else:
        prompt = self._convert_messages_to_prompt(prompt_messages, prefix, model_name)

    return self._get_num_tokens_by_gpt2(prompt)

def validate_credentials(self, model: str, credentials: dict) -> None:
"""

Loading…
Cancel
Save