Browse Source

fix: possibly unsent function call in the last chunk of a streaming response in the OpenAI provider (#2422)

tags/0.5.5
Bowen Liang 1 year ago
parent
commit
589099a005
No account linked to committer's email address
1 changed file with 5 additions and 3 deletions
  1. api/core/model_runtime/model_providers/openai/llm/llm.py (+5 / −3)

api/core/model_runtime/model_providers/openai/llm/llm.py — View File

@@ -497,8 +497,9 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
continue

delta = chunk.choices[0]
has_finish_reason = delta.finish_reason is not None

if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == '') and \
if not has_finish_reason and (delta.delta.content is None or delta.delta.content == '') and \
delta.delta.function_call is None:
continue

@@ -520,7 +521,8 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
if assistant_message_function_call:
# start of stream function call
delta_assistant_message_function_call_storage = assistant_message_function_call
continue
if not has_finish_reason:
continue

# tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls)
function_call = self._extract_response_function_call(assistant_message_function_call)
@@ -534,7 +536,7 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):

full_assistant_content += delta.delta.content if delta.delta.content else ''

if delta.finish_reason is not None:
if has_finish_reason:
# calculate num tokens
prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools)


Loading…
Cancel
Save