
fix: possible unsent function call in the last chunk of streaming response in OpenAI provider (#2422)

tags/0.5.5
Bowen Liang, 1 year ago
commit 589099a005
1 changed file with 5 additions and 3 deletions

api/core/model_runtime/model_providers/openai/llm/llm.py (+5, -3)

@@ ... @@
         continue
 
     delta = chunk.choices[0]
+    has_finish_reason = delta.finish_reason is not None
 
-    if delta.finish_reason is None and (delta.delta.content is None or delta.delta.content == '') and \
+    if not has_finish_reason and (delta.delta.content is None or delta.delta.content == '') and \
             delta.delta.function_call is None:
         continue
@@ ... @@
         if assistant_message_function_call:
             # start of stream function call
             delta_assistant_message_function_call_storage = assistant_message_function_call
-            continue
+            if not has_finish_reason:
+                continue
 
     # tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls)
     function_call = self._extract_response_function_call(assistant_message_function_call)
@@ ... @@
     full_assistant_content += delta.delta.content if delta.delta.content else ''
 
-    if delta.finish_reason is not None:
+    if has_finish_reason:
         # calculate num tokens
         prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, tools)
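In short: before this change, a chunk that started a function call always hit `continue`, so if that same chunk also carried a `finish_reason` (as the last chunk of the stream can), the loop never reached the finish handling and the accumulated function call was dropped. The change gates that early `continue` on `has_finish_reason`, so a final chunk still falls through to the finish branch. The sketch below is a minimal, self-contained illustration of this control-flow change, not the Dify code itself; the `Delta`/`Choice` dataclasses and the `last_function_call` helper are hypothetical stand-ins for the OpenAI streaming objects.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Delta:
    content: Optional[str] = None
    function_call: Optional[str] = None


@dataclass
class Choice:
    delta: Delta
    finish_reason: Optional[str] = None


def last_function_call(choices, fixed: bool) -> Optional[str]:
    """Accumulate streamed function-call fragments and return what is
    emitted when the stream finishes (None if nothing is emitted)."""
    accumulated = ""
    emitted = None
    for choice in choices:
        has_finish_reason = choice.finish_reason is not None
        if choice.delta.function_call:
            accumulated += choice.delta.function_call
            # Old behaviour: always wait for the next chunk.
            # Fixed behaviour: only keep waiting while the stream has not finished.
            if not fixed or not has_finish_reason:
                continue
        if has_finish_reason:
            # Finish handling: where the provider would yield the final
            # assistant message (and the function call, if any).
            emitted = accumulated or None
    return emitted


# The last chunk both completes the function call and carries finish_reason.
stream = [
    Choice(Delta(function_call='{"name": "get_weather"')),
    Choice(Delta(function_call=', "arguments": "{}"}'), finish_reason="function_call"),
]

print(last_function_call(stream, fixed=False))  # None: the call is silently dropped
print(last_function_call(stream, fixed=True))   # the accumulated function call is emitted
```

The same reasoning applies to the renamed condition further down: computing `has_finish_reason` once and reusing it for both the early skip and the final token accounting keeps the two branches consistent for that last chunk.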


