Ver código fonte

feat: add DeepSeek R1 think display for Ollama provider (#13272)

tags/0.15.3
呆萌闷油瓶 9 meses atrás
pai
commit
0d13aee15c
Nenhuma conta vinculada ao e-mail do autor do commit

+ 9
- 0
api/core/model_runtime/model_providers/ollama/llm/llm.py Ver arquivo

""" """
full_text = "" full_text = ""
chunk_index = 0 chunk_index = 0
is_reasoning_started = False


def create_final_llm_result_chunk(
    index: int, message: AssistantPromptMessage, finish_reason: str


# transform assistant message to prompt message
text = chunk_json["response"]
if "<think>" in text:
is_reasoning_started = True
text = text.replace("<think>", "> 💭 ")
elif "</think>" in text:
is_reasoning_started = False
text = text.replace("</think>", "") + "\n\n"
elif is_reasoning_started:
text = text.replace("\n", "\n> ")


assistant_prompt_message = AssistantPromptMessage(content=text)



Carregando…
Cancelar
Salvar