Sfoglia il codice sorgente

fix: template string in template error (#162)

tags/0.2.2
John Wang 2 anni fa
parent
commit
f3219ff107
Nessun account collegato all'indirizzo email del committer
1 file modificato con 15 aggiunte e 2 eliminazioni
  1. 15
    2
      api/core/completion.py

+ 15
- 2
api/core/completion.py Vedi File

@@ -151,6 +151,11 @@ And answer according to the language of the user's question.

if chain_output:
inputs['context'] = chain_output
context_params = OutLinePromptTemplate.from_template(template=chain_output).input_variables
if context_params:
for context_param in context_params:
if context_param not in inputs:
inputs[context_param] = '{' + context_param + '}'

prompt_inputs = {k: inputs[k] for k in prompt_template.input_variables if k in inputs}
prompt_content = prompt_template.format(
@@ -210,8 +215,16 @@ And answer according to the language of the user's question.
rest_tokens = llm_constant.max_context_token_length[memory.llm.model_name] \
- memory.llm.max_tokens - curr_message_tokens
rest_tokens = max(rest_tokens, 0)
history_messages = cls.get_history_messages_from_memory(memory, rest_tokens)
human_message_prompt += "\n\n" + history_messages
histories = cls.get_history_messages_from_memory(memory, rest_tokens)

# disable template string in query
histories_params = OutLinePromptTemplate.from_template(template=histories).input_variables
if histories_params:
for histories_param in histories_params:
if histories_param not in human_inputs:
human_inputs[histories_param] = '{' + histories_param + '}'

human_message_prompt += "\n\n" + histories

human_message_prompt += query_prompt


Loading…
Annulla
Salva