
fix: generate summary error when tokens=4097 (#488)

tags/0.3.7
John Wang, 2 years ago
commit b9b0866a46

api/core/generator/llm_generator.py (+4, -1)

     prompt = CONVERSATION_SUMMARY_PROMPT
     prompt_with_empty_context = prompt.format(context='')
     prompt_tokens = TokenCalculator.get_num_tokens(model, prompt_with_empty_context)
-    rest_tokens = llm_constant.max_context_token_length[model] - prompt_tokens - max_tokens
+    rest_tokens = llm_constant.max_context_token_length[model] - prompt_tokens - max_tokens - 1

     context = ''
     for message in messages:
         if rest_tokens - TokenCalculator.get_num_tokens(model, context + message_qa_text) > 0:
             context += message_qa_text

+    if not context:
+        return '[message too long, no summary]'
+
     prompt = prompt.format(context=context)

     llm: StreamableOpenAI = LLMBuilder.to_llm(
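The change above has two parts: the trailing `- 1` keeps the requested total (prompt plus completion) strictly below the model's context window, which is the "requested 4097 tokens against a 4096-token limit" rejection this commit addresses, and the new `if not context` guard bails out when a single oversized message leaves no conversation text within budget. A minimal sketch of that logic, where `count_tokens` and `MAX_CONTEXT` are illustrative stand-ins for `TokenCalculator.get_num_tokens` and `llm_constant.max_context_token_length`, not the project's API:

    # Assumed window size for illustration only.
    MAX_CONTEXT = {'gpt-3.5-turbo': 4096}

    def count_tokens(text: str) -> int:
        # Crude stand-in for a real tokenizer: roughly 1 token
        # per 4 characters of English text.
        return max(1, len(text) // 4)

    def build_summary_context(model: str, prompt_tokens: int, max_tokens: int,
                              messages: list[str]) -> str | None:
        # Budget left for conversation text. The -1 is a one-token safety
        # margin so prompt + completion can never land exactly one token
        # over the window.
        rest_tokens = MAX_CONTEXT[model] - prompt_tokens - max_tokens - 1

        context = ''
        for qa_text in messages:
            # Append a message only while the running context fits the budget.
            if rest_tokens - count_tokens(context + qa_text) > 0:
                context += qa_text

        # A single oversized message can leave the context empty; signal
        # the caller instead of sending a prompt with nothing to summarize.
        return context or None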

api/tasks/generate_conversation_summary_task.py (+1, -1)

     try:
         # get conversation messages count
         history_message_count = conversation.message_count
-        if history_message_count >= 5:
+        if history_message_count >= 5 and not conversation.summary:
             app_model = conversation.app
             if not app_model:
                 return
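The task-side change guards against redundant work: previously the summary was regenerated on every run once a conversation reached five messages, while the added `not conversation.summary` check makes the task a no-op when a summary already exists. A sketch of the condition, using a hypothetical `Conversation` dataclass in place of the ORM model:

    from dataclasses import dataclass

    @dataclass
    class Conversation:
        # Hypothetical stand-in for the ORM model used by the task.
        message_count: int
        summary: str = ''

    def should_generate_summary(conversation: Conversation) -> bool:
        # Summarize only conversations with enough history (>= 5 messages)
        # that have not been summarized yet, matching the patched condition.
        return conversation.message_count >= 5 and not conversation.summary

    # A fresh 6-message conversation qualifies; once a summary is stored,
    # re-running the task skips it.
    assert should_generate_summary(Conversation(message_count=6))
    assert not should_generate_summary(Conversation(message_count=6, summary='done'))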
