Browse Source

Fix: Claude max tokens. (#6484)

### What problem does this PR solve?

#6458

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.18.0
Kevin Hu 7 months ago
parent
commit
095fc84cf2
No account linked to committer's email address
1 changed files with 2 additions and 2 deletions
  1. 2
    2
      rag/llm/chat_model.py

+ 2
- 2
rag/llm/chat_model.py View File

@@ -1443,7 +1443,7 @@ class AnthropicChat(Base):
del gen_conf["presence_penalty"]
if "frequency_penalty" in gen_conf:
del gen_conf["frequency_penalty"]
-            gen_conf["max_tokens"] = 8196
+            gen_conf["max_tokens"] = 8192
if "haiku" in self.model_name or "opus" in self.model_name:
gen_conf["max_tokens"] = 4096

@@ -1477,7 +1477,7 @@ class AnthropicChat(Base):
del gen_conf["presence_penalty"]
if "frequency_penalty" in gen_conf:
del gen_conf["frequency_penalty"]
-            gen_conf["max_tokens"] = 8196
+            gen_conf["max_tokens"] = 8192
if "haiku" in self.model_name or "opus" in self.model_name:
gen_conf["max_tokens"] = 4096


Loading…
Cancel
Save