
set ollama keep_alive (#985)

### What problem does this PR solve?

#980 

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.7.0
KevinHuSh committed 1 year ago · parent commit f077b57f8b
2 changed files with 6 additions and 4 deletions:

1. api/db/init_data.py (+2, -2)
2. rag/llm/chat_model.py (+4, -2)

#### api/db/init_data.py (+2, -2)

```diff
@@ -532,8 +532,8 @@ def init_llm_factory():
         {
             "fid": factory_infos[12]["name"],
             "llm_name": "BAAI/bge-reranker-v2-m3",
-            "tags": "LLM,CHAT,",
-            "max_tokens": 16385,
+            "tags": "RE-RANK,2k",
+            "max_tokens": 2048,
             "model_type": LLMType.RERANK.value
         },
     ]
```

#### rag/llm/chat_model.py (+4, -2)

```diff
@@ -303,7 +303,8 @@ class OllamaChat(Base):
             response = self.client.chat(
                 model=self.model_name,
                 messages=history,
-                options=options
+                options=options,
+                keep_alive=-1
             )
             ans = response["message"]["content"].strip()
             return ans, response["eval_count"] + response.get("prompt_eval_count", 0)
@@ -325,7 +326,8 @@ class OllamaChat(Base):
                 model=self.model_name,
                 messages=history,
                 stream=True,
-                options=options
+                options=options,
+                keep_alive=-1
             )
             for resp in response:
                 if resp["done"]:
```
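
For context: the Ollama server unloads a model after an idle timeout (five minutes by default), so the next request pays the model-load cost again; `keep_alive=-1` asks the server to keep the model resident indefinitely. Below is a minimal standalone sketch of the same parameter against the `ollama` Python client, independent of this PR's code; the model name and host are placeholders.

```python
# Minimal sketch (not part of this PR): the keep_alive parameter on the
# Ollama Python client. "llama3" and the host are placeholders.
from ollama import Client

client = Client(host="http://localhost:11434")  # default Ollama endpoint

response = client.chat(
    model="llama3",
    messages=[{"role": "user", "content": "hello"}],
    # -1 keeps the model loaded indefinitely; 0 unloads it right after the
    # call; a duration string such as "10m" sets an idle timeout instead.
    keep_alive=-1,
)
print(response["message"]["content"])
```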
