
fix jina module not found bug (#1779)

### What problem does this PR solve?

Fixes the "jina module not found" error: the import of `Prompt` and `Generation` from `rag.svr.jina_server` is moved from module level in `rag/llm/chat_model.py` into the `LocalLLM` methods that use it, so importing `chat_model.py` no longer fails when the jina server module is unavailable.
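
For illustration, a minimal sketch of the deferred-import pattern this change applies. The class and method names mirror the diff below, but the body is simplified and is not the actual RAGFlow implementation.

```python
# Sketch of the lazy-import fix (illustrative, simplified from the diff).
# Importing rag.svr.jina_server at module level made chat_model.py fail to
# import whenever that module (or jina itself) was missing; deferring the
# import into the methods that need it avoids that failure.

class LocalLLM:  # simplified stand-in for the real class in chat_model.py
    def _prepare_prompt(self, system, history, gen_conf):
        # Deferred import: only resolved when a local LLM is actually used,
        # so merely importing this module does not require jina_server.
        from rag.svr.jina_server import Prompt

        if system:
            history.insert(0, {"role": "system", "content": system})
        return Prompt(message=history, gen_conf=gen_conf)
```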

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
tags/v0.9.0
黄腾 1 year ago
parent
commit
4ba1ba973a
1 changed file with 2 additions and 1 deletion

rag/llm/chat_model.py (+2, -1)

@@ -28,7 +28,6 @@ import os
 import json
 import requests
 import asyncio
-from rag.svr.jina_server import Prompt,Generation
 
 class Base(ABC):
     def __init__(self, key, model_name, base_url):
@@ -413,6 +412,7 @@ class LocalLLM(Base):
         self.client = Client(port=12345, protocol="grpc", asyncio=True)
 
     def _prepare_prompt(self, system, history, gen_conf):
+        from rag.svr.jina_server import Prompt,Generation
         if system:
             history.insert(0, {"role": "system", "content": system})
         if "max_tokens" in gen_conf:
@@ -420,6 +420,7 @@ class LocalLLM(Base):
             return Prompt(message=history, gen_conf=gen_conf)
 
     def _stream_response(self, endpoint, prompt):
+        from rag.svr.jina_server import Prompt,Generation
         answer = ""
         try:
             res = self.client.stream_doc(
