Browse Source

fix ollama max token issue (#1489)

### What problem does this PR solve?

Fixes the Ollama max-token issue: when a tenant LLM record is not found, the default `max_tokens` fallback in `chat` is raised from 1024 to 8192 so Ollama responses are not truncated. Also runs the dev server with 5 worker processes instead of threads, and enlarges the default `message_history_window_size` from 4 to 22.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.9.0
Kevin Hu 1 year ago
parent
commit
a5a617b7a3
No account linked to committer's email address
3 changed files with 3 additions and 3 deletions
  1. 1
    1
      api/db/services/dialog_service.py
  2. 1
    1
      api/ragflow_server.py
  3. 1
    1
      graph/component/base.py

+ 1
- 1
api/db/services/dialog_service.py View File

@@ -80,7 +80,7 @@ def chat(dialog, messages, stream=True, **kwargs):
llm = TenantLLMService.query(tenant_id=dialog.tenant_id, llm_name=dialog.llm_id)
if not llm:
raise LookupError("LLM(%s) not found" % dialog.llm_id)
max_tokens = 1024
max_tokens = 8192
else:
max_tokens = llm[0].max_tokens
kbs = KnowledgebaseService.get_by_ids(dialog.kb_ids)

+ 1
- 1
api/ragflow_server.py View File

@@ -94,7 +94,7 @@ if __name__ == '__main__':
werkzeug_logger = logging.getLogger("werkzeug")
for h in access_logger.handlers:
werkzeug_logger.addHandler(h)
run_simple(hostname=HOST, port=HTTP_PORT, application=app, threaded=True, use_reloader=RuntimeConfig.DEBUG, use_debugger=RuntimeConfig.DEBUG)
run_simple(hostname=HOST, port=HTTP_PORT, application=app, processes=5, use_reloader=RuntimeConfig.DEBUG, use_debugger=RuntimeConfig.DEBUG)
except Exception:
traceback.print_exc()
os.kill(os.getpid(), signal.SIGKILL)

+ 1
- 1
graph/component/base.py View File

@@ -35,7 +35,7 @@ _IS_RAW_CONF = "_is_raw_conf"
class ComponentParamBase(ABC):
def __init__(self):
self.output_var_name = "output"
self.message_history_window_size = 4
self.message_history_window_size = 22

def set_name(self, name: str):
self._name = name

Loading…
Cancel
Save