
conversation_app.py

#
# Copyright 2019 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import tiktoken
from flask import request
from flask_login import login_required, current_user
from api.db.services.dialog_service import DialogService, ConversationService
from api.db import StatusEnum, LLMType
from api.db.services.kb_service import KnowledgebaseService
from api.db.services.llm_service import LLMService, TenantLLMService
from api.db.services.user_service import TenantService
from api.utils.api_utils import server_error_response, get_data_error_result, validate_request
from api.utils import get_uuid
from api.utils.api_utils import get_json_result
from rag.llm import ChatModel
from rag.nlp import retrievaler
from rag.nlp.query import EsQueryer
from rag.utils import num_tokens_from_string, encoder
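
# Conversation endpoints of the dialog service: "/set" creates or updates a
# conversation, "/get", "/rm" and "/list" fetch, delete and enumerate them, and
# "/completion" answers the latest user message with retrieval-augmented chat
# over the dialog's knowledge bases. Note: `manager` (the Flask blueprint these
# routes are registered on) is not defined in this file; it is assumed to be
# injected by the application's route loader.
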
@manager.route('/set', methods=['POST'])
@login_required
@validate_request("dialog_id")
def set():
    req = request.json
    conv_id = req.get("conversation_id")
    if conv_id:
        del req["conversation_id"]
        try:
            if not ConversationService.update_by_id(conv_id, req):
                return get_data_error_result(retmsg="Conversation not found!")
            e, conv = ConversationService.get_by_id(conv_id)
            if not e:
                return get_data_error_result(
                    retmsg="Fail to update a conversation!")
            conv = conv.to_dict()
            return get_json_result(data=conv)
        except Exception as e:
            return server_error_response(e)

    try:
        e, dia = DialogService.get_by_id(req["dialog_id"])
        if not e:
            return get_data_error_result(retmsg="Dialog not found")
        conv = {
            "id": get_uuid(),
            "dialog_id": req["dialog_id"],
            "name": "New conversation",
            "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}]
        }
        ConversationService.save(**conv)
        e, conv = ConversationService.get_by_id(conv["id"])
        if not e:
            return get_data_error_result(retmsg="Fail to new a conversation!")
        conv = conv.to_dict()
        return get_json_result(data=conv)
    except Exception as e:
        return server_error_response(e)


@manager.route('/get', methods=['GET'])
@login_required
def get():
    conv_id = request.args["conversation_id"]
    try:
        e, conv = ConversationService.get_by_id(conv_id)
        if not e:
            return get_data_error_result(retmsg="Conversation not found!")
        conv = conv.to_dict()
        return get_json_result(data=conv)
    except Exception as e:
        return server_error_response(e)


@manager.route('/rm', methods=['POST'])
@login_required
def rm():
    conv_ids = request.json["conversation_ids"]
    try:
        for cid in conv_ids:
            ConversationService.delete_by_id(cid)
        return get_json_result(data=True)
    except Exception as e:
        return server_error_response(e)


@manager.route('/list', methods=['GET'])
@login_required
def list():
    dialog_id = request.args["dialog_id"]
    try:
        convs = ConversationService.query(dialog_id=dialog_id)
        convs = [d.to_dict() for d in convs]
        return get_json_result(data=convs)
    except Exception as e:
        return server_error_response(e)
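

# message_fit_in() trims a chat history into a token budget before it is sent to
# the chat model. The strategy, in order: (1) if the history already fits, keep it;
# (2) drop assistant turns, keeping only system and user messages; (3) keep only
# the system messages plus the latest message; (4) if it still does not fit,
# truncate the system prompt when it accounts for most of the remaining tokens,
# otherwise truncate the latest message (both via the tiktoken encoder). It
# returns (token_count, messages).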
def message_fit_in(msg, max_length=4000):
    def count():
        nonlocal msg
        tks_cnts = []
        for m in msg:
            tks_cnts.append({"role": m["role"], "count": num_tokens_from_string(m["content"])})
        total = 0
        for m in tks_cnts:
            total += m["count"]
        return total

    c = count()
    if c < max_length:
        return c, msg

    # Drop assistant turns first.
    msg = [m for m in msg if m["role"] in ["system", "user"]]
    c = count()
    if c < max_length:
        return c, msg

    # Keep only the system messages plus the latest message.
    msg_ = [m for m in msg[:-1] if m["role"] == "system"]
    msg_.append(msg[-1])
    msg = msg_
    c = count()
    if c < max_length:
        return c, msg

    # Still too long: truncate whichever part dominates the budget.
    ll = num_tokens_from_string(msg_[0]["content"])
    l = num_tokens_from_string(msg_[-1]["content"])
    if ll / (ll + l) > 0.8:
        m = msg_[0]["content"]
        m = encoder.decode(encoder.encode(m)[:max_length - l])
        msg[0]["content"] = m
        return max_length, msg

    m = msg_[1]["content"]
    m = encoder.decode(encoder.encode(m)[:max_length - l])
    msg[1]["content"] = m
    return max_length, msg
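

# chat() is the retrieval-augmented generation step behind "/completion": it looks
# up the dialog's chat model and API key, substitutes optional prompt parameters,
# retrieves chunks relevant to the latest user question from the dialog's knowledge
# bases, fits the history into the model's token window with message_fit_in(), asks
# the chat model for an answer, and finally inserts citations pointing back at the
# retrieved chunks.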
def chat(dialog, messages, **kwargs):
    assert messages[-1]["role"] == "user", "The last content of this conversation is not from user."
    llm = LLMService.query(llm_name=dialog.llm_id)
    if not llm:
        raise LookupError("LLM(%s) not found" % dialog.llm_id)
    llm = llm[0]

    # Fill in the optional prompt parameters; "knowledge" is injected after retrieval.
    prompt_config = dialog.prompt_config
    for p in prompt_config["parameters"]:
        if p["key"] == "knowledge":
            continue
        if p["key"] not in kwargs and not p["optional"]:
            raise KeyError("Miss parameter: " + p["key"])
        if p["key"] not in kwargs:
            prompt_config["system"] = prompt_config["system"].replace("{%s}" % p["key"], " ")

    model_config = TenantLLMService.get_api_key(dialog.tenant_id, LLMType.CHAT.value, dialog.llm_id)
    if not model_config:
        raise LookupError("LLM(%s) API key not found" % dialog.llm_id)

    # Retrieve knowledge-base chunks relevant to the latest user question.
    question = messages[-1]["content"]
    embd_mdl = TenantLLMService.model_instance(
        dialog.tenant_id, LLMType.EMBEDDING.value)
    kbinfos = retrievaler.retrieval(question, embd_mdl, dialog.tenant_id, dialog.kb_ids, 1, dialog.top_n,
                                    dialog.similarity_threshold, dialog.vector_similarity_weight,
                                    top=1024, aggs=False)
    knowledges = [ck["content_ltks"] for ck in kbinfos["chunks"]]
    if not knowledges and prompt_config["empty_response"]:
        return {"answer": prompt_config["empty_response"], "retrieval": kbinfos}

    kwargs["knowledge"] = "\n".join(knowledges)
    gen_conf = dialog.llm_setting[dialog.llm_setting_type]
    msg = [{"role": m["role"], "content": m["content"]} for m in messages if m["role"] != "system"]
    used_token_count, msg = message_fit_in(msg, int(llm.max_tokens * 0.97))
    if "max_tokens" in gen_conf:
        gen_conf["max_tokens"] = min(gen_conf["max_tokens"], llm.max_tokens - used_token_count)

    mdl = ChatModel[model_config.llm_factory](model_config.api_key, dialog.llm_id)
    answer = mdl.chat(prompt_config["system"].format(**kwargs), msg, gen_conf)
    answer = retrievaler.insert_citations(answer,
                                          [ck["content_ltks"] for ck in kbinfos["chunks"]],
                                          [ck["vector"] for ck in kbinfos["chunks"]],
                                          embd_mdl,
                                          tkweight=1 - dialog.vector_similarity_weight,
                                          vtweight=dialog.vector_similarity_weight)
    return {"answer": answer, "retrieval": kbinfos}
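

# "/completion" normalizes the incoming message list before calling chat():
# system messages are dropped (the system prompt comes from the dialog's
# prompt_config) and any assistant messages before the first user turn (e.g. the
# prologue) are skipped; the remaining request fields are forwarded to chat() as
# prompt parameters.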
@manager.route('/completion', methods=['POST'])
@login_required
@validate_request("dialog_id", "messages")
def completion():
    req = request.json
    msg = []
    for m in req["messages"]:
        if m["role"] == "system":
            continue
        if m["role"] == "assistant" and not msg:
            continue
        msg.append({"role": m["role"], "content": m["content"]})
    try:
        e, dia = DialogService.get_by_id(req["dialog_id"])
        if not e:
            return get_data_error_result(retmsg="Dialog not found!")
        del req["dialog_id"]
        del req["messages"]
        return get_json_result(data=chat(dia, msg, **req))
    except Exception as e:
        return server_error_response(e)
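

# Illustrative request body for POST /completion (field names taken from this
# file; the concrete values are made up):
#
#   {
#       "dialog_id": "<dialog uuid>",
#       "messages": [
#           {"role": "assistant", "content": "Hi! How can I help?"},
#           {"role": "user", "content": "What does the warranty cover?"}
#       ]
#   }
#
# Any extra top-level fields are passed through to chat() as **kwargs and used to
# fill optional {placeholders} declared in the dialog's prompt_config["parameters"].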