
conversation_app.py 7.9KB

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from flask import request
from flask_login import login_required
from api.db.services.dialog_service import DialogService, ConversationService
from api.db import LLMType
from api.db.services.llm_service import LLMService, TenantLLMService
from api.utils.api_utils import server_error_response, get_data_error_result, validate_request
from api.utils import get_uuid
from api.utils.api_utils import get_json_result
from rag.llm import ChatModel
from rag.nlp import retrievaler
from rag.utils import num_tokens_from_string, encoder
# NOTE: `manager` (the Flask blueprint these routes attach to) is expected to be
# injected by the surrounding application before this module is loaded; it is
# intentionally not imported here.


@manager.route('/set', methods=['POST'])
@login_required
@validate_request("dialog_id")
def set():
    # Create a conversation for a dialog, or update an existing one when a
    # "conversation_id" is supplied in the request body.
    req = request.json
    conv_id = req.get("conversation_id")
    if conv_id:
        del req["conversation_id"]
        try:
            if not ConversationService.update_by_id(conv_id, req):
                return get_data_error_result(retmsg="Conversation not found!")
            e, conv = ConversationService.get_by_id(conv_id)
            if not e:
                return get_data_error_result(
                    retmsg="Fail to update a conversation!")
            conv = conv.to_dict()
            return get_json_result(data=conv)
        except Exception as e:
            return server_error_response(e)

    try:
        e, dia = DialogService.get_by_id(req["dialog_id"])
        if not e:
            return get_data_error_result(retmsg="Dialog not found")
        # New conversation: seed the history with the dialog's prologue message.
        conv = {
            "id": get_uuid(),
            "dialog_id": req["dialog_id"],
            "name": "New conversation",
            "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}]
        }
        ConversationService.save(**conv)
        e, conv = ConversationService.get_by_id(conv["id"])
        if not e:
            return get_data_error_result(retmsg="Fail to new a conversation!")
        conv = conv.to_dict()
        return get_json_result(data=conv)
    except Exception as e:
        return server_error_response(e)
@manager.route('/get', methods=['GET'])
@login_required
def get():
    conv_id = request.args["conversation_id"]
    try:
        e, conv = ConversationService.get_by_id(conv_id)
        if not e:
            return get_data_error_result(retmsg="Conversation not found!")
        conv = conv.to_dict()
        return get_json_result(data=conv)
    except Exception as e:
        return server_error_response(e)


@manager.route('/rm', methods=['POST'])
@login_required
def rm():
    conv_ids = request.json["conversation_ids"]
    try:
        for cid in conv_ids:
            ConversationService.delete_by_id(cid)
        return get_json_result(data=True)
    except Exception as e:
        return server_error_response(e)


@manager.route('/list', methods=['GET'])
@login_required
def list():
    dialog_id = request.args["dialog_id"]
    try:
        convs = ConversationService.query(dialog_id=dialog_id)
        convs = [d.to_dict() for d in convs]
        return get_json_result(data=convs)
    except Exception as e:
        return server_error_response(e)
def message_fit_in(msg, max_length=4000):
    """Trim a message list so its token count fits within max_length.

    Returns (token_count, messages). Messages are plain dicts with "role" and
    "content" keys, so they are accessed with item syntax throughout.
    """
    def count():
        nonlocal msg
        tks_cnts = []
        for m in msg:
            tks_cnts.append(
                {"role": m["role"], "count": num_tokens_from_string(m["content"])})
        total = 0
        for m in tks_cnts:
            total += m["count"]
        return total

    c = count()
    if c < max_length:
        return c, msg

    # Too long: drop assistant turns first.
    msg = [m for m in msg if m["role"] in ["system", "user"]]
    c = count()
    if c < max_length:
        return c, msg

    # Still too long: keep only the system messages plus the latest user turn.
    msg_ = [m for m in msg[:-1] if m["role"] == "system"]
    msg_.append(msg[-1])
    msg = msg_
    c = count()
    if c < max_length:
        return c, msg

    # Still too long: truncate whichever part dominates the token budget.
    ll = num_tokens_from_string(msg_[0]["content"])
    l = num_tokens_from_string(msg_[-1]["content"])
    if ll / (ll + l) > 0.8:
        # The system prompt takes most of the budget; truncate it.
        m = msg_[0]["content"]
        m = encoder.decode(encoder.encode(m)[:max_length - l])
        msg[0]["content"] = m
        return max_length, msg

    # Otherwise truncate the other message.
    m = msg_[1]["content"]
    m = encoder.decode(encoder.encode(m)[:max_length - l])
    msg[1]["content"] = m
    return max_length, msg
@manager.route('/completion', methods=['POST'])
@login_required
@validate_request("dialog_id", "messages")
def completion():
    req = request.json
    msg = []
    # Drop system messages and any leading assistant prologue before chatting.
    for m in req["messages"]:
        if m["role"] == "system":
            continue
        if m["role"] == "assistant" and not msg:
            continue
        msg.append({"role": m["role"], "content": m["content"]})
    try:
        e, dia = DialogService.get_by_id(req["dialog_id"])
        if not e:
            return get_data_error_result(retmsg="Dialog not found!")
        del req["dialog_id"]
        del req["messages"]
        # Any remaining request fields are passed through as prompt parameters.
        return get_json_result(data=chat(dia, msg, **req))
    except Exception as e:
        return server_error_response(e)
def chat(dialog, messages, **kwargs):
    assert messages[-1]["role"] == "user", "The last content of this conversation is not from user."
    llm = LLMService.query(llm_name=dialog.llm_id)
    if not llm:
        raise LookupError("LLM(%s) not found" % dialog.llm_id)
    llm = llm[0]

    # Fill in prompt parameters; a missing required parameter is an error,
    # a missing optional one is blanked out of the system prompt.
    prompt_config = dialog.prompt_config
    for p in prompt_config["parameters"]:
        if p["key"] == "knowledge":
            continue
        if p["key"] not in kwargs and not p["optional"]:
            raise KeyError("Miss parameter: " + p["key"])
        if p["key"] not in kwargs:
            prompt_config["system"] = prompt_config["system"].replace(
                "{%s}" % p["key"], " ")

    model_config = TenantLLMService.get_api_key(dialog.tenant_id, dialog.llm_id)
    if not model_config:
        raise LookupError("LLM({}) API key not found".format(dialog.llm_id))

    # Retrieve knowledge-base chunks relevant to the latest user question.
    question = messages[-1]["content"]
    embd_mdl = TenantLLMService.model_instance(
        dialog.tenant_id, LLMType.EMBEDDING.value)
    kbinfos = retrievaler.retrieval(question, embd_mdl, dialog.tenant_id, dialog.kb_ids, 1, dialog.top_n,
                                    dialog.similarity_threshold, dialog.vector_similarity_weight,
                                    top=1024, aggs=False)
    knowledges = [ck["content_ltks"] for ck in kbinfos["chunks"]]
    if not knowledges and prompt_config["empty_response"]:
        return {"answer": prompt_config["empty_response"], "retrieval": kbinfos}

    kwargs["knowledge"] = "\n".join(knowledges)
    gen_conf = dialog.llm_setting[dialog.llm_setting_type]
    msg = [{"role": m["role"], "content": m["content"]} for m in messages if m["role"] != "system"]
    # Keep the prompt within the model's context window, leaving room for the answer.
    used_token_count, msg = message_fit_in(msg, int(llm.max_tokens * 0.97))
    if "max_tokens" in gen_conf:
        gen_conf["max_tokens"] = min(
            gen_conf["max_tokens"], llm.max_tokens - used_token_count)
    mdl = ChatModel[model_config.llm_factory](model_config.api_key, dialog.llm_id)
    answer = mdl.chat(prompt_config["system"].format(**kwargs), msg, gen_conf)

    # Insert citations into the answer and strip raw vectors before returning.
    answer = retrievaler.insert_citations(answer,
                                          [ck["content_ltks"] for ck in kbinfos["chunks"]],
                                          [ck["vector"] for ck in kbinfos["chunks"]],
                                          embd_mdl,
                                          tkweight=1 - dialog.vector_similarity_weight,
                                          vtweight=dialog.vector_similarity_weight)
    for c in kbinfos["chunks"]:
        if c.get("vector"):
            del c["vector"]
    return {"answer": answer, "retrieval": kbinfos}
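
For context, here is a minimal client-side sketch of how these endpoints might be exercised. It is not part of the file above: the host, port, URL prefix, the pre-authenticated session, and the `data` field in the JSON envelope produced by get_json_result are all assumptions about how the surrounding application is wired.

# example_client.py -- hypothetical usage sketch, not part of the repository.
import requests

BASE_URL = "http://localhost:9380/v1/conversation"  # assumed host, port and blueprint prefix
session = requests.Session()                        # assumed to already carry a valid login cookie

# 1) Create a conversation for an existing dialog (/set without "conversation_id").
conv = session.post(f"{BASE_URL}/set",
                    json={"dialog_id": "<dialog-uuid>"}).json()["data"]

# 2) Ask a question (/completion). The handler drops system messages and the
#    leading assistant prologue, then answers with retrieved knowledge and citations.
question = {"role": "user", "content": "What does the knowledge base say about this topic?"}
result = session.post(f"{BASE_URL}/completion", json={
    "dialog_id": conv["dialog_id"],
    "messages": conv["message"] + [question],
}).json()["data"]
print(result["answer"])

# 3) Persist the updated history back to the conversation (/set with "conversation_id").
session.post(f"{BASE_URL}/set", json={
    "conversation_id": conv["id"],
    "message": conv["message"] + [question, {"role": "assistant", "content": result["answer"]}],
})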