
llm_app.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from flask import request
from flask_login import login_required, current_user

from api.db.services.llm_service import LLMFactoriesService, TenantLLMService, LLMService
from api.utils.api_utils import server_error_response, get_data_error_result, validate_request
from api.db import StatusEnum, LLMType
from api.db.db_models import TenantLLM
from api.utils.api_utils import get_json_result
from rag.llm import EmbeddingModel, ChatModel, RerankModel


# Note: `manager` (the Flask blueprint these routes attach to) is not imported here;
# it is injected into this module's namespace by the application loader.
@manager.route('/factories', methods=['GET'])
@login_required
def factories():
    """List the available LLM factories, hiding the built-in local providers."""
    try:
        fac = LLMFactoriesService.get_all()
        return get_json_result(data=[f.to_dict() for f in fac if f.name not in ["Youdao", "FastEmbed", "BAAI"]])
    except Exception as e:
        return server_error_response(e)
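
# A hedged usage sketch (not part of the module): assuming a BASE_URL pointing at the
# mounted blueprint and an auth_headers dict for the logged-in session (both are
# assumptions, not values defined in this file), a client could list factories like so:
#
#   import requests
#   resp = requests.get(BASE_URL + "/factories", headers=auth_headers)
#   for fac in resp.json()["data"]:
#       print(fac["name"], fac.get("tags"))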


@manager.route('/set_api_key', methods=['POST'])
@login_required
@validate_request("llm_factory", "api_key")
def set_api_key():
    """Validate the submitted API key against the factory's models, then store it for the tenant."""
    req = request.json
    # test if api key works
    chat_passed = False
    factory = req["llm_factory"]
    msg = ""
    for llm in LLMService.query(fid=factory):
        if llm.model_type == LLMType.EMBEDDING.value:
            mdl = EmbeddingModel[factory](
                req["api_key"], llm.llm_name, base_url=req.get("base_url"))
            try:
                arr, tc = mdl.encode(["Test if the api key is available"])
                if len(arr[0]) == 0 or tc == 0:
                    raise Exception("Fail")
            except Exception as e:
                msg += f"\nFail to access embedding model({llm.llm_name}) using this api key." + str(e)
        elif not chat_passed and llm.model_type == LLMType.CHAT.value:
            mdl = ChatModel[factory](
                req["api_key"], llm.llm_name, base_url=req.get("base_url"))
            try:
                m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}],
                                 {"temperature": 0.9})
                if not tc:
                    raise Exception(m)
                chat_passed = True
            except Exception as e:
                msg += f"\nFail to access model({llm.llm_name}) using this api key." + str(e)
        elif llm.model_type == LLMType.RERANK.value:
            mdl = RerankModel[factory](
                req["api_key"], llm.llm_name, base_url=req.get("base_url"))
            try:
                arr, tc = mdl.similarity("What's the weather?", ["Is it sunny today?"])
                if len(arr) == 0 or tc == 0:
                    raise Exception("Fail")
            except Exception as e:
                msg += f"\nFail to access model({llm.llm_name}) using this api key." + str(e)

    if msg:
        return get_data_error_result(retmsg=msg)

    llm = {
        "api_key": req["api_key"],
        "api_base": req.get("base_url", "")
    }
    for n in ["model_type", "llm_name"]:
        if n in req:
            llm[n] = req[n]

    if not TenantLLMService.filter_update(
            [TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory], llm):
        for llm in LLMService.query(fid=factory):
            TenantLLMService.save(
                tenant_id=current_user.id,
                llm_factory=factory,
                llm_name=llm.llm_name,
                model_type=llm.model_type,
                api_key=req["api_key"],
                api_base=req.get("base_url", "")
            )

    return get_json_result(data=True)
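
# Illustrative request body for /set_api_key, reconstructed from the fields read above;
# the factory name and key value are assumptions, not project defaults. "base_url" is
# optional and is stored as api_base.
#
#   {
#       "llm_factory": "OpenAI",
#       "api_key": "sk-...",
#       "base_url": "https://api.openai.com/v1"
#   }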


@manager.route('/add_llm', methods=['POST'])
@login_required
@validate_request("llm_factory", "llm_name", "model_type")
def add_llm():
    """Register a single, manually configured model for the current tenant, probing it first."""
    req = request.json
    factory = req["llm_factory"]

    # For VolcEngine, due to its special authentication method,
    # assemble volc_ak, volc_sk and endpoint_id into the api_key field.
    if factory == "VolcEngine":
        # llm_name arrives as the string form of a one-item dict: {model_name: endpoint_id}
        temp = list(eval(req["llm_name"]).items())[0]
        llm_name = temp[0]
        endpoint_id = temp[1]
        api_key = '{' + f'"volc_ak": "{req.get("volc_ak", "")}", ' \
                        f'"volc_sk": "{req.get("volc_sk", "")}", ' \
                        f'"ep_id": "{endpoint_id}", ' + '}'
    else:
        llm_name = req["llm_name"]
        api_key = "xxxxxxxxxxxxxxx"

    llm = {
        "tenant_id": current_user.id,
        "llm_factory": factory,
        "model_type": req["model_type"],
        "llm_name": llm_name,
        "api_base": req.get("api_base", ""),
        "api_key": api_key
    }

    msg = ""
    if llm["model_type"] == LLMType.EMBEDDING.value:
        mdl = EmbeddingModel[factory](
            key=None, model_name=llm["llm_name"], base_url=llm["api_base"])
        try:
            arr, tc = mdl.encode(["Test if the api key is available"])
            if len(arr[0]) == 0 or tc == 0:
                raise Exception("Fail")
        except Exception as e:
            msg += f"\nFail to access embedding model({llm['llm_name']})." + str(e)
    elif llm["model_type"] == LLMType.CHAT.value:
        mdl = ChatModel[factory](
            key=llm['api_key'] if factory == "VolcEngine" else None,
            model_name=llm["llm_name"],
            base_url=llm["api_base"]
        )
        try:
            m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}],
                             {"temperature": 0.9})
            if not tc:
                raise Exception(m)
        except Exception as e:
            msg += f"\nFail to access model({llm['llm_name']})." + str(e)
    else:
        # TODO: check other types of models
        pass

    if msg:
        return get_data_error_result(retmsg=msg)

    if not TenantLLMService.filter_update(
            [TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory,
             TenantLLM.llm_name == llm["llm_name"]], llm):
        TenantLLMService.save(**llm)

    return get_json_result(data=True)
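
# Illustrative request bodies for /add_llm, derived from the fields the handler reads;
# all concrete names below are assumptions. For VolcEngine, llm_name is expected to be
# the string form of a one-item dict mapping the model name to its endpoint id, since it
# is parsed with eval() above.
#
#   Generic factory:
#   {"llm_factory": "Ollama", "llm_name": "llama3", "model_type": "chat",
#    "api_base": "http://localhost:11434"}
#
#   VolcEngine:
#   {"llm_factory": "VolcEngine", "llm_name": "{'Skylark2-pro-32k': 'ep-xxxx'}",
#    "model_type": "chat", "volc_ak": "<access key>", "volc_sk": "<secret key>"}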


@manager.route('/delete_llm', methods=['POST'])
@login_required
@validate_request("llm_factory", "llm_name")
def delete_llm():
    """Remove a tenant-configured model identified by its factory and name."""
    req = request.json
    TenantLLMService.filter_delete(
        [TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == req["llm_factory"],
         TenantLLM.llm_name == req["llm_name"]])
    return get_json_result(data=True)
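
# Illustrative request body for /delete_llm: {"llm_factory": "...", "llm_name": "..."}.
# Note that the endpoint returns data=True whether or not a matching row existed,
# because filter_delete's result is not checked.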


@manager.route('/my_llms', methods=['GET'])
@login_required
def my_llms():
    """Return the models the current tenant has configured, grouped by factory with token usage."""
    try:
        res = {}
        for o in TenantLLMService.get_my_llms(current_user.id):
            if o["llm_factory"] not in res:
                res[o["llm_factory"]] = {
                    "tags": o["tags"],
                    "llm": []
                }
            res[o["llm_factory"]]["llm"].append({
                "type": o["model_type"],
                "name": o["llm_name"],
                "used_token": o["used_tokens"]
            })
        return get_json_result(data=res)
    except Exception as e:
        return server_error_response(e)
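
# Illustrative shape of the "data" field returned by /my_llms, reconstructed from the
# loop above; the factory name, tags and numbers are assumptions.
#
#   {
#       "OpenAI": {
#           "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
#           "llm": [
#               {"type": "chat", "name": "gpt-4o", "used_token": 1234}
#           ]
#       }
#   }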


@manager.route('/list', methods=['GET'])
@login_required
def list_app():
    """List all valid models grouped by factory, flagging which ones the tenant can use."""
    model_type = request.args.get("model_type")
    try:
        objs = TenantLLMService.query(tenant_id=current_user.id)
        facts = set([o.to_dict()["llm_factory"] for o in objs if o.api_key])
        llms = LLMService.get_all()
        llms = [m.to_dict()
                for m in llms if m.status == StatusEnum.VALID.value]
        for m in llms:
            m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in ["Youdao", "FastEmbed", "BAAI"]

        llm_set = set([m["llm_name"] for m in llms])
        for o in objs:
            if not o.api_key:
                continue
            if o.llm_name in llm_set:
                continue
            llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True})

        res = {}
        for m in llms:
            if model_type and m["model_type"].find(model_type) < 0:
                continue
            if m["fid"] not in res:
                res[m["fid"]] = []
            res[m["fid"]].append(m)
        return get_json_result(data=res)
    except Exception as e:
        return server_error_response(e)
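
# Illustrative use of /list: the optional model_type query parameter is substring-matched
# against each model's model_type, so a request such as GET <prefix>/list?model_type=embedding
# (the URL prefix is an assumption) returns only embedding-capable models, grouped as
# {"<factory id>": [{"llm_name": ..., "model_type": ..., "fid": ..., "available": true}, ...]}.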