#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
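"""Seed-data initialization for the RAGFlow web service: registers the built-in
LLM factories and their models, creates the default superuser on a fresh
database, and loads the canvas (graph) templates."""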
import json
import os
import time
import uuid
from copy import deepcopy

from api.db import LLMType, UserTenantRole
from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
from api.db.services import UserService
from api.db.services.canvas_service import CanvasTemplateService
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
from api.db.services.user_service import TenantService, UserTenantService
from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
from api.utils.file_utils import get_project_base_directory


def init_superuser():
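    """Create the default superuser (admin@ragflow.io / admin), its tenant and
    tenant-LLM bindings, then smoke-test the configured chat and embedding models."""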
    user_info = {
        "id": uuid.uuid1().hex,
        "password": "admin",
        "nickname": "admin",
        "is_superuser": True,
        "email": "admin@ragflow.io",
        "creator": "system",
        "status": "1",
    }
    tenant = {
        "id": user_info["id"],
        "name": user_info["nickname"] + "‘s Kingdom",
        "llm_id": CHAT_MDL,
        "embd_id": EMBEDDING_MDL,
        "asr_id": ASR_MDL,
        "parser_ids": PARSERS,
        "img2txt_id": IMAGE2TEXT_MDL
    }
    usr_tenant = {
        "tenant_id": user_info["id"],
        "user_id": user_info["id"],
        "invited_by": user_info["id"],
        "role": UserTenantRole.OWNER
    }
    tenant_llm = []
    for llm in LLMService.query(fid=LLM_FACTORY):
        tenant_llm.append(
            {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name,
             "model_type": llm.model_type, "api_key": API_KEY, "api_base": LLM_BASE_URL})

    if not UserService.save(**user_info):
        print("\033[93m【ERROR】\033[0m can't init admin.")
        return
    TenantService.insert(**tenant)
    UserTenantService.insert(**usr_tenant)
    TenantLLMService.insert_many(tenant_llm)
    print(
        "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logging in is strongly recommended.")

    chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
    msg = chat_mdl.chat(system="", history=[
        {"role": "user", "content": "Hello!"}], gen_conf={})
    if msg.find("ERROR: ") == 0:
        print(
            "\33[91m【ERROR】\33[0m: ",
            "'{}' doesn't work. {}".format(
                tenant["llm_id"],
                msg))
    embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
    v, c = embd_mdl.encode(["Hello!"])
    if c == 0:
        print(
            "\33[91m【ERROR】\33[0m:",
            " '{}' doesn't work!".format(
                tenant["embd_id"]))
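

# Built-in LLM providers (factories). init_llm_factory() refers to these
# entries by position (factory_infos[0] is OpenAI, factory_infos[1] is
# Tongyi-Qianwen, and so on), so the order of this list matters.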
factory_infos = [{
    "name": "OpenAI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Tongyi-Qianwen",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "ZHIPU-AI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Ollama",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Moonshot",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "FastEmbed",
    "logo": "",
    "tags": "TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Xinference",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION,TEXT RE-RANK",
    "status": "1",
}, {
    "name": "Youdao",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "DeepSeek",
    "logo": "",
    "tags": "LLM",
    "status": "1",
}, {
    "name": "VolcEngine",
    "logo": "",
    "tags": "LLM, TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "BaiChuan",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Jina",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "BAAI",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "MiniMax",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Mistral",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Azure-OpenAI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Bedrock",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Gemini",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
    "status": "1",
}
    # {
    #     "name": "文心一言",
    #     "logo": "",
    #     "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    #     "status": "1",
    # },
]


def init_llm_factory():
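    """Register the built-in LLM factories and their models, prune deprecated
    entries, migrate QAnything records to Youdao, back-fill the newer OpenAI
    embedding models for existing OpenAI tenants, and refresh the per-knowledgebase
    document counts."""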
    llm_infos = [
        # ---------------------- OpenAI ------------------------
        {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4o",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 128000,
            "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo",
            "tags": "LLM,CHAT,4K",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo-16k-0613",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-ada-002",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-small",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-large",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "whisper-1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-32k",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-vision-preview",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ----------------------- Qwen -----------------------
        {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-plus",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-max-1201",
            "tags": "LLM,CHAT,6K",
            "max_tokens": 5899,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "text-embedding-v2",
            "tags": "TEXT EMBEDDING,2K",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "paraformer-realtime-8k-v1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-vl-max",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ---------------------- ZhipuAI ----------------------
        {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-3-turbo",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4v",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 2000,
            "model_type": LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "embedding-2",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Moonshot -----------------------
        {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-8k",
            "tags": "LLM,CHAT,",
            "max_tokens": 7900,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-32k",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-128k",
            "tags": "LLM,CHAT",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ FastEmbed -----------------------
        {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-base-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-large-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "nomic-ai/nomic-embed-text-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-small-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Youdao -----------------------
        {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-embedding-base_v1",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-reranker-base_v1",
            "tags": "RE-RANK, 512",
            "max_tokens": 512,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ DeepSeek -----------------------
        {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-chat",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-coder",
            "tags": "LLM,CHAT,",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ VolcEngine -----------------------
        {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-32k",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-4k",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ BaiChuan -----------------------
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo-192k",
            "tags": "LLM,CHAT,192K",
            "max_tokens": 196608,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo-128k",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan4",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan-Text-Embedding",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Jina -----------------------
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-base-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-turbo-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-tiny-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-colbert-v1-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-de",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-es",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-code",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-zh",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ BAAI -----------------------
        {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-large-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 1024,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-reranker-v2-m3",
            "tags": "RE-RANK,2k",
            "max_tokens": 2048,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ Minimax -----------------------
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5s-chat",
            "tags": "LLM,CHAT,245k",
            "max_tokens": 245760,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5t-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5g-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5-chat",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16384,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5s-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ Mistral -----------------------
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mixtral-8x22b",
            "tags": "LLM,CHAT,64k",
            "max_tokens": 64000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mixtral-8x7b",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mistral-7b",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-large-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-small-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-medium-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "codestral-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-embed",
            "tags": "TEXT EMBEDDING,8k",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Azure OpenAI -----------------------
        # Please ensure the llm_name matches the deployment name in the Azure
        # OpenAI service (e.g., azure-gpt-4o), and that it differs from the
        # corresponding OpenAI llm_name.
        #
        # Each model must be deployed in the Azure OpenAI service; otherwise
        # you will receive the error message 'The API deployment for
        # this resource does not exist'.
        {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-4o",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 128000,
            "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-35-turbo",
            "tags": "LLM,CHAT,4K",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-35-turbo-16k",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-text-embedding-ada-002",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-text-embedding-3-small",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-text-embedding-3-large",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-whisper-1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-4",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-4-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-4-32k",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[15]["name"],
            "llm_name": "azure-gpt-4-vision-preview",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ------------------------ Bedrock -----------------------
        {
            "fid": factory_infos[16]["name"],
            "llm_name": "ai21.j2-ultra-v1",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "ai21.j2-mid-v1",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.command-text-v14",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.command-light-text-v14",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.command-r-v1:0",
            "tags": "LLM,CHAT,128k",
            "max_tokens": 128 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.command-r-plus-v1:0",
            "tags": "LLM,CHAT,128k",
            "max_tokens": 128000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-v2",
            "tags": "LLM,CHAT,100k",
            "max_tokens": 100 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-v2:1",
            "tags": "LLM,CHAT,200k",
            "max_tokens": 200 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0",
            "tags": "LLM,CHAT,200k",
            "max_tokens": 200 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0",
            "tags": "LLM,CHAT,200k",
            "max_tokens": 200 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-3-haiku-20240307-v1:0",
            "tags": "LLM,CHAT,200k",
            "max_tokens": 200 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-3-opus-20240229-v1:0",
            "tags": "LLM,CHAT,200k",
            "max_tokens": 200 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "anthropic.claude-instant-v1",
            "tags": "LLM,CHAT,100k",
            "max_tokens": 100 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "amazon.titan-text-express-v1",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "amazon.titan-text-premier-v1:0",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "amazon.titan-text-lite-v1",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "meta.llama2-13b-chat-v1",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "meta.llama2-70b-chat-v1",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "meta.llama3-8b-instruct-v1:0",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "meta.llama3-70b-instruct-v1:0",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "mistral.mistral-7b-instruct-v0:2",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "mistral.mixtral-8x7b-instruct-v0:1",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "mistral.mistral-large-2402-v1:0",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "mistral.mistral-small-2402-v1:0",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "amazon.titan-embed-text-v2:0",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.embed-english-v3",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[16]["name"],
            "llm_name": "cohere.embed-multilingual-v3",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[17]["name"],
            "llm_name": "gemini-1.5-pro-latest",
            "tags": "LLM,CHAT,1024K",
            "max_tokens": 1024 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[17]["name"],
            "llm_name": "gemini-1.5-flash-latest",
            "tags": "LLM,CHAT,1024K",
            "max_tokens": 1024 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[17]["name"],
            "llm_name": "gemini-1.0-pro",
            "tags": "LLM,CHAT,30K",
            "max_tokens": 30 * 1024,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[17]["name"],
            "llm_name": "gemini-1.0-pro-vision-latest",
            "tags": "LLM,IMAGE2TEXT,12K",
            "max_tokens": 12 * 1024,
            "model_type": LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[17]["name"],
            "llm_name": "text-embedding-004",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }
    ]
    for info in factory_infos:
        try:
            LLMFactoriesService.save(**info)
        except Exception as e:
            pass
    for info in llm_infos:
        try:
            LLMService.save(**info)
        except Exception as e:
            pass
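    # Remove deprecated factories/models (Local, Moonshot's flag-embedding,
    # QAnything) and migrate existing QAnything tenant records to Youdao.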
    LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
    LLMService.filter_delete([LLM.fid == "Local"])
    LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
    TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
    LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
    LLMService.filter_delete([LLMService.model.fid == "QAnything"])
    TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})

    # Insert the two newer OpenAI embedding models for every tenant that
    # already has OpenAI configured.
    print("Start to insert 2 OpenAI embedding models...")
    tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
    for tid in tenant_ids:
        for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
            row = row.to_dict()
            row["model_type"] = LLMType.EMBEDDING.value
            row["llm_name"] = "text-embedding-3-small"
            row["used_tokens"] = 0
            try:
                TenantLLMService.save(**row)
                row = deepcopy(row)
                row["llm_name"] = "text-embedding-3-large"
                TenantLLMService.save(**row)
            except Exception as e:
                pass
            break

    # Refresh the cached document count of every knowledge base.
    for kb_id in KnowledgebaseService.get_all_ids():
        KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})
  965. """
  966. drop table llm;
  967. drop table llm_factories;
  968. update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
  969. alter table knowledgebase modify avatar longtext;
  970. alter table user modify avatar longtext;
  971. alter table dialog modify icon longtext;
  972. """


def add_graph_templates():
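    """Load every canvas template JSON under graph/templates, inserting new
    templates and updating the ones whose id already exists."""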
    dir = os.path.join(get_project_base_directory(), "graph", "templates")
    for fnm in os.listdir(dir):
        try:
            cnvs = json.load(open(os.path.join(dir, fnm), "r"))
            try:
                CanvasTemplateService.save(**cnvs)
            except:
                CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
        except Exception as e:
            print("Add graph templates error: ", e)
            print("------------", flush=True)


def init_web_data():
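    """Entry point used at start-up: seed the LLM factories and models, create
    the superuser when the user table is empty, and load the graph templates."""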
    start_time = time.time()

    init_llm_factory()
    if not UserService.get_all().count():
        init_superuser()

    add_graph_templates()
    print("init web data success:{}".format(time.time() - start_time))


if __name__ == '__main__':
    init_web_db()
    init_web_data()