Você não pode selecionar mais de 25 tópicos. Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') e podem ter até 35 caracteres.

init_data.py 37KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059
  1. #
  2. # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. import json
  17. import os
  18. import time
  19. import uuid
  20. from copy import deepcopy
  21. from api.db import LLMType, UserTenantRole
  22. from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
  23. from api.db.services import UserService
  24. from api.db.services.canvas_service import CanvasTemplateService
  25. from api.db.services.document_service import DocumentService
  26. from api.db.services.knowledgebase_service import KnowledgebaseService
  27. from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
  28. from api.db.services.user_service import TenantService, UserTenantService
  29. from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
  30. from api.utils.file_utils import get_project_base_directory
  31. def init_superuser():
  32. user_info = {
  33. "id": uuid.uuid1().hex,
  34. "password": "admin",
  35. "nickname": "admin",
  36. "is_superuser": True,
  37. "email": "admin@ragflow.io",
  38. "creator": "system",
  39. "status": "1",
  40. }
  41. tenant = {
  42. "id": user_info["id"],
  43. "name": user_info["nickname"] + "‘s Kingdom",
  44. "llm_id": CHAT_MDL,
  45. "embd_id": EMBEDDING_MDL,
  46. "asr_id": ASR_MDL,
  47. "parser_ids": PARSERS,
  48. "img2txt_id": IMAGE2TEXT_MDL
  49. }
  50. usr_tenant = {
  51. "tenant_id": user_info["id"],
  52. "user_id": user_info["id"],
  53. "invited_by": user_info["id"],
  54. "role": UserTenantRole.OWNER
  55. }
  56. tenant_llm = []
  57. for llm in LLMService.query(fid=LLM_FACTORY):
  58. tenant_llm.append(
  59. {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name, "model_type": llm.model_type,
  60. "api_key": API_KEY, "api_base": LLM_BASE_URL})
  61. if not UserService.save(**user_info):
  62. print("\033[93m【ERROR】\033[0mcan't init admin.")
  63. return
  64. TenantService.insert(**tenant)
  65. UserTenantService.insert(**usr_tenant)
  66. TenantLLMService.insert_many(tenant_llm)
  67. print(
  68. "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logining is strongly recomanded.")
  69. chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
  70. msg = chat_mdl.chat(system="", history=[
  71. {"role": "user", "content": "Hello!"}], gen_conf={})
  72. if msg.find("ERROR: ") == 0:
  73. print(
  74. "\33[91m【ERROR】\33[0m: ",
  75. "'{}' dosen't work. {}".format(
  76. tenant["llm_id"],
  77. msg))
  78. embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
  79. v, c = embd_mdl.encode(["Hello!"])
  80. if c == 0:
  81. print(
  82. "\33[91m【ERROR】\33[0m:",
  83. " '{}' dosen't work!".format(
  84. tenant["embd_id"]))
  85. factory_infos = [{
  86. "name": "OpenAI",
  87. "logo": "",
  88. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  89. "status": "1",
  90. }, {
  91. "name": "Tongyi-Qianwen",
  92. "logo": "",
  93. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  94. "status": "1",
  95. }, {
  96. "name": "ZHIPU-AI",
  97. "logo": "",
  98. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  99. "status": "1",
  100. },
  101. {
  102. "name": "Ollama",
  103. "logo": "",
  104. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  105. "status": "1",
  106. }, {
  107. "name": "Moonshot",
  108. "logo": "",
  109. "tags": "LLM,TEXT EMBEDDING",
  110. "status": "1",
  111. }, {
  112. "name": "FastEmbed",
  113. "logo": "",
  114. "tags": "TEXT EMBEDDING",
  115. "status": "1",
  116. }, {
  117. "name": "Xinference",
  118. "logo": "",
  119. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION,TEXT RE-RANK",
  120. "status": "1",
  121. },{
  122. "name": "Youdao",
  123. "logo": "",
  124. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  125. "status": "1",
  126. },{
  127. "name": "DeepSeek",
  128. "logo": "",
  129. "tags": "LLM",
  130. "status": "1",
  131. },{
  132. "name": "VolcEngine",
  133. "logo": "",
  134. "tags": "LLM, TEXT EMBEDDING",
  135. "status": "1",
  136. },{
  137. "name": "BaiChuan",
  138. "logo": "",
  139. "tags": "LLM,TEXT EMBEDDING",
  140. "status": "1",
  141. },{
  142. "name": "Jina",
  143. "logo": "",
  144. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  145. "status": "1",
  146. },{
  147. "name": "BAAI",
  148. "logo": "",
  149. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  150. "status": "1",
  151. },{
  152. "name": "MiniMax",
  153. "logo": "",
  154. "tags": "LLM,TEXT EMBEDDING",
  155. "status": "1",
  156. },{
  157. "name": "Mistral",
  158. "logo": "",
  159. "tags": "LLM,TEXT EMBEDDING",
  160. "status": "1",
  161. },{
  162. "name": "Azure-OpenAI",
  163. "logo": "",
  164. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  165. "status": "1",
  166. },{
  167. "name": "Bedrock",
  168. "logo": "",
  169. "tags": "LLM,TEXT EMBEDDING",
  170. "status": "1",
  171. },{
  172. "name": "Gemini",
  173. "logo": "",
  174. "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
  175. "status": "1",
  176. },
  177. {
  178. "name": "Groq",
  179. "logo": "",
  180. "tags": "LLM",
  181. "status": "1",
  182. }
  183. # {
  184. # "name": "文心一言",
  185. # "logo": "",
  186. # "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  187. # "status": "1",
  188. # },
  189. ]
  190. def init_llm_factory():
  191. llm_infos = [
  192. # ---------------------- OpenAI ------------------------
  193. {
  194. "fid": factory_infos[0]["name"],
  195. "llm_name": "gpt-4o",
  196. "tags": "LLM,CHAT,128K",
  197. "max_tokens": 128000,
  198. "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
  199. }, {
  200. "fid": factory_infos[0]["name"],
  201. "llm_name": "gpt-3.5-turbo",
  202. "tags": "LLM,CHAT,4K",
  203. "max_tokens": 4096,
  204. "model_type": LLMType.CHAT.value
  205. }, {
  206. "fid": factory_infos[0]["name"],
  207. "llm_name": "gpt-3.5-turbo-16k-0613",
  208. "tags": "LLM,CHAT,16k",
  209. "max_tokens": 16385,
  210. "model_type": LLMType.CHAT.value
  211. }, {
  212. "fid": factory_infos[0]["name"],
  213. "llm_name": "text-embedding-ada-002",
  214. "tags": "TEXT EMBEDDING,8K",
  215. "max_tokens": 8191,
  216. "model_type": LLMType.EMBEDDING.value
  217. }, {
  218. "fid": factory_infos[0]["name"],
  219. "llm_name": "text-embedding-3-small",
  220. "tags": "TEXT EMBEDDING,8K",
  221. "max_tokens": 8191,
  222. "model_type": LLMType.EMBEDDING.value
  223. }, {
  224. "fid": factory_infos[0]["name"],
  225. "llm_name": "text-embedding-3-large",
  226. "tags": "TEXT EMBEDDING,8K",
  227. "max_tokens": 8191,
  228. "model_type": LLMType.EMBEDDING.value
  229. }, {
  230. "fid": factory_infos[0]["name"],
  231. "llm_name": "whisper-1",
  232. "tags": "SPEECH2TEXT",
  233. "max_tokens": 25 * 1024 * 1024,
  234. "model_type": LLMType.SPEECH2TEXT.value
  235. }, {
  236. "fid": factory_infos[0]["name"],
  237. "llm_name": "gpt-4",
  238. "tags": "LLM,CHAT,8K",
  239. "max_tokens": 8191,
  240. "model_type": LLMType.CHAT.value
  241. }, {
  242. "fid": factory_infos[0]["name"],
  243. "llm_name": "gpt-4-turbo",
  244. "tags": "LLM,CHAT,8K",
  245. "max_tokens": 8191,
  246. "model_type": LLMType.CHAT.value
  247. },{
  248. "fid": factory_infos[0]["name"],
  249. "llm_name": "gpt-4-32k",
  250. "tags": "LLM,CHAT,32K",
  251. "max_tokens": 32768,
  252. "model_type": LLMType.CHAT.value
  253. }, {
  254. "fid": factory_infos[0]["name"],
  255. "llm_name": "gpt-4-vision-preview",
  256. "tags": "LLM,CHAT,IMAGE2TEXT",
  257. "max_tokens": 765,
  258. "model_type": LLMType.IMAGE2TEXT.value
  259. },
  260. # ----------------------- Qwen -----------------------
  261. {
  262. "fid": factory_infos[1]["name"],
  263. "llm_name": "qwen-turbo",
  264. "tags": "LLM,CHAT,8K",
  265. "max_tokens": 8191,
  266. "model_type": LLMType.CHAT.value
  267. }, {
  268. "fid": factory_infos[1]["name"],
  269. "llm_name": "qwen-plus",
  270. "tags": "LLM,CHAT,32K",
  271. "max_tokens": 32768,
  272. "model_type": LLMType.CHAT.value
  273. }, {
  274. "fid": factory_infos[1]["name"],
  275. "llm_name": "qwen-max-1201",
  276. "tags": "LLM,CHAT,6K",
  277. "max_tokens": 5899,
  278. "model_type": LLMType.CHAT.value
  279. }, {
  280. "fid": factory_infos[1]["name"],
  281. "llm_name": "text-embedding-v2",
  282. "tags": "TEXT EMBEDDING,2K",
  283. "max_tokens": 2048,
  284. "model_type": LLMType.EMBEDDING.value
  285. }, {
  286. "fid": factory_infos[1]["name"],
  287. "llm_name": "paraformer-realtime-8k-v1",
  288. "tags": "SPEECH2TEXT",
  289. "max_tokens": 25 * 1024 * 1024,
  290. "model_type": LLMType.SPEECH2TEXT.value
  291. }, {
  292. "fid": factory_infos[1]["name"],
  293. "llm_name": "qwen-vl-max",
  294. "tags": "LLM,CHAT,IMAGE2TEXT",
  295. "max_tokens": 765,
  296. "model_type": LLMType.IMAGE2TEXT.value
  297. },
  298. # ---------------------- ZhipuAI ----------------------
  299. {
  300. "fid": factory_infos[2]["name"],
  301. "llm_name": "glm-3-turbo",
  302. "tags": "LLM,CHAT,",
  303. "max_tokens": 128 * 1000,
  304. "model_type": LLMType.CHAT.value
  305. }, {
  306. "fid": factory_infos[2]["name"],
  307. "llm_name": "glm-4",
  308. "tags": "LLM,CHAT,",
  309. "max_tokens": 128 * 1000,
  310. "model_type": LLMType.CHAT.value
  311. }, {
  312. "fid": factory_infos[2]["name"],
  313. "llm_name": "glm-4v",
  314. "tags": "LLM,CHAT,IMAGE2TEXT",
  315. "max_tokens": 2000,
  316. "model_type": LLMType.IMAGE2TEXT.value
  317. },
  318. {
  319. "fid": factory_infos[2]["name"],
  320. "llm_name": "embedding-2",
  321. "tags": "TEXT EMBEDDING",
  322. "max_tokens": 512,
  323. "model_type": LLMType.EMBEDDING.value
  324. },
  325. # ------------------------ Moonshot -----------------------
  326. {
  327. "fid": factory_infos[4]["name"],
  328. "llm_name": "moonshot-v1-8k",
  329. "tags": "LLM,CHAT,",
  330. "max_tokens": 7900,
  331. "model_type": LLMType.CHAT.value
  332. }, {
  333. "fid": factory_infos[4]["name"],
  334. "llm_name": "moonshot-v1-32k",
  335. "tags": "LLM,CHAT,",
  336. "max_tokens": 32768,
  337. "model_type": LLMType.CHAT.value
  338. }, {
  339. "fid": factory_infos[4]["name"],
  340. "llm_name": "moonshot-v1-128k",
  341. "tags": "LLM,CHAT",
  342. "max_tokens": 128 * 1000,
  343. "model_type": LLMType.CHAT.value
  344. },
  345. # ------------------------ FastEmbed -----------------------
  346. {
  347. "fid": factory_infos[5]["name"],
  348. "llm_name": "BAAI/bge-small-en-v1.5",
  349. "tags": "TEXT EMBEDDING,",
  350. "max_tokens": 512,
  351. "model_type": LLMType.EMBEDDING.value
  352. }, {
  353. "fid": factory_infos[5]["name"],
  354. "llm_name": "BAAI/bge-small-zh-v1.5",
  355. "tags": "TEXT EMBEDDING,",
  356. "max_tokens": 512,
  357. "model_type": LLMType.EMBEDDING.value
  358. }, {
  359. }, {
  360. "fid": factory_infos[5]["name"],
  361. "llm_name": "BAAI/bge-base-en-v1.5",
  362. "tags": "TEXT EMBEDDING,",
  363. "max_tokens": 512,
  364. "model_type": LLMType.EMBEDDING.value
  365. }, {
  366. }, {
  367. "fid": factory_infos[5]["name"],
  368. "llm_name": "BAAI/bge-large-en-v1.5",
  369. "tags": "TEXT EMBEDDING,",
  370. "max_tokens": 512,
  371. "model_type": LLMType.EMBEDDING.value
  372. }, {
  373. "fid": factory_infos[5]["name"],
  374. "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
  375. "tags": "TEXT EMBEDDING,",
  376. "max_tokens": 512,
  377. "model_type": LLMType.EMBEDDING.value
  378. }, {
  379. "fid": factory_infos[5]["name"],
  380. "llm_name": "nomic-ai/nomic-embed-text-v1.5",
  381. "tags": "TEXT EMBEDDING,",
  382. "max_tokens": 8192,
  383. "model_type": LLMType.EMBEDDING.value
  384. }, {
  385. "fid": factory_infos[5]["name"],
  386. "llm_name": "jinaai/jina-embeddings-v2-small-en",
  387. "tags": "TEXT EMBEDDING,",
  388. "max_tokens": 2147483648,
  389. "model_type": LLMType.EMBEDDING.value
  390. }, {
  391. "fid": factory_infos[5]["name"],
  392. "llm_name": "jinaai/jina-embeddings-v2-base-en",
  393. "tags": "TEXT EMBEDDING,",
  394. "max_tokens": 2147483648,
  395. "model_type": LLMType.EMBEDDING.value
  396. },
  397. # ------------------------ Youdao -----------------------
  398. {
  399. "fid": factory_infos[7]["name"],
  400. "llm_name": "maidalun1020/bce-embedding-base_v1",
  401. "tags": "TEXT EMBEDDING,",
  402. "max_tokens": 512,
  403. "model_type": LLMType.EMBEDDING.value
  404. },
  405. {
  406. "fid": factory_infos[7]["name"],
  407. "llm_name": "maidalun1020/bce-reranker-base_v1",
  408. "tags": "RE-RANK, 512",
  409. "max_tokens": 512,
  410. "model_type": LLMType.RERANK.value
  411. },
  412. # ------------------------ DeepSeek -----------------------
  413. {
  414. "fid": factory_infos[8]["name"],
  415. "llm_name": "deepseek-chat",
  416. "tags": "LLM,CHAT,",
  417. "max_tokens": 32768,
  418. "model_type": LLMType.CHAT.value
  419. },
  420. {
  421. "fid": factory_infos[8]["name"],
  422. "llm_name": "deepseek-coder",
  423. "tags": "LLM,CHAT,",
  424. "max_tokens": 16385,
  425. "model_type": LLMType.CHAT.value
  426. },
  427. # ------------------------ VolcEngine -----------------------
  428. {
  429. "fid": factory_infos[9]["name"],
  430. "llm_name": "Skylark2-pro-32k",
  431. "tags": "LLM,CHAT,32k",
  432. "max_tokens": 32768,
  433. "model_type": LLMType.CHAT.value
  434. },
  435. {
  436. "fid": factory_infos[9]["name"],
  437. "llm_name": "Skylark2-pro-4k",
  438. "tags": "LLM,CHAT,4k",
  439. "max_tokens": 4096,
  440. "model_type": LLMType.CHAT.value
  441. },
  442. # ------------------------ BaiChuan -----------------------
  443. {
  444. "fid": factory_infos[10]["name"],
  445. "llm_name": "Baichuan2-Turbo",
  446. "tags": "LLM,CHAT,32K",
  447. "max_tokens": 32768,
  448. "model_type": LLMType.CHAT.value
  449. },
  450. {
  451. "fid": factory_infos[10]["name"],
  452. "llm_name": "Baichuan2-Turbo-192k",
  453. "tags": "LLM,CHAT,192K",
  454. "max_tokens": 196608,
  455. "model_type": LLMType.CHAT.value
  456. },
  457. {
  458. "fid": factory_infos[10]["name"],
  459. "llm_name": "Baichuan3-Turbo",
  460. "tags": "LLM,CHAT,32K",
  461. "max_tokens": 32768,
  462. "model_type": LLMType.CHAT.value
  463. },
  464. {
  465. "fid": factory_infos[10]["name"],
  466. "llm_name": "Baichuan3-Turbo-128k",
  467. "tags": "LLM,CHAT,128K",
  468. "max_tokens": 131072,
  469. "model_type": LLMType.CHAT.value
  470. },
  471. {
  472. "fid": factory_infos[10]["name"],
  473. "llm_name": "Baichuan4",
  474. "tags": "LLM,CHAT,128K",
  475. "max_tokens": 131072,
  476. "model_type": LLMType.CHAT.value
  477. },
  478. {
  479. "fid": factory_infos[10]["name"],
  480. "llm_name": "Baichuan-Text-Embedding",
  481. "tags": "TEXT EMBEDDING",
  482. "max_tokens": 512,
  483. "model_type": LLMType.EMBEDDING.value
  484. },
  485. # ------------------------ Jina -----------------------
  486. {
  487. "fid": factory_infos[11]["name"],
  488. "llm_name": "jina-reranker-v1-base-en",
  489. "tags": "RE-RANK,8k",
  490. "max_tokens": 8196,
  491. "model_type": LLMType.RERANK.value
  492. },
  493. {
  494. "fid": factory_infos[11]["name"],
  495. "llm_name": "jina-reranker-v1-turbo-en",
  496. "tags": "RE-RANK,8k",
  497. "max_tokens": 8196,
  498. "model_type": LLMType.RERANK.value
  499. },
  500. {
  501. "fid": factory_infos[11]["name"],
  502. "llm_name": "jina-reranker-v1-tiny-en",
  503. "tags": "RE-RANK,8k",
  504. "max_tokens": 8196,
  505. "model_type": LLMType.RERANK.value
  506. },
  507. {
  508. "fid": factory_infos[11]["name"],
  509. "llm_name": "jina-colbert-v1-en",
  510. "tags": "RE-RANK,8k",
  511. "max_tokens": 8196,
  512. "model_type": LLMType.RERANK.value
  513. },
  514. {
  515. "fid": factory_infos[11]["name"],
  516. "llm_name": "jina-embeddings-v2-base-en",
  517. "tags": "TEXT EMBEDDING",
  518. "max_tokens": 8196,
  519. "model_type": LLMType.EMBEDDING.value
  520. },
  521. {
  522. "fid": factory_infos[11]["name"],
  523. "llm_name": "jina-embeddings-v2-base-de",
  524. "tags": "TEXT EMBEDDING",
  525. "max_tokens": 8196,
  526. "model_type": LLMType.EMBEDDING.value
  527. },
  528. {
  529. "fid": factory_infos[11]["name"],
  530. "llm_name": "jina-embeddings-v2-base-es",
  531. "tags": "TEXT EMBEDDING",
  532. "max_tokens": 8196,
  533. "model_type": LLMType.EMBEDDING.value
  534. },
  535. {
  536. "fid": factory_infos[11]["name"],
  537. "llm_name": "jina-embeddings-v2-base-code",
  538. "tags": "TEXT EMBEDDING",
  539. "max_tokens": 8196,
  540. "model_type": LLMType.EMBEDDING.value
  541. },
  542. {
  543. "fid": factory_infos[11]["name"],
  544. "llm_name": "jina-embeddings-v2-base-zh",
  545. "tags": "TEXT EMBEDDING",
  546. "max_tokens": 8196,
  547. "model_type": LLMType.EMBEDDING.value
  548. },
  549. # ------------------------ BAAI -----------------------
  550. {
  551. "fid": factory_infos[12]["name"],
  552. "llm_name": "BAAI/bge-large-zh-v1.5",
  553. "tags": "TEXT EMBEDDING,",
  554. "max_tokens": 1024,
  555. "model_type": LLMType.EMBEDDING.value
  556. },
  557. {
  558. "fid": factory_infos[12]["name"],
  559. "llm_name": "BAAI/bge-reranker-v2-m3",
  560. "tags": "RE-RANK,2k",
  561. "max_tokens": 2048,
  562. "model_type": LLMType.RERANK.value
  563. },
  564. # ------------------------ Minimax -----------------------
  565. {
  566. "fid": factory_infos[13]["name"],
  567. "llm_name": "abab6.5-chat",
  568. "tags": "LLM,CHAT,8k",
  569. "max_tokens": 8192,
  570. "model_type": LLMType.CHAT.value
  571. },
  572. {
  573. "fid": factory_infos[13]["name"],
  574. "llm_name": "abab6.5s-chat",
  575. "tags": "LLM,CHAT,245k",
  576. "max_tokens": 245760,
  577. "model_type": LLMType.CHAT.value
  578. },
  579. {
  580. "fid": factory_infos[13]["name"],
  581. "llm_name": "abab6.5t-chat",
  582. "tags": "LLM,CHAT,8k",
  583. "max_tokens": 8192,
  584. "model_type": LLMType.CHAT.value
  585. },
  586. {
  587. "fid": factory_infos[13]["name"],
  588. "llm_name": "abab6.5g-chat",
  589. "tags": "LLM,CHAT,8k",
  590. "max_tokens": 8192,
  591. "model_type": LLMType.CHAT.value
  592. },
  593. {
  594. "fid": factory_infos[13]["name"],
  595. "llm_name": "abab5.5-chat",
  596. "tags": "LLM,CHAT,16k",
  597. "max_tokens": 16384,
  598. "model_type": LLMType.CHAT.value
  599. },
  600. {
  601. "fid": factory_infos[13]["name"],
  602. "llm_name": "abab5.5s-chat",
  603. "tags": "LLM,CHAT,8k",
  604. "max_tokens": 8192,
  605. "model_type": LLMType.CHAT.value
  606. },
  607. # ------------------------ Mistral -----------------------
  608. {
  609. "fid": factory_infos[14]["name"],
  610. "llm_name": "open-mixtral-8x22b",
  611. "tags": "LLM,CHAT,64k",
  612. "max_tokens": 64000,
  613. "model_type": LLMType.CHAT.value
  614. },
  615. {
  616. "fid": factory_infos[14]["name"],
  617. "llm_name": "open-mixtral-8x7b",
  618. "tags": "LLM,CHAT,32k",
  619. "max_tokens": 32000,
  620. "model_type": LLMType.CHAT.value
  621. },
  622. {
  623. "fid": factory_infos[14]["name"],
  624. "llm_name": "open-mistral-7b",
  625. "tags": "LLM,CHAT,32k",
  626. "max_tokens": 32000,
  627. "model_type": LLMType.CHAT.value
  628. },
  629. {
  630. "fid": factory_infos[14]["name"],
  631. "llm_name": "mistral-large-latest",
  632. "tags": "LLM,CHAT,32k",
  633. "max_tokens": 32000,
  634. "model_type": LLMType.CHAT.value
  635. },
  636. {
  637. "fid": factory_infos[14]["name"],
  638. "llm_name": "mistral-small-latest",
  639. "tags": "LLM,CHAT,32k",
  640. "max_tokens": 32000,
  641. "model_type": LLMType.CHAT.value
  642. },
  643. {
  644. "fid": factory_infos[14]["name"],
  645. "llm_name": "mistral-medium-latest",
  646. "tags": "LLM,CHAT,32k",
  647. "max_tokens": 32000,
  648. "model_type": LLMType.CHAT.value
  649. },
  650. {
  651. "fid": factory_infos[14]["name"],
  652. "llm_name": "codestral-latest",
  653. "tags": "LLM,CHAT,32k",
  654. "max_tokens": 32000,
  655. "model_type": LLMType.CHAT.value
  656. },
  657. {
  658. "fid": factory_infos[14]["name"],
  659. "llm_name": "mistral-embed",
  660. "tags": "LLM,CHAT,8k",
  661. "max_tokens": 8192,
  662. "model_type": LLMType.EMBEDDING
  663. },
  664. # ------------------------ Azure OpenAI -----------------------
  665. # Please ensure the llm_name is the same as the name in Azure
  666. # OpenAI deployment name (e.g., azure-gpt-4o). And the llm_name
  667. # must different from the OpenAI llm_name
  668. #
  669. # Each model must be deployed in the Azure OpenAI service, otherwise,
  670. # you will receive an error message 'The API deployment for
  671. # this resource does not exist'
  672. {
  673. "fid": factory_infos[15]["name"],
  674. "llm_name": "azure-gpt-4o",
  675. "tags": "LLM,CHAT,128K",
  676. "max_tokens": 128000,
  677. "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
  678. }, {
  679. "fid": factory_infos[15]["name"],
  680. "llm_name": "azure-gpt-35-turbo",
  681. "tags": "LLM,CHAT,4K",
  682. "max_tokens": 4096,
  683. "model_type": LLMType.CHAT.value
  684. }, {
  685. "fid": factory_infos[15]["name"],
  686. "llm_name": "azure-gpt-35-turbo-16k",
  687. "tags": "LLM,CHAT,16k",
  688. "max_tokens": 16385,
  689. "model_type": LLMType.CHAT.value
  690. }, {
  691. "fid": factory_infos[15]["name"],
  692. "llm_name": "azure-text-embedding-ada-002",
  693. "tags": "TEXT EMBEDDING,8K",
  694. "max_tokens": 8191,
  695. "model_type": LLMType.EMBEDDING.value
  696. }, {
  697. "fid": factory_infos[15]["name"],
  698. "llm_name": "azure-text-embedding-3-small",
  699. "tags": "TEXT EMBEDDING,8K",
  700. "max_tokens": 8191,
  701. "model_type": LLMType.EMBEDDING.value
  702. }, {
  703. "fid": factory_infos[15]["name"],
  704. "llm_name": "azure-text-embedding-3-large",
  705. "tags": "TEXT EMBEDDING,8K",
  706. "max_tokens": 8191,
  707. "model_type": LLMType.EMBEDDING.value
  708. },{
  709. "fid": factory_infos[15]["name"],
  710. "llm_name": "azure-whisper-1",
  711. "tags": "SPEECH2TEXT",
  712. "max_tokens": 25 * 1024 * 1024,
  713. "model_type": LLMType.SPEECH2TEXT.value
  714. },
  715. {
  716. "fid": factory_infos[15]["name"],
  717. "llm_name": "azure-gpt-4",
  718. "tags": "LLM,CHAT,8K",
  719. "max_tokens": 8191,
  720. "model_type": LLMType.CHAT.value
  721. }, {
  722. "fid": factory_infos[15]["name"],
  723. "llm_name": "azure-gpt-4-turbo",
  724. "tags": "LLM,CHAT,8K",
  725. "max_tokens": 8191,
  726. "model_type": LLMType.CHAT.value
  727. }, {
  728. "fid": factory_infos[15]["name"],
  729. "llm_name": "azure-gpt-4-32k",
  730. "tags": "LLM,CHAT,32K",
  731. "max_tokens": 32768,
  732. "model_type": LLMType.CHAT.value
  733. }, {
  734. "fid": factory_infos[15]["name"],
  735. "llm_name": "azure-gpt-4-vision-preview",
  736. "tags": "LLM,CHAT,IMAGE2TEXT",
  737. "max_tokens": 765,
  738. "model_type": LLMType.IMAGE2TEXT.value
  739. },
  740. # ------------------------ Bedrock -----------------------
  741. {
  742. "fid": factory_infos[16]["name"],
  743. "llm_name": "ai21.j2-ultra-v1",
  744. "tags": "LLM,CHAT,8k",
  745. "max_tokens": 8191,
  746. "model_type": LLMType.CHAT.value
  747. }, {
  748. "fid": factory_infos[16]["name"],
  749. "llm_name": "ai21.j2-mid-v1",
  750. "tags": "LLM,CHAT,8k",
  751. "max_tokens": 8191,
  752. "model_type": LLMType.CHAT.value
  753. }, {
  754. "fid": factory_infos[16]["name"],
  755. "llm_name": "cohere.command-text-v14",
  756. "tags": "LLM,CHAT,4k",
  757. "max_tokens": 4096,
  758. "model_type": LLMType.CHAT.value
  759. }, {
  760. "fid": factory_infos[16]["name"],
  761. "llm_name": "cohere.command-light-text-v14",
  762. "tags": "LLM,CHAT,4k",
  763. "max_tokens": 4096,
  764. "model_type": LLMType.CHAT.value
  765. }, {
  766. "fid": factory_infos[16]["name"],
  767. "llm_name": "cohere.command-r-v1:0",
  768. "tags": "LLM,CHAT,128k",
  769. "max_tokens": 128 * 1024,
  770. "model_type": LLMType.CHAT.value
  771. }, {
  772. "fid": factory_infos[16]["name"],
  773. "llm_name": "cohere.command-r-plus-v1:0",
  774. "tags": "LLM,CHAT,128k",
  775. "max_tokens": 128000,
  776. "model_type": LLMType.CHAT.value
  777. }, {
  778. "fid": factory_infos[16]["name"],
  779. "llm_name": "anthropic.claude-v2",
  780. "tags": "LLM,CHAT,100k",
  781. "max_tokens": 100 * 1024,
  782. "model_type": LLMType.CHAT.value
  783. }, {
  784. "fid": factory_infos[16]["name"],
  785. "llm_name": "anthropic.claude-v2:1",
  786. "tags": "LLM,CHAT,200k",
  787. "max_tokens": 200 * 1024,
  788. "model_type": LLMType.CHAT.value
  789. }, {
  790. "fid": factory_infos[16]["name"],
  791. "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0",
  792. "tags": "LLM,CHAT,200k",
  793. "max_tokens": 200 * 1024,
  794. "model_type": LLMType.CHAT.value
  795. }, {
  796. "fid": factory_infos[16]["name"],
  797. "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0",
  798. "tags": "LLM,CHAT,200k",
  799. "max_tokens": 200 * 1024,
  800. "model_type": LLMType.CHAT.value
  801. }, {
  802. "fid": factory_infos[16]["name"],
  803. "llm_name": "anthropic.claude-3-haiku-20240307-v1:0",
  804. "tags": "LLM,CHAT,200k",
  805. "max_tokens": 200 * 1024,
  806. "model_type": LLMType.CHAT.value
  807. }, {
  808. "fid": factory_infos[16]["name"],
  809. "llm_name": "anthropic.claude-3-opus-20240229-v1:0",
  810. "tags": "LLM,CHAT,200k",
  811. "max_tokens": 200 * 1024,
  812. "model_type": LLMType.CHAT.value
  813. }, {
  814. "fid": factory_infos[16]["name"],
  815. "llm_name": "anthropic.claude-instant-v1",
  816. "tags": "LLM,CHAT,100k",
  817. "max_tokens": 100 * 1024,
  818. "model_type": LLMType.CHAT.value
  819. }, {
  820. "fid": factory_infos[16]["name"],
  821. "llm_name": "amazon.titan-text-express-v1",
  822. "tags": "LLM,CHAT,8k",
  823. "max_tokens": 8192,
  824. "model_type": LLMType.CHAT.value
  825. }, {
  826. "fid": factory_infos[16]["name"],
  827. "llm_name": "amazon.titan-text-premier-v1:0",
  828. "tags": "LLM,CHAT,32k",
  829. "max_tokens": 32 * 1024,
  830. "model_type": LLMType.CHAT.value
  831. }, {
  832. "fid": factory_infos[16]["name"],
  833. "llm_name": "amazon.titan-text-lite-v1",
  834. "tags": "LLM,CHAT,4k",
  835. "max_tokens": 4096,
  836. "model_type": LLMType.CHAT.value
  837. }, {
  838. "fid": factory_infos[16]["name"],
  839. "llm_name": "meta.llama2-13b-chat-v1",
  840. "tags": "LLM,CHAT,4k",
  841. "max_tokens": 4096,
  842. "model_type": LLMType.CHAT.value
  843. }, {
  844. "fid": factory_infos[16]["name"],
  845. "llm_name": "meta.llama2-70b-chat-v1",
  846. "tags": "LLM,CHAT,4k",
  847. "max_tokens": 4096,
  848. "model_type": LLMType.CHAT.value
  849. }, {
  850. "fid": factory_infos[16]["name"],
  851. "llm_name": "meta.llama3-8b-instruct-v1:0",
  852. "tags": "LLM,CHAT,8k",
  853. "max_tokens": 8192,
  854. "model_type": LLMType.CHAT.value
  855. }, {
  856. "fid": factory_infos[16]["name"],
  857. "llm_name": "meta.llama3-70b-instruct-v1:0",
  858. "tags": "LLM,CHAT,8k",
  859. "max_tokens": 8192,
  860. "model_type": LLMType.CHAT.value
  861. }, {
  862. "fid": factory_infos[16]["name"],
  863. "llm_name": "mistral.mistral-7b-instruct-v0:2",
  864. "tags": "LLM,CHAT,8k",
  865. "max_tokens": 8192,
  866. "model_type": LLMType.CHAT.value
  867. }, {
  868. "fid": factory_infos[16]["name"],
  869. "llm_name": "mistral.mixtral-8x7b-instruct-v0:1",
  870. "tags": "LLM,CHAT,4k",
  871. "max_tokens": 4096,
  872. "model_type": LLMType.CHAT.value
  873. }, {
  874. "fid": factory_infos[16]["name"],
  875. "llm_name": "mistral.mistral-large-2402-v1:0",
  876. "tags": "LLM,CHAT,8k",
  877. "max_tokens": 8192,
  878. "model_type": LLMType.CHAT.value
  879. }, {
  880. "fid": factory_infos[16]["name"],
  881. "llm_name": "mistral.mistral-small-2402-v1:0",
  882. "tags": "LLM,CHAT,8k",
  883. "max_tokens": 8192,
  884. "model_type": LLMType.CHAT.value
  885. }, {
  886. "fid": factory_infos[16]["name"],
  887. "llm_name": "amazon.titan-embed-text-v2:0",
  888. "tags": "TEXT EMBEDDING",
  889. "max_tokens": 8192,
  890. "model_type": LLMType.EMBEDDING.value
  891. }, {
  892. "fid": factory_infos[16]["name"],
  893. "llm_name": "cohere.embed-english-v3",
  894. "tags": "TEXT EMBEDDING",
  895. "max_tokens": 2048,
  896. "model_type": LLMType.EMBEDDING.value
  897. }, {
  898. "fid": factory_infos[16]["name"],
  899. "llm_name": "cohere.embed-multilingual-v3",
  900. "tags": "TEXT EMBEDDING",
  901. "max_tokens": 2048,
  902. "model_type": LLMType.EMBEDDING.value
  903. }, {
  904. "fid": factory_infos[17]["name"],
  905. "llm_name": "gemini-1.5-pro-latest",
  906. "tags": "LLM,CHAT,1024K",
  907. "max_tokens": 1024*1024,
  908. "model_type": LLMType.CHAT.value
  909. }, {
  910. "fid": factory_infos[17]["name"],
  911. "llm_name": "gemini-1.5-flash-latest",
  912. "tags": "LLM,CHAT,1024K",
  913. "max_tokens": 1024*1024,
  914. "model_type": LLMType.CHAT.value
  915. }, {
  916. "fid": factory_infos[17]["name"],
  917. "llm_name": "gemini-1.0-pro",
  918. "tags": "LLM,CHAT,30K",
  919. "max_tokens": 30*1024,
  920. "model_type": LLMType.CHAT.value
  921. }, {
  922. "fid": factory_infos[17]["name"],
  923. "llm_name": "gemini-1.0-pro-vision-latest",
  924. "tags": "LLM,IMAGE2TEXT,12K",
  925. "max_tokens": 12*1024,
  926. "model_type": LLMType.IMAGE2TEXT.value
  927. }, {
  928. "fid": factory_infos[17]["name"],
  929. "llm_name": "text-embedding-004",
  930. "tags": "TEXT EMBEDDING",
  931. "max_tokens": 2048,
  932. "model_type": LLMType.EMBEDDING.value
  933. },
  934. # ------------------------ Groq -----------------------
  935. {
  936. "fid": factory_infos[18]["name"],
  937. "llm_name": "gemma-7b-it",
  938. "tags": "LLM,CHAT,15k",
  939. "max_tokens": 8192,
  940. "model_type": LLMType.CHAT.value
  941. },
  942. {
  943. "fid": factory_infos[18]["name"],
  944. "llm_name": "gemma2-9b-it",
  945. "tags": "LLM,CHAT,15k",
  946. "max_tokens": 8192,
  947. "model_type": LLMType.CHAT.value
  948. },
  949. {
  950. "fid": factory_infos[18]["name"],
  951. "llm_name": "llama3-70b-8192",
  952. "tags": "LLM,CHAT,6k",
  953. "max_tokens": 8192,
  954. "model_type": LLMType.CHAT.value
  955. },
  956. {
  957. "fid": factory_infos[18]["name"],
  958. "llm_name": "llama3-8b-8192",
  959. "tags": "LLM,CHAT,30k",
  960. "max_tokens": 8192,
  961. "model_type": LLMType.CHAT.value
  962. },
  963. {
  964. "fid": factory_infos[18]["name"],
  965. "llm_name": "mixtral-8x7b-32768",
  966. "tags": "LLM,CHAT,5k",
  967. "max_tokens": 32768,
  968. "model_type": LLMType.CHAT.value
  969. }
  970. ]
  971. for info in factory_infos:
  972. try:
  973. LLMFactoriesService.save(**info)
  974. except Exception as e:
  975. pass
  976. for info in llm_infos:
  977. try:
  978. LLMService.save(**info)
  979. except Exception as e:
  980. pass
  981. LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
  982. LLMService.filter_delete([LLM.fid == "Local"])
  983. LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
  984. TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
  985. LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
  986. LLMService.filter_delete([LLMService.model.fid == "QAnything"])
  987. TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})
  988. ## insert openai two embedding models to the current openai user.
  989. print("Start to insert 2 OpenAI embedding models...")
  990. tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
  991. for tid in tenant_ids:
  992. for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
  993. row = row.to_dict()
  994. row["model_type"] = LLMType.EMBEDDING.value
  995. row["llm_name"] = "text-embedding-3-small"
  996. row["used_tokens"] = 0
  997. try:
  998. TenantLLMService.save(**row)
  999. row = deepcopy(row)
  1000. row["llm_name"] = "text-embedding-3-large"
  1001. TenantLLMService.save(**row)
  1002. except Exception as e:
  1003. pass
  1004. break
  1005. for kb_id in KnowledgebaseService.get_all_ids():
  1006. KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})
  1007. """
  1008. drop table llm;
  1009. drop table llm_factories;
  1010. update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
  1011. alter table knowledgebase modify avatar longtext;
  1012. alter table user modify avatar longtext;
  1013. alter table dialog modify icon longtext;
  1014. """
  1015. def add_graph_templates():
  1016. dir = os.path.join(get_project_base_directory(), "graph", "templates")
  1017. for fnm in os.listdir(dir):
  1018. try:
  1019. cnvs = json.load(open(os.path.join(dir, fnm), "r"))
  1020. try:
  1021. CanvasTemplateService.save(**cnvs)
  1022. except:
  1023. CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
  1024. except Exception as e:
  1025. print("Add graph templates error: ", e)
  1026. print("------------", flush=True)
  1027. def init_web_data():
  1028. start_time = time.time()
  1029. init_llm_factory()
  1030. if not UserService.get_all().count():
  1031. init_superuser()
  1032. add_graph_templates()
  1033. print("init web data success:{}".format(time.time() - start_time))
  1034. if __name__ == '__main__':
  1035. init_web_db()
  1036. init_web_data()