
init_data.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
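
# Seeds the web database with its initial data: the default superuser and its
# tenant, the built-in LLM factory/model catalogues, and the graph (canvas)
# templates.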
import json
import os
import time
import uuid
from copy import deepcopy

from api.db import LLMType, UserTenantRole
from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
from api.db.services import UserService
from api.db.services.canvas_service import CanvasTemplateService
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
from api.db.services.user_service import TenantService, UserTenantService
from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
from api.utils.file_utils import get_project_base_directory
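

# Creates the default "admin" account together with its tenant, binds every model
# of the configured default LLM factory to that tenant, and then smoke-tests the
# chat and embedding models, printing an error if either does not respond.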
def init_superuser():
    user_info = {
        "id": uuid.uuid1().hex,
        "password": "admin",
        "nickname": "admin",
        "is_superuser": True,
        "email": "admin@ragflow.io",
        "creator": "system",
        "status": "1",
    }
    tenant = {
        "id": user_info["id"],
        "name": user_info["nickname"] + "‘s Kingdom",
        "llm_id": CHAT_MDL,
        "embd_id": EMBEDDING_MDL,
        "asr_id": ASR_MDL,
        "parser_ids": PARSERS,
        "img2txt_id": IMAGE2TEXT_MDL
    }
    usr_tenant = {
        "tenant_id": user_info["id"],
        "user_id": user_info["id"],
        "invited_by": user_info["id"],
        "role": UserTenantRole.OWNER
    }
    tenant_llm = []
    for llm in LLMService.query(fid=LLM_FACTORY):
        tenant_llm.append(
            {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name,
             "model_type": llm.model_type, "api_key": API_KEY, "api_base": LLM_BASE_URL})

    if not UserService.save(**user_info):
        print("\033[93m【ERROR】\033[0m can't init admin.")
        return
    TenantService.insert(**tenant)
    UserTenantService.insert(**usr_tenant)
    TenantLLMService.insert_many(tenant_llm)
    print(
        "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. "
        "Changing the password after logging in is strongly recommended.")

    chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
    msg = chat_mdl.chat(system="", history=[
        {"role": "user", "content": "Hello!"}], gen_conf={})
    if msg.find("ERROR: ") == 0:
        print(
            "\33[91m【ERROR】\33[0m: ",
            "'{}' doesn't work. {}".format(
                tenant["llm_id"],
                msg))
    embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
    v, c = embd_mdl.encode(["Hello!"])
    if c == 0:
        print(
            "\33[91m【ERROR】\33[0m:",
            " '{}' doesn't work!".format(
                tenant["embd_id"]))
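

# Module-level catalogue of the built-in LLM providers (factories);
# init_llm_factory() references these entries by list index.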
factory_infos = [{
    "name": "OpenAI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Tongyi-Qianwen",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "ZHIPU-AI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Ollama",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Moonshot",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "FastEmbed",
    "logo": "",
    "tags": "TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Xinference",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Youdao",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "DeepSeek",
    "logo": "",
    "tags": "LLM",
    "status": "1",
}, {
    "name": "VolcEngine",
    "logo": "",
    "tags": "LLM, TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "BaiChuan",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Jina",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "BAAI",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "Minimax",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Mistral",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}
    # {
    #     "name": "文心一言",
    #     "logo": "",
    #     "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    #     "status": "1",
    # },
]
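

# Seeds the LLM factory and model catalogues, removes deprecated entries (the
# "Local" factory, Moonshot's "flag-embedding", the "QAnything" factory, which
# is renamed to "Youdao"), back-fills the two OpenAI embedding models for
# existing OpenAI tenants, and refreshes per-knowledgebase document counts.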
def init_llm_factory():
    llm_infos = [
        # ---------------------- OpenAI ------------------------
        {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4o",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 128000,
            "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo",
            "tags": "LLM,CHAT,4K",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo-16k-0613",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-ada-002",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-small",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-large",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "whisper-1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-32k",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-vision-preview",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ----------------------- Qwen -----------------------
        {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-plus",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-max-1201",
            "tags": "LLM,CHAT,6K",
            "max_tokens": 5899,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "text-embedding-v2",
            "tags": "TEXT EMBEDDING,2K",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "paraformer-realtime-8k-v1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-vl-max",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ---------------------- ZhipuAI ----------------------
        {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-3-turbo",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4v",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 2000,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        {
            "fid": factory_infos[2]["name"],
            "llm_name": "embedding-2",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Moonshot -----------------------
        {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-8k",
            "tags": "LLM,CHAT,",
            "max_tokens": 7900,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-32k",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-128k",
            "tags": "LLM,CHAT",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ FastEmbed -----------------------
        {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-base-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-large-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "nomic-ai/nomic-embed-text-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-small-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Youdao -----------------------
        {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-embedding-base_v1",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-reranker-base_v1",
            "tags": "RE-RANK, 512",
            "max_tokens": 512,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ DeepSeek -----------------------
        {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-chat",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-coder",
            "tags": "LLM,CHAT,",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ VolcEngine -----------------------
        {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-32k",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-4k",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ BaiChuan -----------------------
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo-192k",
            "tags": "LLM,CHAT,192K",
            "max_tokens": 196608,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo-128k",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan4",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan-Text-Embedding",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Jina -----------------------
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-base-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-turbo-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-tiny-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-colbert-v1-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-de",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-es",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-code",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-zh",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ BAAI -----------------------
        {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-large-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 1024,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-reranker-v2-m3",
            "tags": "RE-RANK,2k",
            "max_tokens": 2048,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ Minimax -----------------------
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5s-chat",
            "tags": "LLM,CHAT,245k",
            "max_tokens": 245760,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5t-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5g-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5-chat",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16384,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5s-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ Mistral -----------------------
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mixtral-8x22b",
            "tags": "LLM,CHAT,64k",
            "max_tokens": 64000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mixtral-8x7b",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "open-mistral-7b",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-large-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-small-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-medium-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "codestral-latest",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32000,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[14]["name"],
            "llm_name": "mistral-embed",
            "tags": "TEXT EMBEDDING,8k",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        },
    ]
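
    # Upsert the factory and model catalogues; save() errors (e.g. rows that
    # already exist from a previous run) are deliberately swallowed.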
    for info in factory_infos:
        try:
            LLMFactoriesService.save(**info)
        except Exception:
            pass
    for info in llm_infos:
        try:
            LLMService.save(**info)
        except Exception:
            pass

    LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
    LLMService.filter_delete([LLM.fid == "Local"])
    LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
    TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
    LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
    LLMService.filter_delete([LLMService.model.fid == "QAnything"])
    TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})

    # Insert the two OpenAI embedding models for every tenant that already uses OpenAI.
    print("Start to insert 2 OpenAI embedding models...")
    tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
    for tid in tenant_ids:
        for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
            row = row.to_dict()
            row["model_type"] = LLMType.EMBEDDING.value
            row["llm_name"] = "text-embedding-3-small"
            row["used_tokens"] = 0
            try:
                TenantLLMService.save(**row)
                row = deepcopy(row)
                row["llm_name"] = "text-embedding-3-large"
                TenantLLMService.save(**row)
            except Exception:
                pass
            break
    for kb_id in KnowledgebaseService.get_all_ids():
        KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})

    """
    drop table llm;
    drop table llm_factories;
    update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
    alter table knowledgebase modify avatar longtext;
    alter table user modify avatar longtext;
    alter table dialog modify icon longtext;
    """
def add_graph_templates():
    dir = os.path.join(get_project_base_directory(), "graph", "templates")
    for fnm in os.listdir(dir):
        try:
            with open(os.path.join(dir, fnm), "r") as f:
                cnvs = json.load(f)
            try:
                CanvasTemplateService.save(**cnvs)
            except Exception:
                CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
        except Exception as e:
            print("Add graph templates error: ", e)
            print("------------", flush=True)
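

# Seeding entry point: always refresh the LLM catalogue, create the superuser
# only when no user exists yet, then (re)load the graph templates.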
def init_web_data():
    start_time = time.time()

    init_llm_factory()
    if not UserService.get_all().count():
        init_superuser()

    add_graph_templates()
    print("init web data success:{}".format(time.time() - start_time))


if __name__ == '__main__':
    init_web_db()
    init_web_data()