Вы не можете выбрать более 25 тем Темы должны начинаться с буквы или цифры, могут содержать дефисы(-) и должны содержать не более 35 символов.

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709
  1. #
  2. # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. import os
  17. import time
  18. import uuid
  19. from copy import deepcopy
  20. from api.db import LLMType, UserTenantRole
  21. from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
  22. from api.db.services import UserService
  23. from api.db.services.document_service import DocumentService
  24. from api.db.services.knowledgebase_service import KnowledgebaseService
  25. from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
  26. from api.db.services.user_service import TenantService, UserTenantService
  27. from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
  28. def init_superuser():
  29. user_info = {
  30. "id": uuid.uuid1().hex,
  31. "password": "admin",
  32. "nickname": "admin",
  33. "is_superuser": True,
  34. "email": "admin@ragflow.io",
  35. "creator": "system",
  36. "status": "1",
  37. }
  38. tenant = {
  39. "id": user_info["id"],
  40. "name": user_info["nickname"] + "‘s Kingdom",
  41. "llm_id": CHAT_MDL,
  42. "embd_id": EMBEDDING_MDL,
  43. "asr_id": ASR_MDL,
  44. "parser_ids": PARSERS,
  45. "img2txt_id": IMAGE2TEXT_MDL
  46. }
  47. usr_tenant = {
  48. "tenant_id": user_info["id"],
  49. "user_id": user_info["id"],
  50. "invited_by": user_info["id"],
  51. "role": UserTenantRole.OWNER
  52. }
  53. tenant_llm = []
  54. for llm in LLMService.query(fid=LLM_FACTORY):
  55. tenant_llm.append(
  56. {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name, "model_type": llm.model_type,
  57. "api_key": API_KEY, "api_base": LLM_BASE_URL})
  58. if not UserService.save(**user_info):
  59. print("\033[93m【ERROR】\033[0mcan't init admin.")
  60. return
  61. TenantService.insert(**tenant)
  62. UserTenantService.insert(**usr_tenant)
  63. TenantLLMService.insert_many(tenant_llm)
  64. print(
  65. "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logining is strongly recomanded.")
  66. chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
  67. msg = chat_mdl.chat(system="", history=[
  68. {"role": "user", "content": "Hello!"}], gen_conf={})
  69. if msg.find("ERROR: ") == 0:
  70. print(
  71. "\33[91m【ERROR】\33[0m: ",
  72. "'{}' dosen't work. {}".format(
  73. tenant["llm_id"],
  74. msg))
  75. embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
  76. v, c = embd_mdl.encode(["Hello!"])
  77. if c == 0:
  78. print(
  79. "\33[91m【ERROR】\33[0m:",
  80. " '{}' dosen't work!".format(
  81. tenant["embd_id"]))
  82. factory_infos = [{
  83. "name": "OpenAI",
  84. "logo": "",
  85. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  86. "status": "1",
  87. }, {
  88. "name": "Tongyi-Qianwen",
  89. "logo": "",
  90. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  91. "status": "1",
  92. }, {
  93. "name": "ZHIPU-AI",
  94. "logo": "",
  95. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  96. "status": "1",
  97. },
  98. {
  99. "name": "Ollama",
  100. "logo": "",
  101. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  102. "status": "1",
  103. }, {
  104. "name": "Moonshot",
  105. "logo": "",
  106. "tags": "LLM,TEXT EMBEDDING",
  107. "status": "1",
  108. }, {
  109. "name": "FastEmbed",
  110. "logo": "",
  111. "tags": "TEXT EMBEDDING",
  112. "status": "1",
  113. }, {
  114. "name": "Xinference",
  115. "logo": "",
  116. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  117. "status": "1",
  118. },{
  119. "name": "Youdao",
  120. "logo": "",
  121. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  122. "status": "1",
  123. },{
  124. "name": "DeepSeek",
  125. "logo": "",
  126. "tags": "LLM",
  127. "status": "1",
  128. },{
  129. "name": "VolcEngine",
  130. "logo": "",
  131. "tags": "LLM, TEXT EMBEDDING",
  132. "status": "1",
  133. },{
  134. "name": "BaiChuan",
  135. "logo": "",
  136. "tags": "LLM,TEXT EMBEDDING",
  137. "status": "1",
  138. },{
  139. "name": "Jina",
  140. "logo": "",
  141. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  142. "status": "1",
  143. },{
  144. "name": "BAAI",
  145. "logo": "",
  146. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  147. "status": "1",
  148. },{
  149. "name": "Minimax",
  150. "logo": "",
  151. "tags": "LLM,TEXT EMBEDDING",
  152. "status": "1",
  153. },{
  154. "name": "Mistral",
  155. "logo": "",
  156. "tags": "LLM,TEXT EMBEDDING",
  157. "status": "1",
  158. }
  159. # {
  160. # "name": "文心一言",
  161. # "logo": "",
  162. # "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  163. # "status": "1",
  164. # },
  165. ]
  166. def init_llm_factory():
  167. llm_infos = [
  168. # ---------------------- OpenAI ------------------------
  169. {
  170. "fid": factory_infos[0]["name"],
  171. "llm_name": "gpt-4o",
  172. "tags": "LLM,CHAT,128K",
  173. "max_tokens": 128000,
  174. "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
  175. }, {
  176. "fid": factory_infos[0]["name"],
  177. "llm_name": "gpt-3.5-turbo",
  178. "tags": "LLM,CHAT,4K",
  179. "max_tokens": 4096,
  180. "model_type": LLMType.CHAT.value
  181. }, {
  182. "fid": factory_infos[0]["name"],
  183. "llm_name": "gpt-3.5-turbo-16k-0613",
  184. "tags": "LLM,CHAT,16k",
  185. "max_tokens": 16385,
  186. "model_type": LLMType.CHAT.value
  187. }, {
  188. "fid": factory_infos[0]["name"],
  189. "llm_name": "text-embedding-ada-002",
  190. "tags": "TEXT EMBEDDING,8K",
  191. "max_tokens": 8191,
  192. "model_type": LLMType.EMBEDDING.value
  193. }, {
  194. "fid": factory_infos[0]["name"],
  195. "llm_name": "text-embedding-3-small",
  196. "tags": "TEXT EMBEDDING,8K",
  197. "max_tokens": 8191,
  198. "model_type": LLMType.EMBEDDING.value
  199. }, {
  200. "fid": factory_infos[0]["name"],
  201. "llm_name": "text-embedding-3-large",
  202. "tags": "TEXT EMBEDDING,8K",
  203. "max_tokens": 8191,
  204. "model_type": LLMType.EMBEDDING.value
  205. }, {
  206. "fid": factory_infos[0]["name"],
  207. "llm_name": "whisper-1",
  208. "tags": "SPEECH2TEXT",
  209. "max_tokens": 25 * 1024 * 1024,
  210. "model_type": LLMType.SPEECH2TEXT.value
  211. }, {
  212. "fid": factory_infos[0]["name"],
  213. "llm_name": "gpt-4",
  214. "tags": "LLM,CHAT,8K",
  215. "max_tokens": 8191,
  216. "model_type": LLMType.CHAT.value
  217. }, {
  218. "fid": factory_infos[0]["name"],
  219. "llm_name": "gpt-4-turbo",
  220. "tags": "LLM,CHAT,8K",
  221. "max_tokens": 8191,
  222. "model_type": LLMType.CHAT.value
  223. },{
  224. "fid": factory_infos[0]["name"],
  225. "llm_name": "gpt-4-32k",
  226. "tags": "LLM,CHAT,32K",
  227. "max_tokens": 32768,
  228. "model_type": LLMType.CHAT.value
  229. }, {
  230. "fid": factory_infos[0]["name"],
  231. "llm_name": "gpt-4-vision-preview",
  232. "tags": "LLM,CHAT,IMAGE2TEXT",
  233. "max_tokens": 765,
  234. "model_type": LLMType.IMAGE2TEXT.value
  235. },
  236. # ----------------------- Qwen -----------------------
  237. {
  238. "fid": factory_infos[1]["name"],
  239. "llm_name": "qwen-turbo",
  240. "tags": "LLM,CHAT,8K",
  241. "max_tokens": 8191,
  242. "model_type": LLMType.CHAT.value
  243. }, {
  244. "fid": factory_infos[1]["name"],
  245. "llm_name": "qwen-plus",
  246. "tags": "LLM,CHAT,32K",
  247. "max_tokens": 32768,
  248. "model_type": LLMType.CHAT.value
  249. }, {
  250. "fid": factory_infos[1]["name"],
  251. "llm_name": "qwen-max-1201",
  252. "tags": "LLM,CHAT,6K",
  253. "max_tokens": 5899,
  254. "model_type": LLMType.CHAT.value
  255. }, {
  256. "fid": factory_infos[1]["name"],
  257. "llm_name": "text-embedding-v2",
  258. "tags": "TEXT EMBEDDING,2K",
  259. "max_tokens": 2048,
  260. "model_type": LLMType.EMBEDDING.value
  261. }, {
  262. "fid": factory_infos[1]["name"],
  263. "llm_name": "paraformer-realtime-8k-v1",
  264. "tags": "SPEECH2TEXT",
  265. "max_tokens": 25 * 1024 * 1024,
  266. "model_type": LLMType.SPEECH2TEXT.value
  267. }, {
  268. "fid": factory_infos[1]["name"],
  269. "llm_name": "qwen-vl-max",
  270. "tags": "LLM,CHAT,IMAGE2TEXT",
  271. "max_tokens": 765,
  272. "model_type": LLMType.IMAGE2TEXT.value
  273. },
  274. # ---------------------- ZhipuAI ----------------------
  275. {
  276. "fid": factory_infos[2]["name"],
  277. "llm_name": "glm-3-turbo",
  278. "tags": "LLM,CHAT,",
  279. "max_tokens": 128 * 1000,
  280. "model_type": LLMType.CHAT.value
  281. }, {
  282. "fid": factory_infos[2]["name"],
  283. "llm_name": "glm-4",
  284. "tags": "LLM,CHAT,",
  285. "max_tokens": 128 * 1000,
  286. "model_type": LLMType.CHAT.value
  287. }, {
  288. "fid": factory_infos[2]["name"],
  289. "llm_name": "glm-4v",
  290. "tags": "LLM,CHAT,IMAGE2TEXT",
  291. "max_tokens": 2000,
  292. "model_type": LLMType.IMAGE2TEXT.value
  293. },
  294. {
  295. "fid": factory_infos[2]["name"],
  296. "llm_name": "embedding-2",
  297. "tags": "TEXT EMBEDDING",
  298. "max_tokens": 512,
  299. "model_type": LLMType.EMBEDDING.value
  300. },
  301. # ------------------------ Moonshot -----------------------
  302. {
  303. "fid": factory_infos[4]["name"],
  304. "llm_name": "moonshot-v1-8k",
  305. "tags": "LLM,CHAT,",
  306. "max_tokens": 7900,
  307. "model_type": LLMType.CHAT.value
  308. }, {
  309. "fid": factory_infos[4]["name"],
  310. "llm_name": "moonshot-v1-32k",
  311. "tags": "LLM,CHAT,",
  312. "max_tokens": 32768,
  313. "model_type": LLMType.CHAT.value
  314. }, {
  315. "fid": factory_infos[4]["name"],
  316. "llm_name": "moonshot-v1-128k",
  317. "tags": "LLM,CHAT",
  318. "max_tokens": 128 * 1000,
  319. "model_type": LLMType.CHAT.value
  320. },
  321. # ------------------------ FastEmbed -----------------------
  322. {
  323. "fid": factory_infos[5]["name"],
  324. "llm_name": "BAAI/bge-small-en-v1.5",
  325. "tags": "TEXT EMBEDDING,",
  326. "max_tokens": 512,
  327. "model_type": LLMType.EMBEDDING.value
  328. }, {
  329. "fid": factory_infos[5]["name"],
  330. "llm_name": "BAAI/bge-small-zh-v1.5",
  331. "tags": "TEXT EMBEDDING,",
  332. "max_tokens": 512,
  333. "model_type": LLMType.EMBEDDING.value
  334. }, {
  335. }, {
  336. "fid": factory_infos[5]["name"],
  337. "llm_name": "BAAI/bge-base-en-v1.5",
  338. "tags": "TEXT EMBEDDING,",
  339. "max_tokens": 512,
  340. "model_type": LLMType.EMBEDDING.value
  341. }, {
  342. }, {
  343. "fid": factory_infos[5]["name"],
  344. "llm_name": "BAAI/bge-large-en-v1.5",
  345. "tags": "TEXT EMBEDDING,",
  346. "max_tokens": 512,
  347. "model_type": LLMType.EMBEDDING.value
  348. }, {
  349. "fid": factory_infos[5]["name"],
  350. "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
  351. "tags": "TEXT EMBEDDING,",
  352. "max_tokens": 512,
  353. "model_type": LLMType.EMBEDDING.value
  354. }, {
  355. "fid": factory_infos[5]["name"],
  356. "llm_name": "nomic-ai/nomic-embed-text-v1.5",
  357. "tags": "TEXT EMBEDDING,",
  358. "max_tokens": 8192,
  359. "model_type": LLMType.EMBEDDING.value
  360. }, {
  361. "fid": factory_infos[5]["name"],
  362. "llm_name": "jinaai/jina-embeddings-v2-small-en",
  363. "tags": "TEXT EMBEDDING,",
  364. "max_tokens": 2147483648,
  365. "model_type": LLMType.EMBEDDING.value
  366. }, {
  367. "fid": factory_infos[5]["name"],
  368. "llm_name": "jinaai/jina-embeddings-v2-base-en",
  369. "tags": "TEXT EMBEDDING,",
  370. "max_tokens": 2147483648,
  371. "model_type": LLMType.EMBEDDING.value
  372. },
  373. # ------------------------ Youdao -----------------------
  374. {
  375. "fid": factory_infos[7]["name"],
  376. "llm_name": "maidalun1020/bce-embedding-base_v1",
  377. "tags": "TEXT EMBEDDING,",
  378. "max_tokens": 512,
  379. "model_type": LLMType.EMBEDDING.value
  380. },
  381. {
  382. "fid": factory_infos[7]["name"],
  383. "llm_name": "maidalun1020/bce-reranker-base_v1",
  384. "tags": "RE-RANK, 512",
  385. "max_tokens": 512,
  386. "model_type": LLMType.RERANK.value
  387. },
  388. # ------------------------ DeepSeek -----------------------
  389. {
  390. "fid": factory_infos[8]["name"],
  391. "llm_name": "deepseek-chat",
  392. "tags": "LLM,CHAT,",
  393. "max_tokens": 32768,
  394. "model_type": LLMType.CHAT.value
  395. },
  396. {
  397. "fid": factory_infos[8]["name"],
  398. "llm_name": "deepseek-coder",
  399. "tags": "LLM,CHAT,",
  400. "max_tokens": 16385,
  401. "model_type": LLMType.CHAT.value
  402. },
  403. # ------------------------ VolcEngine -----------------------
  404. {
  405. "fid": factory_infos[9]["name"],
  406. "llm_name": "Skylark2-pro-32k",
  407. "tags": "LLM,CHAT,32k",
  408. "max_tokens": 32768,
  409. "model_type": LLMType.CHAT.value
  410. },
  411. {
  412. "fid": factory_infos[9]["name"],
  413. "llm_name": "Skylark2-pro-4k",
  414. "tags": "LLM,CHAT,4k",
  415. "max_tokens": 4096,
  416. "model_type": LLMType.CHAT.value
  417. },
  418. # ------------------------ BaiChuan -----------------------
  419. {
  420. "fid": factory_infos[10]["name"],
  421. "llm_name": "Baichuan2-Turbo",
  422. "tags": "LLM,CHAT,32K",
  423. "max_tokens": 32768,
  424. "model_type": LLMType.CHAT.value
  425. },
  426. {
  427. "fid": factory_infos[10]["name"],
  428. "llm_name": "Baichuan2-Turbo-192k",
  429. "tags": "LLM,CHAT,192K",
  430. "max_tokens": 196608,
  431. "model_type": LLMType.CHAT.value
  432. },
  433. {
  434. "fid": factory_infos[10]["name"],
  435. "llm_name": "Baichuan3-Turbo",
  436. "tags": "LLM,CHAT,32K",
  437. "max_tokens": 32768,
  438. "model_type": LLMType.CHAT.value
  439. },
  440. {
  441. "fid": factory_infos[10]["name"],
  442. "llm_name": "Baichuan3-Turbo-128k",
  443. "tags": "LLM,CHAT,128K",
  444. "max_tokens": 131072,
  445. "model_type": LLMType.CHAT.value
  446. },
  447. {
  448. "fid": factory_infos[10]["name"],
  449. "llm_name": "Baichuan4",
  450. "tags": "LLM,CHAT,128K",
  451. "max_tokens": 131072,
  452. "model_type": LLMType.CHAT.value
  453. },
  454. {
  455. "fid": factory_infos[10]["name"],
  456. "llm_name": "Baichuan-Text-Embedding",
  457. "tags": "TEXT EMBEDDING",
  458. "max_tokens": 512,
  459. "model_type": LLMType.EMBEDDING.value
  460. },
  461. # ------------------------ Jina -----------------------
  462. {
  463. "fid": factory_infos[11]["name"],
  464. "llm_name": "jina-reranker-v1-base-en",
  465. "tags": "RE-RANK,8k",
  466. "max_tokens": 8196,
  467. "model_type": LLMType.RERANK.value
  468. },
  469. {
  470. "fid": factory_infos[11]["name"],
  471. "llm_name": "jina-reranker-v1-turbo-en",
  472. "tags": "RE-RANK,8k",
  473. "max_tokens": 8196,
  474. "model_type": LLMType.RERANK.value
  475. },
  476. {
  477. "fid": factory_infos[11]["name"],
  478. "llm_name": "jina-reranker-v1-tiny-en",
  479. "tags": "RE-RANK,8k",
  480. "max_tokens": 8196,
  481. "model_type": LLMType.RERANK.value
  482. },
  483. {
  484. "fid": factory_infos[11]["name"],
  485. "llm_name": "jina-colbert-v1-en",
  486. "tags": "RE-RANK,8k",
  487. "max_tokens": 8196,
  488. "model_type": LLMType.RERANK.value
  489. },
  490. {
  491. "fid": factory_infos[11]["name"],
  492. "llm_name": "jina-embeddings-v2-base-en",
  493. "tags": "TEXT EMBEDDING",
  494. "max_tokens": 8196,
  495. "model_type": LLMType.EMBEDDING.value
  496. },
  497. {
  498. "fid": factory_infos[11]["name"],
  499. "llm_name": "jina-embeddings-v2-base-de",
  500. "tags": "TEXT EMBEDDING",
  501. "max_tokens": 8196,
  502. "model_type": LLMType.EMBEDDING.value
  503. },
  504. {
  505. "fid": factory_infos[11]["name"],
  506. "llm_name": "jina-embeddings-v2-base-es",
  507. "tags": "TEXT EMBEDDING",
  508. "max_tokens": 8196,
  509. "model_type": LLMType.EMBEDDING.value
  510. },
  511. {
  512. "fid": factory_infos[11]["name"],
  513. "llm_name": "jina-embeddings-v2-base-code",
  514. "tags": "TEXT EMBEDDING",
  515. "max_tokens": 8196,
  516. "model_type": LLMType.EMBEDDING.value
  517. },
  518. {
  519. "fid": factory_infos[11]["name"],
  520. "llm_name": "jina-embeddings-v2-base-zh",
  521. "tags": "TEXT EMBEDDING",
  522. "max_tokens": 8196,
  523. "model_type": LLMType.EMBEDDING.value
  524. },
  525. # ------------------------ BAAI -----------------------
  526. {
  527. "fid": factory_infos[12]["name"],
  528. "llm_name": "BAAI/bge-large-zh-v1.5",
  529. "tags": "TEXT EMBEDDING,",
  530. "max_tokens": 1024,
  531. "model_type": LLMType.EMBEDDING.value
  532. },
  533. {
  534. "fid": factory_infos[12]["name"],
  535. "llm_name": "BAAI/bge-reranker-v2-m3",
  536. "tags": "RE-RANK,2k",
  537. "max_tokens": 2048,
  538. "model_type": LLMType.RERANK.value
  539. },
  540. # ------------------------ Minimax -----------------------
  541. {
  542. "fid": factory_infos[13]["name"],
  543. "llm_name": "abab6.5-chat",
  544. "tags": "LLM,CHAT,8k",
  545. "max_tokens": 8192,
  546. "model_type": LLMType.CHAT.value
  547. },
  548. {
  549. "fid": factory_infos[13]["name"],
  550. "llm_name": "abab6.5s-chat",
  551. "tags": "LLM,CHAT,245k",
  552. "max_tokens": 245760,
  553. "model_type": LLMType.CHAT.value
  554. },
  555. {
  556. "fid": factory_infos[13]["name"],
  557. "llm_name": "abab6.5t-chat",
  558. "tags": "LLM,CHAT,8k",
  559. "max_tokens": 8192,
  560. "model_type": LLMType.CHAT.value
  561. },
  562. {
  563. "fid": factory_infos[13]["name"],
  564. "llm_name": "abab6.5g-chat",
  565. "tags": "LLM,CHAT,8k",
  566. "max_tokens": 8192,
  567. "model_type": LLMType.CHAT.value
  568. },
  569. {
  570. "fid": factory_infos[13]["name"],
  571. "llm_name": "abab5.5-chat",
  572. "tags": "LLM,CHAT,16k",
  573. "max_tokens": 16384,
  574. "model_type": LLMType.CHAT.value
  575. },
  576. {
  577. "fid": factory_infos[13]["name"],
  578. "llm_name": "abab5.5s-chat",
  579. "tags": "LLM,CHAT,8k",
  580. "max_tokens": 8192,
  581. "model_type": LLMType.CHAT.value
  582. },
  583. # ------------------------ Mistral -----------------------
  584. {
  585. "fid": factory_infos[14]["name"],
  586. "llm_name": "open-mixtral-8x22b",
  587. "tags": "LLM,CHAT,64k",
  588. "max_tokens": 64000,
  589. "model_type": LLMType.CHAT.value
  590. },
  591. {
  592. "fid": factory_infos[14]["name"],
  593. "llm_name": "open-mixtral-8x7b",
  594. "tags": "LLM,CHAT,32k",
  595. "max_tokens": 32000,
  596. "model_type": LLMType.CHAT.value
  597. },
  598. {
  599. "fid": factory_infos[14]["name"],
  600. "llm_name": "open-mistral-7b",
  601. "tags": "LLM,CHAT,32k",
  602. "max_tokens": 32000,
  603. "model_type": LLMType.CHAT.value
  604. },
  605. {
  606. "fid": factory_infos[14]["name"],
  607. "llm_name": "mistral-large-latest",
  608. "tags": "LLM,CHAT,32k",
  609. "max_tokens": 32000,
  610. "model_type": LLMType.CHAT.value
  611. },
  612. {
  613. "fid": factory_infos[14]["name"],
  614. "llm_name": "mistral-small-latest",
  615. "tags": "LLM,CHAT,32k",
  616. "max_tokens": 32000,
  617. "model_type": LLMType.CHAT.value
  618. },
  619. {
  620. "fid": factory_infos[14]["name"],
  621. "llm_name": "mistral-medium-latest",
  622. "tags": "LLM,CHAT,32k",
  623. "max_tokens": 32000,
  624. "model_type": LLMType.CHAT.value
  625. },
  626. {
  627. "fid": factory_infos[14]["name"],
  628. "llm_name": "codestral-latest",
  629. "tags": "LLM,CHAT,32k",
  630. "max_tokens": 32000,
  631. "model_type": LLMType.CHAT.value
  632. },
  633. {
  634. "fid": factory_infos[14]["name"],
  635. "llm_name": "mistral-embed",
  636. "tags": "LLM,CHAT,8k",
  637. "max_tokens": 8192,
  638. "model_type": LLMType.EMBEDDING
  639. },
  640. ]
  641. for info in factory_infos:
  642. try:
  643. LLMFactoriesService.save(**info)
  644. except Exception as e:
  645. pass
  646. for info in llm_infos:
  647. try:
  648. LLMService.save(**info)
  649. except Exception as e:
  650. pass
  651. LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
  652. LLMService.filter_delete([LLM.fid == "Local"])
  653. LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
  654. TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
  655. LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
  656. LLMService.filter_delete([LLMService.model.fid == "QAnything"])
  657. TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})
  658. ## insert openai two embedding models to the current openai user.
  659. print("Start to insert 2 OpenAI embedding models...")
  660. tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
  661. for tid in tenant_ids:
  662. for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
  663. row = row.to_dict()
  664. row["model_type"] = LLMType.EMBEDDING.value
  665. row["llm_name"] = "text-embedding-3-small"
  666. row["used_tokens"] = 0
  667. try:
  668. TenantLLMService.save(**row)
  669. row = deepcopy(row)
  670. row["llm_name"] = "text-embedding-3-large"
  671. TenantLLMService.save(**row)
  672. except Exception as e:
  673. pass
  674. break
  675. for kb_id in KnowledgebaseService.get_all_ids():
  676. KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})
# NOTE: historical manual-migration SQL kept for reference only; as a bare
# string literal this is a no-op at runtime.
"""
drop table llm;
drop table llm_factories;
update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
alter table knowledgebase modify avatar longtext;
alter table user modify avatar longtext;
alter table dialog modify icon longtext;
"""
  685. def init_web_data():
  686. start_time = time.time()
  687. init_llm_factory()
  688. if not UserService.get_all().count():
  689. init_superuser()
  690. print("init web data success:{}".format(time.time() - start_time))
# Script entry point: create the tables first, then seed them.
if __name__ == '__main__':
    init_web_db()
    init_web_data()