init_data.py
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import time
import uuid
from copy import deepcopy

from api.db import LLMType, UserTenantRole
from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
from api.db.services import UserService
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
from api.db.services.user_service import TenantService, UserTenantService
from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
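

# init_superuser() creates the default admin account (admin@ragflow.io), a tenant
# owned by it, and a TenantLLM row for every model offered by the factory configured
# in LLM_FACTORY; it then smoke-tests the configured chat and embedding models.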
def init_superuser():
    user_info = {
        "id": uuid.uuid1().hex,
        "password": "admin",
        "nickname": "admin",
        "is_superuser": True,
        "email": "admin@ragflow.io",
        "creator": "system",
        "status": "1",
    }
    tenant = {
        "id": user_info["id"],
        "name": user_info["nickname"] + "'s Kingdom",
        "llm_id": CHAT_MDL,
        "embd_id": EMBEDDING_MDL,
        "asr_id": ASR_MDL,
        "parser_ids": PARSERS,
        "img2txt_id": IMAGE2TEXT_MDL
    }
    usr_tenant = {
        "tenant_id": user_info["id"],
        "user_id": user_info["id"],
        "invited_by": user_info["id"],
        "role": UserTenantRole.OWNER
    }
    tenant_llm = []
    for llm in LLMService.query(fid=LLM_FACTORY):
        tenant_llm.append(
            {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name,
             "model_type": llm.model_type, "api_key": API_KEY, "api_base": LLM_BASE_URL})

    if not UserService.save(**user_info):
        print("\033[93m【ERROR】\033[0m can't init admin.")
        return
    TenantService.insert(**tenant)
    UserTenantService.insert(**usr_tenant)
    TenantLLMService.insert_many(tenant_llm)
    print(
        "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logging in is strongly recommended.")

    # Smoke-test the configured chat model.
    chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
    msg = chat_mdl.chat(system="", history=[
        {"role": "user", "content": "Hello!"}], gen_conf={})
    if msg.find("ERROR: ") == 0:
        print(
            "\33[91m【ERROR】\33[0m: ",
            "'{}' doesn't work. {}".format(
                tenant["llm_id"],
                msg))

    # Smoke-test the configured embedding model.
    embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
    v, c = embd_mdl.encode(["Hello!"])
    if c == 0:
        print(
            "\33[91m【ERROR】\33[0m:",
            " '{}' doesn't work!".format(
                tenant["embd_id"]))
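

# Model providers ("factories") registered at startup; init_llm_factory() below
# refers to entries in this list by index through each model's "fid" field.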
factory_infos = [{
    "name": "OpenAI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Tongyi-Qianwen",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "ZHIPU-AI",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Ollama",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Moonshot",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "FastEmbed",
    "logo": "",
    "tags": "TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Xinference",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "Youdao",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
}, {
    "name": "DeepSeek",
    "logo": "",
    "tags": "LLM",
    "status": "1",
}, {
    "name": "VolcEngine",
    "logo": "",
    "tags": "LLM, TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "BaiChuan",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}, {
    "name": "Jina",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "BAAI",
    "logo": "",
    "tags": "TEXT EMBEDDING, TEXT RE-RANK",
    "status": "1",
}, {
    "name": "Minimax",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING",
    "status": "1",
}
    # {
    #     "name": "文心一言",
    #     "logo": "",
    #     "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    #     "status": "1",
    # },
]
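

# init_llm_factory() (re)registers the factory and model catalogs, drops retired
# entries such as the "Local" factory, migrates "QAnything" records to "Youdao"
# for existing tenants, and backfills per-knowledgebase document counts.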
def init_llm_factory():
    llm_infos = [
        # ---------------------- OpenAI ------------------------
        {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4o",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 128000,
            "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo",
            "tags": "LLM,CHAT,4K",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-3.5-turbo-16k-0613",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-ada-002",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-small",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "text-embedding-3-large",
            "tags": "TEXT EMBEDDING,8K",
            "max_tokens": 8191,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "whisper-1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-32k",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[0]["name"],
            "llm_name": "gpt-4-vision-preview",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ----------------------- Qwen -----------------------
        {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-turbo",
            "tags": "LLM,CHAT,8K",
            "max_tokens": 8191,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-plus",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-max-1201",
            "tags": "LLM,CHAT,6K",
            "max_tokens": 5899,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "text-embedding-v2",
            "tags": "TEXT EMBEDDING,2K",
            "max_tokens": 2048,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "paraformer-realtime-8k-v1",
            "tags": "SPEECH2TEXT",
            "max_tokens": 25 * 1024 * 1024,
            "model_type": LLMType.SPEECH2TEXT.value
        }, {
            "fid": factory_infos[1]["name"],
            "llm_name": "qwen-vl-max",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 765,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        # ---------------------- ZhipuAI ----------------------
        {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-3-turbo",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4",
            "tags": "LLM,CHAT,",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[2]["name"],
            "llm_name": "glm-4v",
            "tags": "LLM,CHAT,IMAGE2TEXT",
            "max_tokens": 2000,
            "model_type": LLMType.IMAGE2TEXT.value
        },
        {
            "fid": factory_infos[2]["name"],
            "llm_name": "embedding-2",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Moonshot -----------------------
        {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-8k",
            "tags": "LLM,CHAT,",
            "max_tokens": 7900,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-32k",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        }, {
            "fid": factory_infos[4]["name"],
            "llm_name": "moonshot-v1-128k",
            "tags": "LLM,CHAT",
            "max_tokens": 128 * 1000,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ FastEmbed -----------------------
        {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-small-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-base-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "BAAI/bge-large-en-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "nomic-ai/nomic-embed-text-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 8192,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-small-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        }, {
            "fid": factory_infos[5]["name"],
            "llm_name": "jinaai/jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 2147483648,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Youdao -----------------------
        {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-embedding-base_v1",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[7]["name"],
            "llm_name": "maidalun1020/bce-reranker-base_v1",
            "tags": "RE-RANK, 8K",
            "max_tokens": 512,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ DeepSeek -----------------------
        {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-chat",
            "tags": "LLM,CHAT,",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[8]["name"],
            "llm_name": "deepseek-coder",
            "tags": "LLM,CHAT,",
            "max_tokens": 16385,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ VolcEngine -----------------------
        {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-32k",
            "tags": "LLM,CHAT,32k",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[9]["name"],
            "llm_name": "Skylark2-pro-4k",
            "tags": "LLM,CHAT,4k",
            "max_tokens": 4096,
            "model_type": LLMType.CHAT.value
        },
        # ------------------------ BaiChuan -----------------------
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan2-Turbo-192k",
            "tags": "LLM,CHAT,192K",
            "max_tokens": 196608,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo",
            "tags": "LLM,CHAT,32K",
            "max_tokens": 32768,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan3-Turbo-128k",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan4",
            "tags": "LLM,CHAT,128K",
            "max_tokens": 131072,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[10]["name"],
            "llm_name": "Baichuan-Text-Embedding",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 512,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ Jina -----------------------
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-base-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-turbo-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-reranker-v1-tiny-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-colbert-v1-en",
            "tags": "RE-RANK,8k",
            "max_tokens": 8196,
            "model_type": LLMType.RERANK.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-en",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-de",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-es",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-code",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[11]["name"],
            "llm_name": "jina-embeddings-v2-base-zh",
            "tags": "TEXT EMBEDDING",
            "max_tokens": 8196,
            "model_type": LLMType.EMBEDDING.value
        },
        # ------------------------ BAAI -----------------------
        {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-large-zh-v1.5",
            "tags": "TEXT EMBEDDING,",
            "max_tokens": 1024,
            "model_type": LLMType.EMBEDDING.value
        },
        {
            "fid": factory_infos[12]["name"],
            "llm_name": "BAAI/bge-reranker-v2-m3",
            "tags": "RE-RANK,2k",
            "max_tokens": 2048,
            "model_type": LLMType.RERANK.value
        },
        # ------------------------ Minimax -----------------------
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5s-chat",
            "tags": "LLM,CHAT,245k",
            "max_tokens": 245760,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5t-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab6.5g-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5-chat",
            "tags": "LLM,CHAT,16k",
            "max_tokens": 16384,
            "model_type": LLMType.CHAT.value
        },
        {
            "fid": factory_infos[13]["name"],
            "llm_name": "abab5.5s-chat",
            "tags": "LLM,CHAT,8k",
            "max_tokens": 8192,
            "model_type": LLMType.CHAT.value
        },
    ]
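
    # Upsert both catalogs; failures (typically duplicates on re-initialization) are ignored.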
    for info in factory_infos:
        try:
            LLMFactoriesService.save(**info)
        except Exception as e:
            pass
    for info in llm_infos:
        try:
            LLMService.save(**info)
        except Exception as e:
            pass

    # Remove retired factories/models and migrate "QAnything" records to "Youdao".
    LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
    LLMService.filter_delete([LLM.fid == "Local"])
    LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
    TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
    LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
    LLMService.filter_delete([LLMService.model.fid == "QAnything"])
    TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})

    # Add the two OpenAI embedding models for every tenant that already uses OpenAI.
    print("Start to insert 2 OpenAI embedding models...")
    tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
    for tid in tenant_ids:
        for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
            row = row.to_dict()
            row["model_type"] = LLMType.EMBEDDING.value
            row["llm_name"] = "text-embedding-3-small"
            row["used_tokens"] = 0
            try:
                TenantLLMService.save(**row)
                row = deepcopy(row)
                row["llm_name"] = "text-embedding-3-large"
                TenantLLMService.save(**row)
            except Exception as e:
                pass
            break

    for kb_id in KnowledgebaseService.get_all_ids():
        KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})

    """
    drop table llm;
    drop table llm_factories;
    update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
    alter table knowledgebase modify avatar longtext;
    alter table user modify avatar longtext;
    alter table dialog modify icon longtext;
    """
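

# init_web_data() is the start-up entry point: it refreshes the LLM catalog and,
# when no user exists yet, creates the default superuser.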
def init_web_data():
    start_time = time.time()

    init_llm_factory()
    if not UserService.get_all().count():
        init_superuser()

    print("init web data success:{}".format(time.time() - start_time))


if __name__ == '__main__':
    init_web_db()
    init_web_data()
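
# A sketch of how this module is typically run, assuming it lives at
# api/db/init_data.py as the imports suggest (path is an assumption):
#
#     python -m api.db.init_data
#
# which first creates the database tables via init_web_db() and then seeds the
# default factories, models, and (if needed) the superuser account.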