#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
import time
import uuid
from copy import deepcopy

from api.db import LLMType, UserTenantRole
from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
from api.db.services import UserService
from api.db.services.canvas_service import CanvasTemplateService
from api.db.services.document_service import DocumentService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
from api.db.services.user_service import TenantService, UserTenantService
from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
from api.utils.file_utils import get_project_base_directory
  31. def init_superuser():
  32. user_info = {
  33. "id": uuid.uuid1().hex,
  34. "password": "admin",
  35. "nickname": "admin",
  36. "is_superuser": True,
  37. "email": "admin@ragflow.io",
  38. "creator": "system",
  39. "status": "1",
  40. }
  41. tenant = {
  42. "id": user_info["id"],
  43. "name": user_info["nickname"] + "‘s Kingdom",
  44. "llm_id": CHAT_MDL,
  45. "embd_id": EMBEDDING_MDL,
  46. "asr_id": ASR_MDL,
  47. "parser_ids": PARSERS,
  48. "img2txt_id": IMAGE2TEXT_MDL
  49. }
  50. usr_tenant = {
  51. "tenant_id": user_info["id"],
  52. "user_id": user_info["id"],
  53. "invited_by": user_info["id"],
  54. "role": UserTenantRole.OWNER
  55. }
  56. tenant_llm = []
  57. for llm in LLMService.query(fid=LLM_FACTORY):
  58. tenant_llm.append(
  59. {"tenant_id": user_info["id"], "llm_factory": LLM_FACTORY, "llm_name": llm.llm_name, "model_type": llm.model_type,
  60. "api_key": API_KEY, "api_base": LLM_BASE_URL})
  61. if not UserService.save(**user_info):
  62. print("\033[93m【ERROR】\033[0mcan't init admin.")
  63. return
  64. TenantService.insert(**tenant)
  65. UserTenantService.insert(**usr_tenant)
  66. TenantLLMService.insert_many(tenant_llm)
  67. print(
  68. "【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logining is strongly recomanded.")
  69. chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
  70. msg = chat_mdl.chat(system="", history=[
  71. {"role": "user", "content": "Hello!"}], gen_conf={})
  72. if msg.find("ERROR: ") == 0:
  73. print(
  74. "\33[91m【ERROR】\33[0m: ",
  75. "'{}' dosen't work. {}".format(
  76. tenant["llm_id"],
  77. msg))
  78. embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
  79. v, c = embd_mdl.encode(["Hello!"])
  80. if c == 0:
  81. print(
  82. "\33[91m【ERROR】\33[0m:",
  83. " '{}' dosen't work!".format(
  84. tenant["embd_id"]))
  85. factory_infos = [{
  86. "name": "OpenAI",
  87. "logo": "",
  88. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  89. "status": "1",
  90. }, {
  91. "name": "Tongyi-Qianwen",
  92. "logo": "",
  93. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  94. "status": "1",
  95. }, {
  96. "name": "ZHIPU-AI",
  97. "logo": "",
  98. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  99. "status": "1",
  100. },
  101. {
  102. "name": "Ollama",
  103. "logo": "",
  104. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  105. "status": "1",
  106. }, {
  107. "name": "Moonshot",
  108. "logo": "",
  109. "tags": "LLM,TEXT EMBEDDING",
  110. "status": "1",
  111. }, {
  112. "name": "FastEmbed",
  113. "logo": "",
  114. "tags": "TEXT EMBEDDING",
  115. "status": "1",
  116. }, {
  117. "name": "Xinference",
  118. "logo": "",
  119. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  120. "status": "1",
  121. },{
  122. "name": "Youdao",
  123. "logo": "",
  124. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  125. "status": "1",
  126. },{
  127. "name": "DeepSeek",
  128. "logo": "",
  129. "tags": "LLM",
  130. "status": "1",
  131. },{
  132. "name": "VolcEngine",
  133. "logo": "",
  134. "tags": "LLM, TEXT EMBEDDING",
  135. "status": "1",
  136. },{
  137. "name": "BaiChuan",
  138. "logo": "",
  139. "tags": "LLM,TEXT EMBEDDING",
  140. "status": "1",
  141. },{
  142. "name": "Jina",
  143. "logo": "",
  144. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  145. "status": "1",
  146. },{
  147. "name": "BAAI",
  148. "logo": "",
  149. "tags": "TEXT EMBEDDING, TEXT RE-RANK",
  150. "status": "1",
  151. },{
  152. "name": "MiniMax",
  153. "logo": "",
  154. "tags": "LLM,TEXT EMBEDDING",
  155. "status": "1",
  156. },{
  157. "name": "Mistral",
  158. "logo": "",
  159. "tags": "LLM,TEXT EMBEDDING",
  160. "status": "1",
  161. },{
  162. "name": "Azure-OpenAI",
  163. "logo": "",
  164. "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  165. "status": "1",
  166. },{
  167. "name": "Bedrock",
  168. "logo": "",
  169. "tags": "LLM,TEXT EMBEDDING",
  170. "status": "1",
  171. }
  172. # {
  173. # "name": "文心一言",
  174. # "logo": "",
  175. # "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
  176. # "status": "1",
  177. # },
  178. ]
  179. def init_llm_factory():
  180. llm_infos = [
  181. # ---------------------- OpenAI ------------------------
  182. {
  183. "fid": factory_infos[0]["name"],
  184. "llm_name": "gpt-4o",
  185. "tags": "LLM,CHAT,128K",
  186. "max_tokens": 128000,
  187. "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
  188. }, {
  189. "fid": factory_infos[0]["name"],
  190. "llm_name": "gpt-3.5-turbo",
  191. "tags": "LLM,CHAT,4K",
  192. "max_tokens": 4096,
  193. "model_type": LLMType.CHAT.value
  194. }, {
  195. "fid": factory_infos[0]["name"],
  196. "llm_name": "gpt-3.5-turbo-16k-0613",
  197. "tags": "LLM,CHAT,16k",
  198. "max_tokens": 16385,
  199. "model_type": LLMType.CHAT.value
  200. }, {
  201. "fid": factory_infos[0]["name"],
  202. "llm_name": "text-embedding-ada-002",
  203. "tags": "TEXT EMBEDDING,8K",
  204. "max_tokens": 8191,
  205. "model_type": LLMType.EMBEDDING.value
  206. }, {
  207. "fid": factory_infos[0]["name"],
  208. "llm_name": "text-embedding-3-small",
  209. "tags": "TEXT EMBEDDING,8K",
  210. "max_tokens": 8191,
  211. "model_type": LLMType.EMBEDDING.value
  212. }, {
  213. "fid": factory_infos[0]["name"],
  214. "llm_name": "text-embedding-3-large",
  215. "tags": "TEXT EMBEDDING,8K",
  216. "max_tokens": 8191,
  217. "model_type": LLMType.EMBEDDING.value
  218. }, {
  219. "fid": factory_infos[0]["name"],
  220. "llm_name": "whisper-1",
  221. "tags": "SPEECH2TEXT",
  222. "max_tokens": 25 * 1024 * 1024,
  223. "model_type": LLMType.SPEECH2TEXT.value
  224. }, {
  225. "fid": factory_infos[0]["name"],
  226. "llm_name": "gpt-4",
  227. "tags": "LLM,CHAT,8K",
  228. "max_tokens": 8191,
  229. "model_type": LLMType.CHAT.value
  230. }, {
  231. "fid": factory_infos[0]["name"],
  232. "llm_name": "gpt-4-turbo",
  233. "tags": "LLM,CHAT,8K",
  234. "max_tokens": 8191,
  235. "model_type": LLMType.CHAT.value
  236. },{
  237. "fid": factory_infos[0]["name"],
  238. "llm_name": "gpt-4-32k",
  239. "tags": "LLM,CHAT,32K",
  240. "max_tokens": 32768,
  241. "model_type": LLMType.CHAT.value
  242. }, {
  243. "fid": factory_infos[0]["name"],
  244. "llm_name": "gpt-4-vision-preview",
  245. "tags": "LLM,CHAT,IMAGE2TEXT",
  246. "max_tokens": 765,
  247. "model_type": LLMType.IMAGE2TEXT.value
  248. },
  249. # ----------------------- Qwen -----------------------
  250. {
  251. "fid": factory_infos[1]["name"],
  252. "llm_name": "qwen-turbo",
  253. "tags": "LLM,CHAT,8K",
  254. "max_tokens": 8191,
  255. "model_type": LLMType.CHAT.value
  256. }, {
  257. "fid": factory_infos[1]["name"],
  258. "llm_name": "qwen-plus",
  259. "tags": "LLM,CHAT,32K",
  260. "max_tokens": 32768,
  261. "model_type": LLMType.CHAT.value
  262. }, {
  263. "fid": factory_infos[1]["name"],
  264. "llm_name": "qwen-max-1201",
  265. "tags": "LLM,CHAT,6K",
  266. "max_tokens": 5899,
  267. "model_type": LLMType.CHAT.value
  268. }, {
  269. "fid": factory_infos[1]["name"],
  270. "llm_name": "text-embedding-v2",
  271. "tags": "TEXT EMBEDDING,2K",
  272. "max_tokens": 2048,
  273. "model_type": LLMType.EMBEDDING.value
  274. }, {
  275. "fid": factory_infos[1]["name"],
  276. "llm_name": "paraformer-realtime-8k-v1",
  277. "tags": "SPEECH2TEXT",
  278. "max_tokens": 25 * 1024 * 1024,
  279. "model_type": LLMType.SPEECH2TEXT.value
  280. }, {
  281. "fid": factory_infos[1]["name"],
  282. "llm_name": "qwen-vl-max",
  283. "tags": "LLM,CHAT,IMAGE2TEXT",
  284. "max_tokens": 765,
  285. "model_type": LLMType.IMAGE2TEXT.value
  286. },
  287. # ---------------------- ZhipuAI ----------------------
  288. {
  289. "fid": factory_infos[2]["name"],
  290. "llm_name": "glm-3-turbo",
  291. "tags": "LLM,CHAT,",
  292. "max_tokens": 128 * 1000,
  293. "model_type": LLMType.CHAT.value
  294. }, {
  295. "fid": factory_infos[2]["name"],
  296. "llm_name": "glm-4",
  297. "tags": "LLM,CHAT,",
  298. "max_tokens": 128 * 1000,
  299. "model_type": LLMType.CHAT.value
  300. }, {
  301. "fid": factory_infos[2]["name"],
  302. "llm_name": "glm-4v",
  303. "tags": "LLM,CHAT,IMAGE2TEXT",
  304. "max_tokens": 2000,
  305. "model_type": LLMType.IMAGE2TEXT.value
  306. },
  307. {
  308. "fid": factory_infos[2]["name"],
  309. "llm_name": "embedding-2",
  310. "tags": "TEXT EMBEDDING",
  311. "max_tokens": 512,
  312. "model_type": LLMType.EMBEDDING.value
  313. },
  314. # ------------------------ Moonshot -----------------------
  315. {
  316. "fid": factory_infos[4]["name"],
  317. "llm_name": "moonshot-v1-8k",
  318. "tags": "LLM,CHAT,",
  319. "max_tokens": 7900,
  320. "model_type": LLMType.CHAT.value
  321. }, {
  322. "fid": factory_infos[4]["name"],
  323. "llm_name": "moonshot-v1-32k",
  324. "tags": "LLM,CHAT,",
  325. "max_tokens": 32768,
  326. "model_type": LLMType.CHAT.value
  327. }, {
  328. "fid": factory_infos[4]["name"],
  329. "llm_name": "moonshot-v1-128k",
  330. "tags": "LLM,CHAT",
  331. "max_tokens": 128 * 1000,
  332. "model_type": LLMType.CHAT.value
  333. },
  334. # ------------------------ FastEmbed -----------------------
  335. {
  336. "fid": factory_infos[5]["name"],
  337. "llm_name": "BAAI/bge-small-en-v1.5",
  338. "tags": "TEXT EMBEDDING,",
  339. "max_tokens": 512,
  340. "model_type": LLMType.EMBEDDING.value
  341. }, {
  342. "fid": factory_infos[5]["name"],
  343. "llm_name": "BAAI/bge-small-zh-v1.5",
  344. "tags": "TEXT EMBEDDING,",
  345. "max_tokens": 512,
  346. "model_type": LLMType.EMBEDDING.value
  347. }, {
  348. }, {
  349. "fid": factory_infos[5]["name"],
  350. "llm_name": "BAAI/bge-base-en-v1.5",
  351. "tags": "TEXT EMBEDDING,",
  352. "max_tokens": 512,
  353. "model_type": LLMType.EMBEDDING.value
  354. }, {
  355. }, {
  356. "fid": factory_infos[5]["name"],
  357. "llm_name": "BAAI/bge-large-en-v1.5",
  358. "tags": "TEXT EMBEDDING,",
  359. "max_tokens": 512,
  360. "model_type": LLMType.EMBEDDING.value
  361. }, {
  362. "fid": factory_infos[5]["name"],
  363. "llm_name": "sentence-transformers/all-MiniLM-L6-v2",
  364. "tags": "TEXT EMBEDDING,",
  365. "max_tokens": 512,
  366. "model_type": LLMType.EMBEDDING.value
  367. }, {
  368. "fid": factory_infos[5]["name"],
  369. "llm_name": "nomic-ai/nomic-embed-text-v1.5",
  370. "tags": "TEXT EMBEDDING,",
  371. "max_tokens": 8192,
  372. "model_type": LLMType.EMBEDDING.value
  373. }, {
  374. "fid": factory_infos[5]["name"],
  375. "llm_name": "jinaai/jina-embeddings-v2-small-en",
  376. "tags": "TEXT EMBEDDING,",
  377. "max_tokens": 2147483648,
  378. "model_type": LLMType.EMBEDDING.value
  379. }, {
  380. "fid": factory_infos[5]["name"],
  381. "llm_name": "jinaai/jina-embeddings-v2-base-en",
  382. "tags": "TEXT EMBEDDING,",
  383. "max_tokens": 2147483648,
  384. "model_type": LLMType.EMBEDDING.value
  385. },
  386. # ------------------------ Youdao -----------------------
  387. {
  388. "fid": factory_infos[7]["name"],
  389. "llm_name": "maidalun1020/bce-embedding-base_v1",
  390. "tags": "TEXT EMBEDDING,",
  391. "max_tokens": 512,
  392. "model_type": LLMType.EMBEDDING.value
  393. },
  394. {
  395. "fid": factory_infos[7]["name"],
  396. "llm_name": "maidalun1020/bce-reranker-base_v1",
  397. "tags": "RE-RANK, 512",
  398. "max_tokens": 512,
  399. "model_type": LLMType.RERANK.value
  400. },
  401. # ------------------------ DeepSeek -----------------------
  402. {
  403. "fid": factory_infos[8]["name"],
  404. "llm_name": "deepseek-chat",
  405. "tags": "LLM,CHAT,",
  406. "max_tokens": 32768,
  407. "model_type": LLMType.CHAT.value
  408. },
  409. {
  410. "fid": factory_infos[8]["name"],
  411. "llm_name": "deepseek-coder",
  412. "tags": "LLM,CHAT,",
  413. "max_tokens": 16385,
  414. "model_type": LLMType.CHAT.value
  415. },
  416. # ------------------------ VolcEngine -----------------------
  417. {
  418. "fid": factory_infos[9]["name"],
  419. "llm_name": "Skylark2-pro-32k",
  420. "tags": "LLM,CHAT,32k",
  421. "max_tokens": 32768,
  422. "model_type": LLMType.CHAT.value
  423. },
  424. {
  425. "fid": factory_infos[9]["name"],
  426. "llm_name": "Skylark2-pro-4k",
  427. "tags": "LLM,CHAT,4k",
  428. "max_tokens": 4096,
  429. "model_type": LLMType.CHAT.value
  430. },
  431. # ------------------------ BaiChuan -----------------------
  432. {
  433. "fid": factory_infos[10]["name"],
  434. "llm_name": "Baichuan2-Turbo",
  435. "tags": "LLM,CHAT,32K",
  436. "max_tokens": 32768,
  437. "model_type": LLMType.CHAT.value
  438. },
  439. {
  440. "fid": factory_infos[10]["name"],
  441. "llm_name": "Baichuan2-Turbo-192k",
  442. "tags": "LLM,CHAT,192K",
  443. "max_tokens": 196608,
  444. "model_type": LLMType.CHAT.value
  445. },
  446. {
  447. "fid": factory_infos[10]["name"],
  448. "llm_name": "Baichuan3-Turbo",
  449. "tags": "LLM,CHAT,32K",
  450. "max_tokens": 32768,
  451. "model_type": LLMType.CHAT.value
  452. },
  453. {
  454. "fid": factory_infos[10]["name"],
  455. "llm_name": "Baichuan3-Turbo-128k",
  456. "tags": "LLM,CHAT,128K",
  457. "max_tokens": 131072,
  458. "model_type": LLMType.CHAT.value
  459. },
  460. {
  461. "fid": factory_infos[10]["name"],
  462. "llm_name": "Baichuan4",
  463. "tags": "LLM,CHAT,128K",
  464. "max_tokens": 131072,
  465. "model_type": LLMType.CHAT.value
  466. },
  467. {
  468. "fid": factory_infos[10]["name"],
  469. "llm_name": "Baichuan-Text-Embedding",
  470. "tags": "TEXT EMBEDDING",
  471. "max_tokens": 512,
  472. "model_type": LLMType.EMBEDDING.value
  473. },
  474. # ------------------------ Jina -----------------------
  475. {
  476. "fid": factory_infos[11]["name"],
  477. "llm_name": "jina-reranker-v1-base-en",
  478. "tags": "RE-RANK,8k",
  479. "max_tokens": 8196,
  480. "model_type": LLMType.RERANK.value
  481. },
  482. {
  483. "fid": factory_infos[11]["name"],
  484. "llm_name": "jina-reranker-v1-turbo-en",
  485. "tags": "RE-RANK,8k",
  486. "max_tokens": 8196,
  487. "model_type": LLMType.RERANK.value
  488. },
  489. {
  490. "fid": factory_infos[11]["name"],
  491. "llm_name": "jina-reranker-v1-tiny-en",
  492. "tags": "RE-RANK,8k",
  493. "max_tokens": 8196,
  494. "model_type": LLMType.RERANK.value
  495. },
  496. {
  497. "fid": factory_infos[11]["name"],
  498. "llm_name": "jina-colbert-v1-en",
  499. "tags": "RE-RANK,8k",
  500. "max_tokens": 8196,
  501. "model_type": LLMType.RERANK.value
  502. },
  503. {
  504. "fid": factory_infos[11]["name"],
  505. "llm_name": "jina-embeddings-v2-base-en",
  506. "tags": "TEXT EMBEDDING",
  507. "max_tokens": 8196,
  508. "model_type": LLMType.EMBEDDING.value
  509. },
  510. {
  511. "fid": factory_infos[11]["name"],
  512. "llm_name": "jina-embeddings-v2-base-de",
  513. "tags": "TEXT EMBEDDING",
  514. "max_tokens": 8196,
  515. "model_type": LLMType.EMBEDDING.value
  516. },
  517. {
  518. "fid": factory_infos[11]["name"],
  519. "llm_name": "jina-embeddings-v2-base-es",
  520. "tags": "TEXT EMBEDDING",
  521. "max_tokens": 8196,
  522. "model_type": LLMType.EMBEDDING.value
  523. },
  524. {
  525. "fid": factory_infos[11]["name"],
  526. "llm_name": "jina-embeddings-v2-base-code",
  527. "tags": "TEXT EMBEDDING",
  528. "max_tokens": 8196,
  529. "model_type": LLMType.EMBEDDING.value
  530. },
  531. {
  532. "fid": factory_infos[11]["name"],
  533. "llm_name": "jina-embeddings-v2-base-zh",
  534. "tags": "TEXT EMBEDDING",
  535. "max_tokens": 8196,
  536. "model_type": LLMType.EMBEDDING.value
  537. },
  538. # ------------------------ BAAI -----------------------
  539. {
  540. "fid": factory_infos[12]["name"],
  541. "llm_name": "BAAI/bge-large-zh-v1.5",
  542. "tags": "TEXT EMBEDDING,",
  543. "max_tokens": 1024,
  544. "model_type": LLMType.EMBEDDING.value
  545. },
  546. {
  547. "fid": factory_infos[12]["name"],
  548. "llm_name": "BAAI/bge-reranker-v2-m3",
  549. "tags": "RE-RANK,2k",
  550. "max_tokens": 2048,
  551. "model_type": LLMType.RERANK.value
  552. },
  553. # ------------------------ Minimax -----------------------
  554. {
  555. "fid": factory_infos[13]["name"],
  556. "llm_name": "abab6.5-chat",
  557. "tags": "LLM,CHAT,8k",
  558. "max_tokens": 8192,
  559. "model_type": LLMType.CHAT.value
  560. },
  561. {
  562. "fid": factory_infos[13]["name"],
  563. "llm_name": "abab6.5s-chat",
  564. "tags": "LLM,CHAT,245k",
  565. "max_tokens": 245760,
  566. "model_type": LLMType.CHAT.value
  567. },
  568. {
  569. "fid": factory_infos[13]["name"],
  570. "llm_name": "abab6.5t-chat",
  571. "tags": "LLM,CHAT,8k",
  572. "max_tokens": 8192,
  573. "model_type": LLMType.CHAT.value
  574. },
  575. {
  576. "fid": factory_infos[13]["name"],
  577. "llm_name": "abab6.5g-chat",
  578. "tags": "LLM,CHAT,8k",
  579. "max_tokens": 8192,
  580. "model_type": LLMType.CHAT.value
  581. },
  582. {
  583. "fid": factory_infos[13]["name"],
  584. "llm_name": "abab5.5-chat",
  585. "tags": "LLM,CHAT,16k",
  586. "max_tokens": 16384,
  587. "model_type": LLMType.CHAT.value
  588. },
  589. {
  590. "fid": factory_infos[13]["name"],
  591. "llm_name": "abab5.5s-chat",
  592. "tags": "LLM,CHAT,8k",
  593. "max_tokens": 8192,
  594. "model_type": LLMType.CHAT.value
  595. },
  596. # ------------------------ Mistral -----------------------
  597. {
  598. "fid": factory_infos[14]["name"],
  599. "llm_name": "open-mixtral-8x22b",
  600. "tags": "LLM,CHAT,64k",
  601. "max_tokens": 64000,
  602. "model_type": LLMType.CHAT.value
  603. },
  604. {
  605. "fid": factory_infos[14]["name"],
  606. "llm_name": "open-mixtral-8x7b",
  607. "tags": "LLM,CHAT,32k",
  608. "max_tokens": 32000,
  609. "model_type": LLMType.CHAT.value
  610. },
  611. {
  612. "fid": factory_infos[14]["name"],
  613. "llm_name": "open-mistral-7b",
  614. "tags": "LLM,CHAT,32k",
  615. "max_tokens": 32000,
  616. "model_type": LLMType.CHAT.value
  617. },
  618. {
  619. "fid": factory_infos[14]["name"],
  620. "llm_name": "mistral-large-latest",
  621. "tags": "LLM,CHAT,32k",
  622. "max_tokens": 32000,
  623. "model_type": LLMType.CHAT.value
  624. },
  625. {
  626. "fid": factory_infos[14]["name"],
  627. "llm_name": "mistral-small-latest",
  628. "tags": "LLM,CHAT,32k",
  629. "max_tokens": 32000,
  630. "model_type": LLMType.CHAT.value
  631. },
  632. {
  633. "fid": factory_infos[14]["name"],
  634. "llm_name": "mistral-medium-latest",
  635. "tags": "LLM,CHAT,32k",
  636. "max_tokens": 32000,
  637. "model_type": LLMType.CHAT.value
  638. },
  639. {
  640. "fid": factory_infos[14]["name"],
  641. "llm_name": "codestral-latest",
  642. "tags": "LLM,CHAT,32k",
  643. "max_tokens": 32000,
  644. "model_type": LLMType.CHAT.value
  645. },
  646. {
  647. "fid": factory_infos[14]["name"],
  648. "llm_name": "mistral-embed",
  649. "tags": "LLM,CHAT,8k",
  650. "max_tokens": 8192,
  651. "model_type": LLMType.EMBEDDING
  652. },
  653. # ------------------------ Azure OpenAI -----------------------
  654. # Please ensure the llm_name is the same as the name in Azure
  655. # OpenAI deployment name (e.g., azure-gpt-4o). And the llm_name
  656. # must different from the OpenAI llm_name
  657. #
  658. # Each model must be deployed in the Azure OpenAI service, otherwise,
  659. # you will receive an error message 'The API deployment for
  660. # this resource does not exist'
  661. {
  662. "fid": factory_infos[15]["name"],
  663. "llm_name": "azure-gpt-4o",
  664. "tags": "LLM,CHAT,128K",
  665. "max_tokens": 128000,
  666. "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value
  667. }, {
  668. "fid": factory_infos[15]["name"],
  669. "llm_name": "azure-gpt-35-turbo",
  670. "tags": "LLM,CHAT,4K",
  671. "max_tokens": 4096,
  672. "model_type": LLMType.CHAT.value
  673. }, {
  674. "fid": factory_infos[15]["name"],
  675. "llm_name": "azure-gpt-35-turbo-16k",
  676. "tags": "LLM,CHAT,16k",
  677. "max_tokens": 16385,
  678. "model_type": LLMType.CHAT.value
  679. }, {
  680. "fid": factory_infos[15]["name"],
  681. "llm_name": "azure-text-embedding-ada-002",
  682. "tags": "TEXT EMBEDDING,8K",
  683. "max_tokens": 8191,
  684. "model_type": LLMType.EMBEDDING.value
  685. }, {
  686. "fid": factory_infos[15]["name"],
  687. "llm_name": "azure-text-embedding-3-small",
  688. "tags": "TEXT EMBEDDING,8K",
  689. "max_tokens": 8191,
  690. "model_type": LLMType.EMBEDDING.value
  691. }, {
  692. "fid": factory_infos[15]["name"],
  693. "llm_name": "azure-text-embedding-3-large",
  694. "tags": "TEXT EMBEDDING,8K",
  695. "max_tokens": 8191,
  696. "model_type": LLMType.EMBEDDING.value
  697. },{
  698. "fid": factory_infos[15]["name"],
  699. "llm_name": "azure-whisper-1",
  700. "tags": "SPEECH2TEXT",
  701. "max_tokens": 25 * 1024 * 1024,
  702. "model_type": LLMType.SPEECH2TEXT.value
  703. },
  704. {
  705. "fid": factory_infos[15]["name"],
  706. "llm_name": "azure-gpt-4",
  707. "tags": "LLM,CHAT,8K",
  708. "max_tokens": 8191,
  709. "model_type": LLMType.CHAT.value
  710. }, {
  711. "fid": factory_infos[15]["name"],
  712. "llm_name": "azure-gpt-4-turbo",
  713. "tags": "LLM,CHAT,8K",
  714. "max_tokens": 8191,
  715. "model_type": LLMType.CHAT.value
  716. }, {
  717. "fid": factory_infos[15]["name"],
  718. "llm_name": "azure-gpt-4-32k",
  719. "tags": "LLM,CHAT,32K",
  720. "max_tokens": 32768,
  721. "model_type": LLMType.CHAT.value
  722. }, {
  723. "fid": factory_infos[15]["name"],
  724. "llm_name": "azure-gpt-4-vision-preview",
  725. "tags": "LLM,CHAT,IMAGE2TEXT",
  726. "max_tokens": 765,
  727. "model_type": LLMType.IMAGE2TEXT.value
  728. },
  729. # ------------------------ Bedrock -----------------------
  730. {
  731. "fid": factory_infos[16]["name"],
  732. "llm_name": "ai21.j2-ultra-v1",
  733. "tags": "LLM,CHAT,8k",
  734. "max_tokens": 8191,
  735. "model_type": LLMType.CHAT.value
  736. }, {
  737. "fid": factory_infos[16]["name"],
  738. "llm_name": "ai21.j2-mid-v1",
  739. "tags": "LLM,CHAT,8k",
  740. "max_tokens": 8191,
  741. "model_type": LLMType.CHAT.value
  742. }, {
  743. "fid": factory_infos[16]["name"],
  744. "llm_name": "cohere.command-text-v14",
  745. "tags": "LLM,CHAT,4k",
  746. "max_tokens": 4096,
  747. "model_type": LLMType.CHAT.value
  748. }, {
  749. "fid": factory_infos[16]["name"],
  750. "llm_name": "cohere.command-light-text-v14",
  751. "tags": "LLM,CHAT,4k",
  752. "max_tokens": 4096,
  753. "model_type": LLMType.CHAT.value
  754. }, {
  755. "fid": factory_infos[16]["name"],
  756. "llm_name": "cohere.command-r-v1:0",
  757. "tags": "LLM,CHAT,128k",
  758. "max_tokens": 128 * 1024,
  759. "model_type": LLMType.CHAT.value
  760. }, {
  761. "fid": factory_infos[16]["name"],
  762. "llm_name": "cohere.command-r-plus-v1:0",
  763. "tags": "LLM,CHAT,128k",
  764. "max_tokens": 128000,
  765. "model_type": LLMType.CHAT.value
  766. }, {
  767. "fid": factory_infos[16]["name"],
  768. "llm_name": "anthropic.claude-v2",
  769. "tags": "LLM,CHAT,100k",
  770. "max_tokens": 100 * 1024,
  771. "model_type": LLMType.CHAT.value
  772. }, {
  773. "fid": factory_infos[16]["name"],
  774. "llm_name": "anthropic.claude-v2:1",
  775. "tags": "LLM,CHAT,200k",
  776. "max_tokens": 200 * 1024,
  777. "model_type": LLMType.CHAT.value
  778. }, {
  779. "fid": factory_infos[16]["name"],
  780. "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0",
  781. "tags": "LLM,CHAT,200k",
  782. "max_tokens": 200 * 1024,
  783. "model_type": LLMType.CHAT.value
  784. }, {
  785. "fid": factory_infos[16]["name"],
  786. "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0",
  787. "tags": "LLM,CHAT,200k",
  788. "max_tokens": 200 * 1024,
  789. "model_type": LLMType.CHAT.value
  790. }, {
  791. "fid": factory_infos[16]["name"],
  792. "llm_name": "anthropic.claude-3-haiku-20240307-v1:0",
  793. "tags": "LLM,CHAT,200k",
  794. "max_tokens": 200 * 1024,
  795. "model_type": LLMType.CHAT.value
  796. }, {
  797. "fid": factory_infos[16]["name"],
  798. "llm_name": "anthropic.claude-3-opus-20240229-v1:0",
  799. "tags": "LLM,CHAT,200k",
  800. "max_tokens": 200 * 1024,
  801. "model_type": LLMType.CHAT.value
  802. }, {
  803. "fid": factory_infos[16]["name"],
  804. "llm_name": "anthropic.claude-instant-v1",
  805. "tags": "LLM,CHAT,100k",
  806. "max_tokens": 100 * 1024,
  807. "model_type": LLMType.CHAT.value
  808. }, {
  809. "fid": factory_infos[16]["name"],
  810. "llm_name": "amazon.titan-text-express-v1",
  811. "tags": "LLM,CHAT,8k",
  812. "max_tokens": 8192,
  813. "model_type": LLMType.CHAT.value
  814. }, {
  815. "fid": factory_infos[16]["name"],
  816. "llm_name": "amazon.titan-text-premier-v1:0",
  817. "tags": "LLM,CHAT,32k",
  818. "max_tokens": 32 * 1024,
  819. "model_type": LLMType.CHAT.value
  820. }, {
  821. "fid": factory_infos[16]["name"],
  822. "llm_name": "amazon.titan-text-lite-v1",
  823. "tags": "LLM,CHAT,4k",
  824. "max_tokens": 4096,
  825. "model_type": LLMType.CHAT.value
  826. }, {
  827. "fid": factory_infos[16]["name"],
  828. "llm_name": "meta.llama2-13b-chat-v1",
  829. "tags": "LLM,CHAT,4k",
  830. "max_tokens": 4096,
  831. "model_type": LLMType.CHAT.value
  832. }, {
  833. "fid": factory_infos[16]["name"],
  834. "llm_name": "meta.llama2-70b-chat-v1",
  835. "tags": "LLM,CHAT,4k",
  836. "max_tokens": 4096,
  837. "model_type": LLMType.CHAT.value
  838. }, {
  839. "fid": factory_infos[16]["name"],
  840. "llm_name": "meta.llama3-8b-instruct-v1:0",
  841. "tags": "LLM,CHAT,8k",
  842. "max_tokens": 8192,
  843. "model_type": LLMType.CHAT.value
  844. }, {
  845. "fid": factory_infos[16]["name"],
  846. "llm_name": "meta.llama3-70b-instruct-v1:0",
  847. "tags": "LLM,CHAT,8k",
  848. "max_tokens": 8192,
  849. "model_type": LLMType.CHAT.value
  850. }, {
  851. "fid": factory_infos[16]["name"],
  852. "llm_name": "mistral.mistral-7b-instruct-v0:2",
  853. "tags": "LLM,CHAT,8k",
  854. "max_tokens": 8192,
  855. "model_type": LLMType.CHAT.value
  856. }, {
  857. "fid": factory_infos[16]["name"],
  858. "llm_name": "mistral.mixtral-8x7b-instruct-v0:1",
  859. "tags": "LLM,CHAT,4k",
  860. "max_tokens": 4096,
  861. "model_type": LLMType.CHAT.value
  862. }, {
  863. "fid": factory_infos[16]["name"],
  864. "llm_name": "mistral.mistral-large-2402-v1:0",
  865. "tags": "LLM,CHAT,8k",
  866. "max_tokens": 8192,
  867. "model_type": LLMType.CHAT.value
  868. }, {
  869. "fid": factory_infos[16]["name"],
  870. "llm_name": "mistral.mistral-small-2402-v1:0",
  871. "tags": "LLM,CHAT,8k",
  872. "max_tokens": 8192,
  873. "model_type": LLMType.CHAT.value
  874. }, {
  875. "fid": factory_infos[16]["name"],
  876. "llm_name": "amazon.titan-embed-text-v2:0",
  877. "tags": "TEXT EMBEDDING",
  878. "max_tokens": 8192,
  879. "model_type": LLMType.EMBEDDING.value
  880. }, {
  881. "fid": factory_infos[16]["name"],
  882. "llm_name": "cohere.embed-english-v3",
  883. "tags": "TEXT EMBEDDING",
  884. "max_tokens": 2048,
  885. "model_type": LLMType.EMBEDDING.value
  886. }, {
  887. "fid": factory_infos[16]["name"],
  888. "llm_name": "cohere.embed-multilingual-v3",
  889. "tags": "TEXT EMBEDDING",
  890. "max_tokens": 2048,
  891. "model_type": LLMType.EMBEDDING.value
  892. },
  893. ]
  894. for info in factory_infos:
  895. try:
  896. LLMFactoriesService.save(**info)
  897. except Exception as e:
  898. pass
  899. for info in llm_infos:
  900. try:
  901. LLMService.save(**info)
  902. except Exception as e:
  903. pass
  904. LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
  905. LLMService.filter_delete([LLM.fid == "Local"])
  906. LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
  907. TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])
  908. LLMFactoriesService.filter_delete([LLMFactoriesService.model.name == "QAnything"])
  909. LLMService.filter_delete([LLMService.model.fid == "QAnything"])
  910. TenantLLMService.filter_update([TenantLLMService.model.llm_factory == "QAnything"], {"llm_factory": "Youdao"})
  911. ## insert openai two embedding models to the current openai user.
  912. print("Start to insert 2 OpenAI embedding models...")
  913. tenant_ids = set([row["tenant_id"] for row in TenantLLMService.get_openai_models()])
  914. for tid in tenant_ids:
  915. for row in TenantLLMService.query(llm_factory="OpenAI", tenant_id=tid):
  916. row = row.to_dict()
  917. row["model_type"] = LLMType.EMBEDDING.value
  918. row["llm_name"] = "text-embedding-3-small"
  919. row["used_tokens"] = 0
  920. try:
  921. TenantLLMService.save(**row)
  922. row = deepcopy(row)
  923. row["llm_name"] = "text-embedding-3-large"
  924. TenantLLMService.save(**row)
  925. except Exception as e:
  926. pass
  927. break
  928. for kb_id in KnowledgebaseService.get_all_ids():
  929. KnowledgebaseService.update_by_id(kb_id, {"doc_num": DocumentService.get_kb_doc_count(kb_id)})
  930. """
  931. drop table llm;
  932. drop table llm_factories;
  933. update tenant set parser_ids='naive:General,qa:Q&A,resume:Resume,manual:Manual,table:Table,paper:Paper,book:Book,laws:Laws,presentation:Presentation,picture:Picture,one:One';
  934. alter table knowledgebase modify avatar longtext;
  935. alter table user modify avatar longtext;
  936. alter table dialog modify icon longtext;
  937. """
  938. def add_graph_templates():
  939. dir = os.path.join(get_project_base_directory(), "graph", "templates")
  940. for fnm in os.listdir(dir):
  941. try:
  942. cnvs = json.load(open(os.path.join(dir, fnm), "r"))
  943. try:
  944. CanvasTemplateService.save(**cnvs)
  945. except:
  946. CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
  947. except Exception as e:
  948. print("Add graph templates error: ", e)
  949. print("------------", flush=True)
  950. def init_web_data():
  951. start_time = time.time()
  952. init_llm_factory()
  953. if not UserService.get_all().count():
  954. init_superuser()
  955. add_graph_templates()
  956. print("init web data success:{}".format(time.time() - start_time))
if __name__ == '__main__':
    # Bootstrap entry point: create the database tables first, then seed
    # the initial data (factories, models, superuser, templates).
    init_web_db()
    init_web_data()