You may select up to 25 topics. Topics must start with a letter or number, can include hyphens (-), and must be no longer than 35 characters.

dataset.py 17KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527
  1. #
  2. # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. #
  16. from flask import request
  17. from api.db import StatusEnum, FileSource
  18. from api.db.db_models import File
  19. from api.db.services.document_service import DocumentService
  20. from api.db.services.file2document_service import File2DocumentService
  21. from api.db.services.file_service import FileService
  22. from api.db.services.knowledgebase_service import KnowledgebaseService
  23. from api.db.services.llm_service import TenantLLMService, LLMService
  24. from api.db.services.user_service import TenantService
  25. from api import settings
  26. from api.utils import get_uuid
  27. from api.utils.api_utils import (
  28. get_result,
  29. token_required,
  30. get_error_data_result,
  31. valid,
  32. get_parser_config,
  33. )
  34. @manager.route("/datasets", methods=["POST"]) # noqa: F821
  35. @token_required
  36. def create(tenant_id):
  37. """
  38. Create a new dataset.
  39. ---
  40. tags:
  41. - Datasets
  42. security:
  43. - ApiKeyAuth: []
  44. parameters:
  45. - in: header
  46. name: Authorization
  47. type: string
  48. required: true
  49. description: Bearer token for authentication.
  50. - in: body
  51. name: body
  52. description: Dataset creation parameters.
  53. required: true
  54. schema:
  55. type: object
  56. required:
  57. - name
  58. properties:
  59. name:
  60. type: string
  61. description: Name of the dataset.
  62. permission:
  63. type: string
  64. enum: ['me', 'team']
  65. description: Dataset permission.
  66. chunk_method:
  67. type: string
  68. enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
  69. "presentation", "picture", "one", "knowledge_graph", "email", "tag"
  70. ]
  71. description: Chunking method.
  72. parser_config:
  73. type: object
  74. description: Parser configuration.
  75. responses:
  76. 200:
  77. description: Successful operation.
  78. schema:
  79. type: object
  80. properties:
  81. data:
  82. type: object
  83. """
  84. req = request.json
  85. e, t = TenantService.get_by_id(tenant_id)
  86. permission = req.get("permission")
  87. chunk_method = req.get("chunk_method")
  88. parser_config = req.get("parser_config")
  89. valid_permission = ["me", "team"]
  90. valid_chunk_method = [
  91. "naive",
  92. "manual",
  93. "qa",
  94. "table",
  95. "paper",
  96. "book",
  97. "laws",
  98. "presentation",
  99. "picture",
  100. "one",
  101. "knowledge_graph",
  102. "email",
  103. "tag"
  104. ]
  105. check_validation = valid(
  106. permission,
  107. valid_permission,
  108. chunk_method,
  109. valid_chunk_method,
  110. )
  111. if check_validation:
  112. return check_validation
  113. req["parser_config"] = get_parser_config(chunk_method, parser_config)
  114. if "tenant_id" in req:
  115. return get_error_data_result(message="`tenant_id` must not be provided")
  116. if "chunk_count" in req or "document_count" in req:
  117. return get_error_data_result(
  118. message="`chunk_count` or `document_count` must not be provided"
  119. )
  120. if "name" not in req:
  121. return get_error_data_result(message="`name` is not empty!")
  122. req["id"] = get_uuid()
  123. req["name"] = req["name"].strip()
  124. if req["name"] == "":
  125. return get_error_data_result(message="`name` is not empty string!")
  126. if len(req["name"]) >= 128:
  127. return get_error_data_result(
  128. message="Dataset name should not be longer than 128 characters."
  129. )
  130. if KnowledgebaseService.query(
  131. name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
  132. ):
  133. return get_error_data_result(
  134. message="Duplicated dataset name in creating dataset."
  135. )
  136. req["tenant_id"] = req["created_by"] = tenant_id
  137. if not req.get("embedding_model"):
  138. req["embedding_model"] = t.embd_id
  139. else:
  140. valid_embedding_models = [
  141. "BAAI/bge-large-zh-v1.5",
  142. "BAAI/bge-base-en-v1.5",
  143. "BAAI/bge-large-en-v1.5",
  144. "BAAI/bge-small-en-v1.5",
  145. "BAAI/bge-small-zh-v1.5",
  146. "jinaai/jina-embeddings-v2-base-en",
  147. "jinaai/jina-embeddings-v2-small-en",
  148. "nomic-ai/nomic-embed-text-v1.5",
  149. "sentence-transformers/all-MiniLM-L6-v2",
  150. "text-embedding-v2",
  151. "text-embedding-v3",
  152. "maidalun1020/bce-embedding-base_v1",
  153. ]
  154. embd_model = LLMService.query(
  155. llm_name=req["embedding_model"], model_type="embedding"
  156. )
  157. if embd_model:
  158. if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding",llm_name=req.get("embedding_model"),):
  159. return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
  160. if not embd_model:
  161. embd_model=TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model"))
  162. if not embd_model:
  163. return get_error_data_result(
  164. f"`embedding_model` {req.get('embedding_model')} doesn't exist"
  165. )
  166. key_mapping = {
  167. "chunk_num": "chunk_count",
  168. "doc_num": "document_count",
  169. "parser_id": "chunk_method",
  170. "embd_id": "embedding_model",
  171. }
  172. mapped_keys = {
  173. new_key: req[old_key]
  174. for new_key, old_key in key_mapping.items()
  175. if old_key in req
  176. }
  177. req.update(mapped_keys)
  178. if not KnowledgebaseService.save(**req):
  179. return get_error_data_result(message="Create dataset error.(Database error)")
  180. renamed_data = {}
  181. e, k = KnowledgebaseService.get_by_id(req["id"])
  182. for key, value in k.to_dict().items():
  183. new_key = key_mapping.get(key, key)
  184. renamed_data[new_key] = value
  185. return get_result(data=renamed_data)
  186. @manager.route("/datasets", methods=["DELETE"]) # noqa: F821
  187. @token_required
  188. def delete(tenant_id):
  189. """
  190. Delete datasets.
  191. ---
  192. tags:
  193. - Datasets
  194. security:
  195. - ApiKeyAuth: []
  196. parameters:
  197. - in: header
  198. name: Authorization
  199. type: string
  200. required: true
  201. description: Bearer token for authentication.
  202. - in: body
  203. name: body
  204. description: Dataset deletion parameters.
  205. required: true
  206. schema:
  207. type: object
  208. properties:
  209. ids:
  210. type: array
  211. items:
  212. type: string
  213. description: List of dataset IDs to delete.
  214. responses:
  215. 200:
  216. description: Successful operation.
  217. schema:
  218. type: object
  219. """
  220. req = request.json
  221. if not req:
  222. ids = None
  223. else:
  224. ids = req.get("ids")
  225. if not ids:
  226. id_list = []
  227. kbs = KnowledgebaseService.query(tenant_id=tenant_id)
  228. for kb in kbs:
  229. id_list.append(kb.id)
  230. else:
  231. id_list = ids
  232. for id in id_list:
  233. kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
  234. if not kbs:
  235. return get_error_data_result(message=f"You don't own the dataset {id}")
  236. for doc in DocumentService.query(kb_id=id):
  237. if not DocumentService.remove_document(doc, tenant_id):
  238. return get_error_data_result(
  239. message="Remove document error.(Database error)"
  240. )
  241. f2d = File2DocumentService.get_by_document_id(doc.id)
  242. FileService.filter_delete(
  243. [
  244. File.source_type == FileSource.KNOWLEDGEBASE,
  245. File.id == f2d[0].file_id,
  246. ]
  247. )
  248. File2DocumentService.delete_by_document_id(doc.id)
  249. FileService.filter_delete(
  250. [File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kbs[0].name])
  251. if not KnowledgebaseService.delete_by_id(id):
  252. return get_error_data_result(message="Delete dataset error.(Database error)")
  253. return get_result(code=settings.RetCode.SUCCESS)
  254. @manager.route("/datasets/<dataset_id>", methods=["PUT"]) # noqa: F821
  255. @token_required
  256. def update(tenant_id, dataset_id):
  257. """
  258. Update a dataset.
  259. ---
  260. tags:
  261. - Datasets
  262. security:
  263. - ApiKeyAuth: []
  264. parameters:
  265. - in: path
  266. name: dataset_id
  267. type: string
  268. required: true
  269. description: ID of the dataset to update.
  270. - in: header
  271. name: Authorization
  272. type: string
  273. required: true
  274. description: Bearer token for authentication.
  275. - in: body
  276. name: body
  277. description: Dataset update parameters.
  278. required: true
  279. schema:
  280. type: object
  281. properties:
  282. name:
  283. type: string
  284. description: New name of the dataset.
  285. permission:
  286. type: string
  287. enum: ['me', 'team']
  288. description: Updated permission.
  289. chunk_method:
  290. type: string
  291. enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
  292. "presentation", "picture", "one", "knowledge_graph", "email", "tag"
  293. ]
  294. description: Updated chunking method.
  295. parser_config:
  296. type: object
  297. description: Updated parser configuration.
  298. responses:
  299. 200:
  300. description: Successful operation.
  301. schema:
  302. type: object
  303. """
  304. if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
  305. return get_error_data_result(message="You don't own the dataset")
  306. req = request.json
  307. e, t = TenantService.get_by_id(tenant_id)
  308. invalid_keys = {"id", "embd_id", "chunk_num", "doc_num", "parser_id"}
  309. if any(key in req for key in invalid_keys):
  310. return get_error_data_result(message="The input parameters are invalid.")
  311. permission = req.get("permission")
  312. chunk_method = req.get("chunk_method")
  313. parser_config = req.get("parser_config")
  314. valid_permission = ["me", "team"]
  315. valid_chunk_method = [
  316. "naive",
  317. "manual",
  318. "qa",
  319. "table",
  320. "paper",
  321. "book",
  322. "laws",
  323. "presentation",
  324. "picture",
  325. "one",
  326. "knowledge_graph",
  327. "email",
  328. "tag"
  329. ]
  330. check_validation = valid(
  331. permission,
  332. valid_permission,
  333. chunk_method,
  334. valid_chunk_method,
  335. )
  336. if check_validation:
  337. return check_validation
  338. if "tenant_id" in req:
  339. if req["tenant_id"] != tenant_id:
  340. return get_error_data_result(message="Can't change `tenant_id`.")
  341. e, kb = KnowledgebaseService.get_by_id(dataset_id)
  342. if "parser_config" in req:
  343. temp_dict = kb.parser_config
  344. temp_dict.update(req["parser_config"])
  345. req["parser_config"] = temp_dict
  346. if "chunk_count" in req:
  347. if req["chunk_count"] != kb.chunk_num:
  348. return get_error_data_result(message="Can't change `chunk_count`.")
  349. req.pop("chunk_count")
  350. if "document_count" in req:
  351. if req["document_count"] != kb.doc_num:
  352. return get_error_data_result(message="Can't change `document_count`.")
  353. req.pop("document_count")
  354. if "chunk_method" in req:
  355. if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
  356. return get_error_data_result(
  357. message="If `chunk_count` is not 0, `chunk_method` is not changeable."
  358. )
  359. req["parser_id"] = req.pop("chunk_method")
  360. if req["parser_id"] != kb.parser_id:
  361. if not req.get("parser_config"):
  362. req["parser_config"] = get_parser_config(chunk_method, parser_config)
  363. if "embedding_model" in req:
  364. if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
  365. return get_error_data_result(
  366. message="If `chunk_count` is not 0, `embedding_model` is not changeable."
  367. )
  368. if not req.get("embedding_model"):
  369. return get_error_data_result("`embedding_model` can't be empty")
  370. valid_embedding_models = [
  371. "BAAI/bge-large-zh-v1.5",
  372. "BAAI/bge-base-en-v1.5",
  373. "BAAI/bge-large-en-v1.5",
  374. "BAAI/bge-small-en-v1.5",
  375. "BAAI/bge-small-zh-v1.5",
  376. "jinaai/jina-embeddings-v2-base-en",
  377. "jinaai/jina-embeddings-v2-small-en",
  378. "nomic-ai/nomic-embed-text-v1.5",
  379. "sentence-transformers/all-MiniLM-L6-v2",
  380. "text-embedding-v2",
  381. "text-embedding-v3",
  382. "maidalun1020/bce-embedding-base_v1",
  383. ]
  384. embd_model = LLMService.query(
  385. llm_name=req["embedding_model"], model_type="embedding"
  386. )
  387. if embd_model:
  388. if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding",llm_name=req.get("embedding_model"),):
  389. return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
  390. if not embd_model:
  391. embd_model=TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model"))
  392. if not embd_model:
  393. return get_error_data_result(
  394. f"`embedding_model` {req.get('embedding_model')} doesn't exist"
  395. )
  396. req["embd_id"] = req.pop("embedding_model")
  397. if "name" in req:
  398. req["name"] = req["name"].strip()
  399. if len(req["name"]) >= 128:
  400. return get_error_data_result(
  401. message="Dataset name should not be longer than 128 characters."
  402. )
  403. if (
  404. req["name"].lower() != kb.name.lower()
  405. and len(
  406. KnowledgebaseService.query(
  407. name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
  408. )
  409. )
  410. > 0
  411. ):
  412. return get_error_data_result(
  413. message="Duplicated dataset name in updating dataset."
  414. )
  415. if not KnowledgebaseService.update_by_id(kb.id, req):
  416. return get_error_data_result(message="Update dataset error.(Database error)")
  417. return get_result(code=settings.RetCode.SUCCESS)
  418. @manager.route("/datasets", methods=["GET"]) # noqa: F821
  419. @token_required
  420. def list(tenant_id):
  421. """
  422. List datasets.
  423. ---
  424. tags:
  425. - Datasets
  426. security:
  427. - ApiKeyAuth: []
  428. parameters:
  429. - in: query
  430. name: id
  431. type: string
  432. required: false
  433. description: Dataset ID to filter.
  434. - in: query
  435. name: name
  436. type: string
  437. required: false
  438. description: Dataset name to filter.
  439. - in: query
  440. name: page
  441. type: integer
  442. required: false
  443. default: 1
  444. description: Page number.
  445. - in: query
  446. name: page_size
  447. type: integer
  448. required: false
  449. default: 1024
  450. description: Number of items per page.
  451. - in: query
  452. name: orderby
  453. type: string
  454. required: false
  455. default: "create_time"
  456. description: Field to order by.
  457. - in: query
  458. name: desc
  459. type: boolean
  460. required: false
  461. default: true
  462. description: Order in descending.
  463. - in: header
  464. name: Authorization
  465. type: string
  466. required: true
  467. description: Bearer token for authentication.
  468. responses:
  469. 200:
  470. description: Successful operation.
  471. schema:
  472. type: array
  473. items:
  474. type: object
  475. """
  476. id = request.args.get("id")
  477. name = request.args.get("name")
  478. if id:
  479. kbs = KnowledgebaseService.get_kb_by_id(id,tenant_id)
  480. if not kbs:
  481. return get_error_data_result(f"You don't own the dataset {id}")
  482. if name:
  483. kbs = KnowledgebaseService.get_kb_by_name(name,tenant_id)
  484. if not kbs:
  485. return get_error_data_result(f"You don't own the dataset {name}")
  486. page_number = int(request.args.get("page", 1))
  487. items_per_page = int(request.args.get("page_size", 30))
  488. orderby = request.args.get("orderby", "create_time")
  489. if request.args.get("desc") == "False" or request.args.get("desc") == "false":
  490. desc = False
  491. else:
  492. desc = True
  493. tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
  494. kbs = KnowledgebaseService.get_list(
  495. [m["tenant_id"] for m in tenants],
  496. tenant_id,
  497. page_number,
  498. items_per_page,
  499. orderby,
  500. desc,
  501. id,
  502. name,
  503. )
  504. renamed_list = []
  505. for kb in kbs:
  506. key_mapping = {
  507. "chunk_num": "chunk_count",
  508. "doc_num": "document_count",
  509. "parser_id": "chunk_method",
  510. "embd_id": "embedding_model",
  511. }
  512. renamed_data = {}
  513. for key, value in kb.items():
  514. new_key = key_mapping.get(key, key)
  515. renamed_data[new_key] = value
  516. renamed_list.append(renamed_data)
  517. return get_result(data=renamed_list)