
dataset.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from flask import request

from api.db import StatusEnum, FileSource
from api.db.db_models import File
from api.db.services.document_service import DocumentService
from api.db.services.file2document_service import File2DocumentService
from api.db.services.file_service import FileService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import TenantLLMService, LLMService
from api.db.services.user_service import TenantService
from api import settings
from api.utils import get_uuid
from api.utils.api_utils import (
    get_result,
    token_required,
    get_error_data_result,
    valid,
    get_parser_config,
    valid_parser_config,
    dataset_readonly_fields,
    check_duplicate_ids,
)
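
# NOTE: `manager` is not imported here; the API server is expected to inject
# it into this module's namespace when it loads the route files, which is why
# each `@manager.route(...)` line below carries a `noqa: F821` marker.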


@manager.route("/datasets", methods=["POST"])  # noqa: F821
@token_required
def create(tenant_id):
    """
    Create a new dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset creation parameters.
        required: true
        schema:
          type: object
          required:
            - name
          properties:
            name:
              type: string
              description: Name of the dataset.
            permission:
              type: string
              enum: ['me', 'team']
              description: Dataset permission.
            chunk_method:
              type: string
              enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
                     "presentation", "picture", "one", "knowledge_graph", "email", "tag"]
              description: Chunking method.
            parser_config:
              type: object
              description: Parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
          properties:
            data:
              type: object
    """
    req = request.json
    for k in req.keys():
        if dataset_readonly_fields(k):
            return get_result(code=settings.RetCode.ARGUMENT_ERROR, message=f"'{k}' is readonly.")
    e, t = TenantService.get_by_id(tenant_id)
    permission = req.get("permission")
    chunk_method = req.get("chunk_method")
    parser_config = req.get("parser_config")
    valid_parser_config(parser_config)
    valid_permission = ["me", "team"]
    valid_chunk_method = [
        "naive",
        "manual",
        "qa",
        "table",
        "paper",
        "book",
        "laws",
        "presentation",
        "picture",
        "one",
        "knowledge_graph",
        "email",
        "tag",
    ]
    check_validation = valid(
        permission,
        valid_permission,
        chunk_method,
        valid_chunk_method,
    )
    if check_validation:
        return check_validation
    req["parser_config"] = get_parser_config(chunk_method, parser_config)
    if "tenant_id" in req:
        return get_error_data_result(message="`tenant_id` must not be provided.")
    if "chunk_count" in req or "document_count" in req:
        return get_error_data_result(
            message="`chunk_count` or `document_count` must not be provided."
        )
    if "name" not in req:
        return get_error_data_result(message="`name` is required.")
    req["id"] = get_uuid()
    req["name"] = req["name"].strip()
    if req["name"] == "":
        return get_error_data_result(message="`name` can not be an empty string.")
    if len(req["name"]) >= 128:
        return get_error_data_result(
            message="Dataset name should not be longer than 128 characters."
        )
    if KnowledgebaseService.query(
        name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
    ):
        return get_error_data_result(
            message="Duplicated dataset name while creating dataset."
        )
    req["tenant_id"] = tenant_id
    req["created_by"] = tenant_id
    if not req.get("embedding_model"):
        req["embedding_model"] = t.embd_id
    else:
        valid_embedding_models = [
            "BAAI/bge-large-zh-v1.5",
            "BAAI/bge-base-en-v1.5",
            "BAAI/bge-large-en-v1.5",
            "BAAI/bge-small-en-v1.5",
            "BAAI/bge-small-zh-v1.5",
            "jinaai/jina-embeddings-v2-base-en",
            "jinaai/jina-embeddings-v2-small-en",
            "nomic-ai/nomic-embed-text-v1.5",
            "sentence-transformers/all-MiniLM-L6-v2",
            "text-embedding-v2",
            "text-embedding-v3",
            "maidalun1020/bce-embedding-base_v1",
        ]
        embd_model = LLMService.query(
            llm_name=req["embedding_model"], model_type="embedding"
        )
        if embd_model:
            if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(
                tenant_id=tenant_id,
                model_type="embedding",
                llm_name=req.get("embedding_model"),
            ):
                return get_error_data_result(
                    f"`embedding_model` {req.get('embedding_model')} doesn't exist"
                )
        if not embd_model:
            embd_model = TenantLLMService.query(
                tenant_id=tenant_id,
                model_type="embedding",
                llm_name=req.get("embedding_model"),
            )
            if not embd_model:
                return get_error_data_result(
                    f"`embedding_model` {req.get('embedding_model')} doesn't exist"
                )
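    # `key_mapping` bridges the public API field names and the internal ORM
    # column names: request values arrive under the API names (the dict
    # values) and are copied onto the ORM names (the dict keys) before
    # saving, then the saved record's ORM keys are translated back to the
    # API names for the response.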
    key_mapping = {
        "chunk_num": "chunk_count",
        "doc_num": "document_count",
        "parser_id": "chunk_method",
        "embd_id": "embedding_model",
    }
    mapped_keys = {
        new_key: req[old_key]
        for new_key, old_key in key_mapping.items()
        if old_key in req
    }
    req.update(mapped_keys)
    flds = list(req.keys())
    for f in flds:
        if req[f] == "" and f in ["permission", "parser_id", "chunk_method"]:
            del req[f]
    if not KnowledgebaseService.save(**req):
        return get_error_data_result(message="Create dataset error. (Database error)")
    renamed_data = {}
    e, k = KnowledgebaseService.get_by_id(req["id"])
    for key, value in k.to_dict().items():
        new_key = key_mapping.get(key, key)
        renamed_data[new_key] = value
    return get_result(data=renamed_data)
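
# A minimal usage sketch for the endpoint above (not part of this module).
# It assumes the routes are mounted under /api/v1, a server listening on
# http://localhost:9380, and a valid API key; adjust all three to your
# deployment.
#
#   curl -X POST http://localhost:9380/api/v1/datasets \
#        -H "Authorization: Bearer <API_KEY>" \
#        -H "Content-Type: application/json" \
#        -d '{"name": "my_dataset", "chunk_method": "naive", "permission": "me"}'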


@manager.route("/datasets", methods=["DELETE"])  # noqa: F821
@token_required
def delete(tenant_id):
    """
    Delete datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset deletion parameters.
        required: true
        schema:
          type: object
          properties:
            ids:
              type: array
              items:
                type: string
              description: List of dataset IDs to delete.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    errors = []
    success_count = 0
    req = request.json
    if not req:
        ids = None
    else:
        ids = req.get("ids")
    if not ids:
        id_list = []
        kbs = KnowledgebaseService.query(tenant_id=tenant_id)
        for kb in kbs:
            id_list.append(kb.id)
    else:
        id_list = ids
    unique_id_list, duplicate_messages = check_duplicate_ids(id_list, "dataset")
    id_list = unique_id_list
    for id in id_list:
        kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
        if not kbs:
            errors.append(f"You don't own the dataset {id}")
            continue
        for doc in DocumentService.query(kb_id=id):
            if not DocumentService.remove_document(doc, tenant_id):
                errors.append(f"Remove document error for dataset {id}")
                continue
            f2d = File2DocumentService.get_by_document_id(doc.id)
            FileService.filter_delete(
                [
                    File.source_type == FileSource.KNOWLEDGEBASE,
                    File.id == f2d[0].file_id,
                ]
            )
            File2DocumentService.delete_by_document_id(doc.id)
        FileService.filter_delete(
            [
                File.source_type == FileSource.KNOWLEDGEBASE,
                File.type == "folder",
                File.name == kbs[0].name,
            ]
        )
        if not KnowledgebaseService.delete_by_id(id):
            errors.append(f"Delete dataset error for {id}")
            continue
        success_count += 1
    if errors:
        if success_count > 0:
            return get_result(
                data={"success_count": success_count, "errors": errors},
                message=f"Partially deleted {success_count} datasets with {len(errors)} errors",
            )
        else:
            return get_error_data_result(message="; ".join(errors))
    if duplicate_messages:
        if success_count > 0:
            return get_result(
                message=f"Partially deleted {success_count} datasets with {len(duplicate_messages)} errors",
                data={"success_count": success_count, "errors": duplicate_messages},
            )
        else:
            return get_error_data_result(message="; ".join(duplicate_messages))
    return get_result(code=settings.RetCode.SUCCESS)
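
# A minimal usage sketch (same /api/v1 and API-key assumptions as above):
#
#   curl -X DELETE http://localhost:9380/api/v1/datasets \
#        -H "Authorization: Bearer <API_KEY>" \
#        -H "Content-Type: application/json" \
#        -d '{"ids": ["<DATASET_ID_1>", "<DATASET_ID_2>"]}'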


@manager.route("/datasets/<dataset_id>", methods=["PUT"])  # noqa: F821
@token_required
def update(tenant_id, dataset_id):
    """
    Update a dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: path
        name: dataset_id
        type: string
        required: true
        description: ID of the dataset to update.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset update parameters.
        required: true
        schema:
          type: object
          properties:
            name:
              type: string
              description: New name of the dataset.
            permission:
              type: string
              enum: ['me', 'team']
              description: Updated permission.
            chunk_method:
              type: string
              enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
                     "presentation", "picture", "one", "knowledge_graph", "email", "tag"]
              description: Updated chunking method.
            parser_config:
              type: object
              description: Updated parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
        return get_error_data_result(message="You don't own the dataset")
    req = request.json
    for k in req.keys():
        if dataset_readonly_fields(k):
            return get_result(code=settings.RetCode.ARGUMENT_ERROR, message=f"'{k}' is readonly.")
    e, t = TenantService.get_by_id(tenant_id)
    invalid_keys = {
        "id",
        "embd_id",
        "chunk_num",
        "doc_num",
        "parser_id",
        "create_date",
        "create_time",
        "created_by",
        "status",
        "token_num",
        "update_date",
        "update_time",
    }
    if any(key in req for key in invalid_keys):
        return get_error_data_result(message="The input parameters are invalid.")
    permission = req.get("permission")
    chunk_method = req.get("chunk_method")
    parser_config = req.get("parser_config")
    valid_parser_config(parser_config)
    valid_permission = ["me", "team"]
    valid_chunk_method = [
        "naive",
        "manual",
        "qa",
        "table",
        "paper",
        "book",
        "laws",
        "presentation",
        "picture",
        "one",
        "knowledge_graph",
        "email",
        "tag",
    ]
    check_validation = valid(
        permission,
        valid_permission,
        chunk_method,
        valid_chunk_method,
    )
    if check_validation:
        return check_validation
    if "tenant_id" in req:
        if req["tenant_id"] != tenant_id:
            return get_error_data_result(message="Can't change `tenant_id`.")
    e, kb = KnowledgebaseService.get_by_id(dataset_id)
    if "parser_config" in req:
        temp_dict = kb.parser_config
        temp_dict.update(req["parser_config"])
        req["parser_config"] = temp_dict
    if "chunk_count" in req:
        if req["chunk_count"] != kb.chunk_num:
            return get_error_data_result(message="Can't change `chunk_count`.")
        req.pop("chunk_count")
    if "document_count" in req:
        if req["document_count"] != kb.doc_num:
            return get_error_data_result(message="Can't change `document_count`.")
        req.pop("document_count")
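    # Once a dataset has been chunked (chunk_num != 0), its chunk method and
    # embedding model are frozen; both checks below enforce that.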
    if req.get("chunk_method"):
        if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
            return get_error_data_result(
                message="If `chunk_count` is not 0, `chunk_method` is not changeable."
            )
        req["parser_id"] = req.pop("chunk_method")
        if req["parser_id"] != kb.parser_id:
            if not req.get("parser_config"):
                req["parser_config"] = get_parser_config(chunk_method, parser_config)
    if "embedding_model" in req:
        if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
            return get_error_data_result(
                message="If `chunk_count` is not 0, `embedding_model` is not changeable."
            )
        if not req.get("embedding_model"):
            return get_error_data_result("`embedding_model` can't be empty")
        valid_embedding_models = [
            "BAAI/bge-large-zh-v1.5",
            "BAAI/bge-base-en-v1.5",
            "BAAI/bge-large-en-v1.5",
            "BAAI/bge-small-en-v1.5",
            "BAAI/bge-small-zh-v1.5",
            "jinaai/jina-embeddings-v2-base-en",
            "jinaai/jina-embeddings-v2-small-en",
            "nomic-ai/nomic-embed-text-v1.5",
            "sentence-transformers/all-MiniLM-L6-v2",
            "text-embedding-v2",
            "text-embedding-v3",
            "maidalun1020/bce-embedding-base_v1",
        ]
        embd_model = LLMService.query(
            llm_name=req["embedding_model"], model_type="embedding"
        )
        if embd_model:
            if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(
                tenant_id=tenant_id,
                model_type="embedding",
                llm_name=req.get("embedding_model"),
            ):
                return get_error_data_result(
                    f"`embedding_model` {req.get('embedding_model')} doesn't exist"
                )
        if not embd_model:
            embd_model = TenantLLMService.query(
                tenant_id=tenant_id,
                model_type="embedding",
                llm_name=req.get("embedding_model"),
            )
            if not embd_model:
                return get_error_data_result(
                    f"`embedding_model` {req.get('embedding_model')} doesn't exist"
                )
        req["embd_id"] = req.pop("embedding_model")
    if "name" in req:
        req["name"] = req["name"].strip()
        if len(req["name"]) >= 128:
            return get_error_data_result(
                message="Dataset name should not be longer than 128 characters."
            )
        if (
            req["name"].lower() != kb.name.lower()
            and len(
                KnowledgebaseService.query(
                    name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
                )
            )
            > 0
        ):
            return get_error_data_result(
                message="Duplicated dataset name while updating dataset."
            )
    flds = list(req.keys())
    for f in flds:
        if req[f] == "" and f in ["permission", "parser_id", "chunk_method"]:
            del req[f]
    if not KnowledgebaseService.update_by_id(kb.id, req):
        return get_error_data_result(message="Update dataset error. (Database error)")
    return get_result(code=settings.RetCode.SUCCESS)
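
# A minimal usage sketch (same /api/v1 and API-key assumptions as above):
#
#   curl -X PUT http://localhost:9380/api/v1/datasets/<DATASET_ID> \
#        -H "Authorization: Bearer <API_KEY>" \
#        -H "Content-Type: application/json" \
#        -d '{"name": "renamed_dataset"}'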


@manager.route("/datasets", methods=["GET"])  # noqa: F821
@token_required
def list_datasets(tenant_id):
    """
    List datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: query
        name: id
        type: string
        required: false
        description: Dataset ID to filter by.
      - in: query
        name: name
        type: string
        required: false
        description: Dataset name to filter by.
      - in: query
        name: page
        type: integer
        required: false
        default: 1
        description: Page number.
      - in: query
        name: page_size
        type: integer
        required: false
        default: 30
        description: Number of items per page.
      - in: query
        name: orderby
        type: string
        required: false
        default: "create_time"
        description: Field to order by.
      - in: query
        name: desc
        type: boolean
        required: false
        default: true
        description: Whether to order in descending order.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
    responses:
      200:
        description: Successful operation.
        schema:
          type: array
          items:
            type: object
    """
    id = request.args.get("id")
    name = request.args.get("name")
    if id:
        kbs = KnowledgebaseService.get_kb_by_id(id, tenant_id)
        if not kbs:
            return get_error_data_result(f"You don't own the dataset {id}")
    if name:
        kbs = KnowledgebaseService.get_kb_by_name(name, tenant_id)
        if not kbs:
            return get_error_data_result(f"You don't own the dataset {name}")
    page_number = int(request.args.get("page", 1))
    items_per_page = int(request.args.get("page_size", 30))
    orderby = request.args.get("orderby", "create_time")
    if request.args.get("desc", "true").lower() not in ["true", "false"]:
        return get_error_data_result("desc should be true or false")
    desc = request.args.get("desc", "true").lower() != "false"
    tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
    kbs = KnowledgebaseService.get_list(
        [m["tenant_id"] for m in tenants],
        tenant_id,
        page_number,
        items_per_page,
        orderby,
        desc,
        id,
        name,
    )
    key_mapping = {
        "chunk_num": "chunk_count",
        "doc_num": "document_count",
        "parser_id": "chunk_method",
        "embd_id": "embedding_model",
    }
    renamed_list = []
    for kb in kbs:
        renamed_data = {}
        for key, value in kb.items():
            new_key = key_mapping.get(key, key)
            renamed_data[new_key] = value
        renamed_list.append(renamed_data)
    return get_result(data=renamed_list)
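
# A minimal usage sketch (same /api/v1 and API-key assumptions as above):
#
#   curl "http://localhost:9380/api/v1/datasets?page=1&page_size=30&orderby=create_time&desc=true" \
#        -H "Authorization: Bearer <API_KEY>"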