# dataset.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging

from flask import request
from peewee import OperationalError

from api.db import FileSource, StatusEnum
from api.db.db_models import File
from api.db.services.document_service import DocumentService
from api.db.services.file2document_service import File2DocumentService
from api.db.services.file_service import FileService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.user_service import TenantService
from api.utils import get_uuid
from api.utils.api_utils import (
    deep_merge,
    get_error_argument_result,
    get_error_data_result,
    get_parser_config,
    get_result,
    token_required,
    verify_embedding_availability,
)
from api.utils.validation_utils import CreateDatasetReq, DeleteDatasetReq, UpdateDatasetReq, validate_and_parse_json_request


@manager.route("/datasets", methods=["POST"])  # noqa: F821
@token_required
def create(tenant_id):
    """
    Create a new dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset creation parameters.
        required: true
        schema:
          type: object
          required:
            - name
          properties:
            name:
              type: string
              description: Name of the dataset.
            avatar:
              type: string
              description: Base64 encoding of the avatar.
            description:
              type: string
              description: Description of the dataset.
            embedding_model:
              type: string
              description: Embedding model name.
            permission:
              type: string
              enum: ['me', 'team']
              description: Dataset permission.
            chunk_method:
              type: string
              enum: ["naive", "book", "email", "laws", "manual", "one", "paper",
                     "picture", "presentation", "qa", "table", "tag"]
              description: Chunking method.
            pagerank:
              type: integer
              description: Set page rank.
            parser_config:
              type: object
              description: Parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
          properties:
            data:
              type: object
    """
    # Field name transformations during model dump:
    # | Original        | Dump Output |
    # |-----------------|-------------|
    # | embedding_model | embd_id     |
    # | chunk_method    | parser_id   |
    req, err = validate_and_parse_json_request(request, CreateDatasetReq)
    if err is not None:
        return get_error_argument_result(err)

    try:
        if KnowledgebaseService.get_or_none(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
            return get_error_data_result(message=f"Dataset name '{req['name']}' already exists")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    req["parser_config"] = get_parser_config(req["parser_id"], req["parser_config"])
    req["id"] = get_uuid()
    req["tenant_id"] = tenant_id
    req["created_by"] = tenant_id

    try:
        ok, t = TenantService.get_by_id(tenant_id)
        if not ok:
            return get_error_data_result(message="Tenant not found")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    # Fall back to the tenant's default embedding model when none is given;
    # otherwise verify that the requested model is available to this tenant.
    if not req.get("embd_id"):
        req["embd_id"] = t.embd_id
    else:
        ok, err = verify_embedding_availability(req["embd_id"], tenant_id)
        if not ok:
            return err

    try:
        if not KnowledgebaseService.save(**req):
            return get_error_data_result(message="Create dataset error. (Database error)")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    try:
        ok, k = KnowledgebaseService.get_by_id(req["id"])
        if not ok:
            return get_error_data_result(message="Dataset creation failed")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    # Rename internal field names to their public API equivalents.
    response_data = {}
    key_mapping = {
        "chunk_num": "chunk_count",
        "doc_num": "document_count",
        "parser_id": "chunk_method",
        "embd_id": "embedding_model",
    }
    for key, value in k.to_dict().items():
        new_key = key_mapping.get(key, key)
        response_data[new_key] = value

    return get_result(data=response_data)
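

# Example (not part of the module): a minimal client-side sketch of calling
# this endpoint with the `requests` library. The host, API key, and /api/v1
# prefix are placeholders/assumptions that depend on how this blueprint is
# mounted; only the required `name` field is sent.
#
#   import requests
#
#   resp = requests.post(
#       "http://<HOST>/api/v1/datasets",
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       json={"name": "my_dataset"},
#   )
#   print(resp.json())  # returned dataset uses the renamed keys, e.g. chunk_count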


@manager.route("/datasets", methods=["DELETE"])  # noqa: F821
@token_required
def delete(tenant_id):
    """
    Delete datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset deletion parameters.
        required: true
        schema:
          type: object
          required:
            - ids
          properties:
            ids:
              type: array or null
              items:
                type: string
              description: |
                Specifies the datasets to delete:
                - If `null`, all datasets will be deleted.
                - If an array of IDs, only the specified datasets will be deleted.
                - If an empty array, no datasets will be deleted.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    req, err = validate_and_parse_json_request(request, DeleteDatasetReq)
    if err is not None:
        return get_error_argument_result(err)

    # Resolve the datasets to delete: all of the tenant's datasets when `ids`
    # is null, otherwise only the listed ones (rejecting any ID the tenant
    # does not own).
    kb_id_instance_pairs = []
    if req["ids"] is None:
        try:
            kbs = KnowledgebaseService.query(tenant_id=tenant_id)
            for kb in kbs:
                kb_id_instance_pairs.append((kb.id, kb))
        except OperationalError as e:
            logging.exception(e)
            return get_error_data_result(message="Database operation failed")
    else:
        error_kb_ids = []
        for kb_id in req["ids"]:
            try:
                kb = KnowledgebaseService.get_or_none(id=kb_id, tenant_id=tenant_id)
                if kb is None:
                    error_kb_ids.append(kb_id)
                    continue
                kb_id_instance_pairs.append((kb_id, kb))
            except OperationalError as e:
                logging.exception(e)
                return get_error_data_result(message="Database operation failed")
        if len(error_kb_ids) > 0:
            return get_error_data_result(message=f"""User '{tenant_id}' lacks permission for datasets: '{", ".join(error_kb_ids)}'""")

    errors = []
    success_count = 0
    for kb_id, kb in kb_id_instance_pairs:
        try:
            # Remove every document in the dataset along with its backing file,
            # then the dataset's folder and the dataset record itself.
            for doc in DocumentService.query(kb_id=kb_id):
                if not DocumentService.remove_document(doc, tenant_id):
                    errors.append(f"Remove document '{doc.id}' error for dataset '{kb_id}'")
                    continue
                f2d = File2DocumentService.get_by_document_id(doc.id)
                FileService.filter_delete(
                    [
                        File.source_type == FileSource.KNOWLEDGEBASE,
                        File.id == f2d[0].file_id,
                    ]
                )
                File2DocumentService.delete_by_document_id(doc.id)
            FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kb.name])
            if not KnowledgebaseService.delete_by_id(kb_id):
                errors.append(f"Delete dataset error for {kb_id}")
                continue
            success_count += 1
        except OperationalError as e:
            logging.exception(e)
            return get_error_data_result(message="Database operation failed")

    if not errors:
        return get_result()

    error_message = f"Successfully deleted {success_count} datasets, {len(errors)} failed. Details: {'; '.join(errors)[:128]}..."
    if success_count == 0:
        return get_error_data_result(message=error_message)

    return get_result(data={"success_count": success_count, "errors": errors[:5]}, message=error_message)
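

# Example (not part of the module): deleting specific datasets versus all of
# them. Placeholders as above; `{"ids": None}` serializes to JSON null, which
# this endpoint treats as "delete every dataset owned by the tenant", while an
# empty list deletes nothing.
#
#   requests.delete(
#       "http://<HOST>/api/v1/datasets",
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       json={"ids": ["<DATASET_ID_1>", "<DATASET_ID_2>"]},  # or {"ids": None}
#   )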


@manager.route("/datasets/<dataset_id>", methods=["PUT"])  # noqa: F821
@token_required
def update(tenant_id, dataset_id):
    """
    Update a dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: path
        name: dataset_id
        type: string
        required: true
        description: ID of the dataset to update.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset update parameters.
        required: true
        schema:
          type: object
          properties:
            name:
              type: string
              description: New name of the dataset.
            avatar:
              type: string
              description: Updated base64 encoding of the avatar.
            description:
              type: string
              description: Updated description of the dataset.
            embedding_model:
              type: string
              description: Updated embedding model name.
            permission:
              type: string
              enum: ['me', 'team']
              description: Updated dataset permission.
            chunk_method:
              type: string
              enum: ["naive", "book", "email", "laws", "manual", "one", "paper",
                     "picture", "presentation", "qa", "table", "tag"]
              description: Updated chunking method.
            pagerank:
              type: integer
              description: Updated page rank.
            parser_config:
              type: object
              description: Updated parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    # Field name transformations during model dump:
    # | Original        | Dump Output |
    # |-----------------|-------------|
    # | embedding_model | embd_id     |
    # | chunk_method    | parser_id   |
    extras = {"dataset_id": dataset_id}
    req, err = validate_and_parse_json_request(request, UpdateDatasetReq, extras=extras, exclude_unset=True)
    if err is not None:
        return get_error_argument_result(err)
    if not req:
        return get_error_argument_result(message="No properties were modified")

    try:
        kb = KnowledgebaseService.get_or_none(id=dataset_id, tenant_id=tenant_id)
        if kb is None:
            return get_error_data_result(message=f"User '{tenant_id}' lacks permission for dataset '{dataset_id}'")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    # Deep-merge any supplied parser_config into the stored one; when the
    # chunk method changes without a new parser_config, fall back to that
    # method's default configuration.
    if req.get("parser_config"):
        req["parser_config"] = deep_merge(kb.parser_config, req["parser_config"])
    if (chunk_method := req.get("parser_id")) and chunk_method != kb.parser_id:
        if not req.get("parser_config"):
            req["parser_config"] = get_parser_config(chunk_method, None)
    elif "parser_config" in req and not req["parser_config"]:
        del req["parser_config"]

    if "name" in req and req["name"].lower() != kb.name.lower():
        try:
            exists = KnowledgebaseService.get_or_none(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)
            if exists:
                return get_error_data_result(message=f"Dataset name '{req['name']}' already exists")
        except OperationalError as e:
            logging.exception(e)
            return get_error_data_result(message="Database operation failed")

    # The embedding model cannot change once chunks exist: stored vectors
    # would no longer live in the new model's embedding space.
    if "embd_id" in req:
        if kb.chunk_num != 0 and req["embd_id"] != kb.embd_id:
            return get_error_data_result(message=f"When chunk_num ({kb.chunk_num}) > 0, embedding_model must remain {kb.embd_id}")
        ok, err = verify_embedding_availability(req["embd_id"], tenant_id)
        if not ok:
            return err

    try:
        if not KnowledgebaseService.update_by_id(kb.id, req):
            return get_error_data_result(message="Update dataset error. (Database error)")
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")

    return get_result()
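

# Example (not part of the module): renaming a dataset and adjusting one
# parser_config key. Supplied parser_config values are deep-merged into the
# stored config, so unrelated keys survive. Placeholders as above; the
# `chunk_token_num` key is illustrative and depends on the chunk method.
#
#   requests.put(
#       "http://<HOST>/api/v1/datasets/<DATASET_ID>",
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       json={"name": "renamed_dataset", "parser_config": {"chunk_token_num": 256}},
#   )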


@manager.route("/datasets", methods=["GET"])  # noqa: F821
@token_required
def list_datasets(tenant_id):
    """
    List datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: query
        name: id
        type: string
        required: false
        description: Dataset ID to filter by.
      - in: query
        name: name
        type: string
        required: false
        description: Dataset name to filter by.
      - in: query
        name: page
        type: integer
        required: false
        default: 1
        description: Page number.
      - in: query
        name: page_size
        type: integer
        required: false
        default: 30
        description: Number of items per page.
      - in: query
        name: orderby
        type: string
        required: false
        default: "create_time"
        description: Field to order by.
      - in: query
        name: desc
        type: boolean
        required: false
        default: true
        description: Sort in descending order.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
    responses:
      200:
        description: Successful operation.
        schema:
          type: array
          items:
            type: object
    """
    id = request.args.get("id")
    name = request.args.get("name")
    # These lookups only validate ownership; the page itself is fetched below.
    if id:
        kbs = KnowledgebaseService.get_kb_by_id(id, tenant_id)
        if not kbs:
            return get_error_data_result(f"You don't own the dataset {id}")
    if name:
        kbs = KnowledgebaseService.get_kb_by_name(name, tenant_id)
        if not kbs:
            return get_error_data_result(f"You don't own the dataset {name}")

    page_number = int(request.args.get("page", 1))
    items_per_page = int(request.args.get("page_size", 30))
    orderby = request.args.get("orderby", "create_time")

    # `desc` defaults to true; anything other than "true"/"false" is rejected.
    desc_param = request.args.get("desc", "true").lower()
    if desc_param not in ["true", "false"]:
        return get_error_data_result("desc should be true or false")
    desc = desc_param == "true"

    tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
    kbs = KnowledgebaseService.get_list(
        [m["tenant_id"] for m in tenants],
        tenant_id,
        page_number,
        items_per_page,
        orderby,
        desc,
        id,
        name,
    )

    # Rename internal field names to their public API equivalents.
    key_mapping = {
        "chunk_num": "chunk_count",
        "doc_num": "document_count",
        "parser_id": "chunk_method",
        "embd_id": "embedding_model",
    }
    renamed_list = []
    for kb in kbs:
        renamed_data = {}
        for key, value in kb.items():
            new_key = key_mapping.get(key, key)
            renamed_data[new_key] = value
        renamed_list.append(renamed_data)

    return get_result(data=renamed_list)
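

# Example (not part of the module): listing with pagination and ordering.
# Placeholders as above; `desc` is passed as the string "true"/"false", and
# the list is assumed to come back under the top-level "data" key that
# get_result() produces.
#
#   resp = requests.get(
#       "http://<HOST>/api/v1/datasets",
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       params={"page": 1, "page_size": 30, "orderby": "create_time", "desc": "true"},
#   )
#   for ds in resp.json()["data"]:
#       print(ds["id"], ds["name"], ds["chunk_count"])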