
dataset.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os

from flask import request
from peewee import OperationalError

from api import settings
from api.db import FileSource, StatusEnum
from api.db.db_models import File
from api.db.services.document_service import DocumentService
from api.db.services.file2document_service import File2DocumentService
from api.db.services.file_service import FileService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.user_service import TenantService
from api.utils import get_uuid
from api.utils.api_utils import (
    deep_merge,
    get_error_argument_result,
    get_error_data_result,
    get_error_operating_result,
    get_error_permission_result,
    get_parser_config,
    get_result,
    remap_dictionary_keys,
    token_required,
    verify_embedding_availability,
)
from api.utils.validation_utils import (
    CreateDatasetReq,
    DeleteDatasetReq,
    ListDatasetReq,
    UpdateDatasetReq,
    validate_and_parse_json_request,
    validate_and_parse_request_args,
)
from rag.nlp import search
from rag.settings import PAGERANK_FLD


@manager.route("/datasets", methods=["POST"])  # noqa: F821
@token_required
def create(tenant_id):
    """
    Create a new dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset creation parameters.
        required: true
        schema:
          type: object
          required:
            - name
          properties:
            name:
              type: string
              description: Name of the dataset.
            avatar:
              type: string
              description: Base64 encoding of the avatar.
            description:
              type: string
              description: Description of the dataset.
            embedding_model:
              type: string
              description: Embedding model name.
            permission:
              type: string
              enum: ['me', 'team']
              description: Dataset permission.
            chunk_method:
              type: string
              enum: ["naive", "book", "email", "laws", "manual", "one", "paper",
                     "picture", "presentation", "qa", "table", "tag"]
              description: Chunking method.
            parser_config:
              type: object
              description: Parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
          properties:
            data:
              type: object
    """
    # Field name transformations during model dump:
    # | Original        | Dump Output |
    # |-----------------|-------------|
    # | embedding_model | embd_id     |
    # | chunk_method    | parser_id   |
    req, err = validate_and_parse_json_request(request, CreateDatasetReq)
    if err is not None:
        return get_error_argument_result(err)

    try:
        if KnowledgebaseService.get_or_none(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
            return get_error_operating_result(message=f"Dataset name '{req['name']}' already exists")

        req["parser_config"] = get_parser_config(req["parser_id"], req["parser_config"])
        req["id"] = get_uuid()
        req["tenant_id"] = tenant_id
        req["created_by"] = tenant_id

        ok, t = TenantService.get_by_id(tenant_id)
        if not ok:
            return get_error_permission_result(message="Tenant not found")

        if not req.get("embd_id"):
            req["embd_id"] = t.embd_id
        else:
            ok, err = verify_embedding_availability(req["embd_id"], tenant_id)
            if not ok:
                return err

        if not KnowledgebaseService.save(**req):
            return get_error_data_result(message="Create dataset error.(Database error)")

        ok, k = KnowledgebaseService.get_by_id(req["id"])
        if not ok:
            return get_error_data_result(message="Dataset creation failed")

        response_data = remap_dictionary_keys(k.to_dict())
        return get_result(data=response_data)
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")
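
# Illustrative client-side sketch (kept as a comment so this module stays import-safe):
# how the create endpoint above might be called over HTTP. The base URL, port, and API
# key are assumed placeholders, and mounting the SDK routes under /api/v1 is an
# assumption about the deployment; "name" being the only required field comes from the
# docstring schema.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:9380/api/v1/datasets",              # assumed server address and prefix
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},   # token checked by @token_required
#       json={"name": "my_dataset", "chunk_method": "naive"}, # "name" is the only required field
#   )
#   print(resp.json())  # on success, carries the created dataset after remap_dictionary_keys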


@manager.route("/datasets", methods=["DELETE"])  # noqa: F821
@token_required
def delete(tenant_id):
    """
    Delete datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset deletion parameters.
        required: true
        schema:
          type: object
          required:
            - ids
          properties:
            ids:
              type: array or null
              items:
                type: string
              description: |
                Specifies the datasets to delete:
                - If `null`, all datasets will be deleted.
                - If an array of IDs, only the specified datasets will be deleted.
                - If an empty array, no datasets will be deleted.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    req, err = validate_and_parse_json_request(request, DeleteDatasetReq)
    if err is not None:
        return get_error_argument_result(err)

    try:
        kb_id_instance_pairs = []
        if req["ids"] is None:
            kbs = KnowledgebaseService.query(tenant_id=tenant_id)
            for kb in kbs:
                kb_id_instance_pairs.append((kb.id, kb))
        else:
            error_kb_ids = []
            for kb_id in req["ids"]:
                kb = KnowledgebaseService.get_or_none(id=kb_id, tenant_id=tenant_id)
                if kb is None:
                    error_kb_ids.append(kb_id)
                    continue
                kb_id_instance_pairs.append((kb_id, kb))
            if len(error_kb_ids) > 0:
                return get_error_permission_result(message=f"""User '{tenant_id}' lacks permission for datasets: '{", ".join(error_kb_ids)}'""")

        errors = []
        success_count = 0
        for kb_id, kb in kb_id_instance_pairs:
            for doc in DocumentService.query(kb_id=kb_id):
                if not DocumentService.remove_document(doc, tenant_id):
                    errors.append(f"Remove document '{doc.id}' error for dataset '{kb_id}'")
                    continue
                f2d = File2DocumentService.get_by_document_id(doc.id)
                FileService.filter_delete(
                    [
                        File.source_type == FileSource.KNOWLEDGEBASE,
                        File.id == f2d[0].file_id,
                    ]
                )
                File2DocumentService.delete_by_document_id(doc.id)
            FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kb.name])
            if not KnowledgebaseService.delete_by_id(kb_id):
                errors.append(f"Delete dataset error for {kb_id}")
                continue
            success_count += 1

        if not errors:
            return get_result()

        error_message = f"Successfully deleted {success_count} datasets, {len(errors)} failed. Details: {'; '.join(errors)[:128]}..."
        if success_count == 0:
            return get_error_data_result(message=error_message)

        return get_result(data={"success_count": success_count, "errors": errors[:5]}, message=error_message)
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")
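
# Illustrative sketch (comment-only, not executed): deleting datasets over HTTP. Per the
# docstring above, "ids": null deletes all of the tenant's datasets, a list deletes only
# those IDs, and an empty list deletes nothing. URL, prefix, and API key are assumed
# placeholders.
#
#   import requests
#
#   resp = requests.delete(
#       "http://localhost:9380/api/v1/datasets",              # assumed server address and prefix
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       json={"ids": ["<DATASET_ID_1>", "<DATASET_ID_2>"]},   # or None to delete all datasets
#   )
#   print(resp.json())  # reports success, or a partial-failure summary with per-dataset errors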


@manager.route("/datasets/<dataset_id>", methods=["PUT"])  # noqa: F821
@token_required
def update(tenant_id, dataset_id):
    """
    Update a dataset.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: path
        name: dataset_id
        type: string
        required: true
        description: ID of the dataset to update.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
      - in: body
        name: body
        description: Dataset update parameters.
        required: true
        schema:
          type: object
          properties:
            name:
              type: string
              description: New name of the dataset.
            avatar:
              type: string
              description: Updated base64 encoding of the avatar.
            description:
              type: string
              description: Updated description of the dataset.
            embedding_model:
              type: string
              description: Updated embedding model name.
            permission:
              type: string
              enum: ['me', 'team']
              description: Updated dataset permission.
            chunk_method:
              type: string
              enum: ["naive", "book", "email", "laws", "manual", "one", "paper",
                     "picture", "presentation", "qa", "table", "tag"]
              description: Updated chunking method.
            pagerank:
              type: integer
              description: Updated page rank.
            parser_config:
              type: object
              description: Updated parser configuration.
    responses:
      200:
        description: Successful operation.
        schema:
          type: object
    """
    # Field name transformations during model dump:
    # | Original        | Dump Output |
    # |-----------------|-------------|
    # | embedding_model | embd_id     |
    # | chunk_method    | parser_id   |
    extras = {"dataset_id": dataset_id}
    req, err = validate_and_parse_json_request(request, UpdateDatasetReq, extras=extras, exclude_unset=True)
    if err is not None:
        return get_error_argument_result(err)

    if not req:
        return get_error_argument_result(message="No properties were modified")

    try:
        kb = KnowledgebaseService.get_or_none(id=dataset_id, tenant_id=tenant_id)
        if kb is None:
            return get_error_permission_result(message=f"User '{tenant_id}' lacks permission for dataset '{dataset_id}'")

        if req.get("parser_config"):
            req["parser_config"] = deep_merge(kb.parser_config, req["parser_config"])

        if (chunk_method := req.get("parser_id")) and chunk_method != kb.parser_id:
            if not req.get("parser_config"):
                req["parser_config"] = get_parser_config(chunk_method, None)
        elif "parser_config" in req and not req["parser_config"]:
            del req["parser_config"]

        if "name" in req and req["name"].lower() != kb.name.lower():
            exists = KnowledgebaseService.get_or_none(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)
            if exists:
                return get_error_data_result(message=f"Dataset name '{req['name']}' already exists")

        if "embd_id" in req:
            if not req["embd_id"]:
                req["embd_id"] = kb.embd_id
            if kb.chunk_num != 0 and req["embd_id"] != kb.embd_id:
                return get_error_data_result(message=f"When chunk_num ({kb.chunk_num}) > 0, embedding_model must remain {kb.embd_id}")
            ok, err = verify_embedding_availability(req["embd_id"], tenant_id)
            if not ok:
                return err

        if "pagerank" in req and req["pagerank"] != kb.pagerank:
            if os.environ.get("DOC_ENGINE", "elasticsearch") == "infinity":
                return get_error_argument_result(message="'pagerank' can only be set when doc_engine is elasticsearch")

            if req["pagerank"] > 0:
                settings.docStoreConn.update({"kb_id": kb.id}, {PAGERANK_FLD: req["pagerank"]}, search.index_name(kb.tenant_id), kb.id)
            else:
                # Elasticsearch requires PAGERANK_FLD to be non-zero, so remove the field instead of writing 0.
                settings.docStoreConn.update({"exists": PAGERANK_FLD}, {"remove": PAGERANK_FLD}, search.index_name(kb.tenant_id), kb.id)

        if not KnowledgebaseService.update_by_id(kb.id, req):
            return get_error_data_result(message="Update dataset error.(Database error)")

        ok, k = KnowledgebaseService.get_by_id(kb.id)
        if not ok:
            return get_error_data_result(message="Dataset update failed")

        response_data = remap_dictionary_keys(k.to_dict())
        return get_result(data=response_data)
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")
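
# Illustrative sketch (comment-only): updating a dataset over HTTP. Because the request
# model is parsed with exclude_unset=True, only the fields actually sent are validated
# and applied. The URL, prefix, API key, and example values are assumed placeholders.
#
#   import requests
#
#   dataset_id = "<DATASET_ID>"
#   resp = requests.put(
#       f"http://localhost:9380/api/v1/datasets/{dataset_id}",  # assumed server address and prefix
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       json={"name": "renamed_dataset", "pagerank": 10},        # any subset of updatable fields
#   )
#   print(resp.json())  # on success, the dataset after the update, with remapped keys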


@manager.route("/datasets", methods=["GET"])  # noqa: F821
@token_required
def list_datasets(tenant_id):
    """
    List datasets.
    ---
    tags:
      - Datasets
    security:
      - ApiKeyAuth: []
    parameters:
      - in: query
        name: id
        type: string
        required: false
        description: Dataset ID to filter by.
      - in: query
        name: name
        type: string
        required: false
        description: Dataset name to filter by.
      - in: query
        name: page
        type: integer
        required: false
        default: 1
        description: Page number.
      - in: query
        name: page_size
        type: integer
        required: false
        default: 30
        description: Number of items per page.
      - in: query
        name: orderby
        type: string
        required: false
        default: "create_time"
        description: Field to order by.
      - in: query
        name: desc
        type: boolean
        required: false
        default: true
        description: Whether to sort in descending order.
      - in: header
        name: Authorization
        type: string
        required: true
        description: Bearer token for authentication.
    responses:
      200:
        description: Successful operation.
        schema:
          type: array
          items:
            type: object
    """
    args, err = validate_and_parse_request_args(request, ListDatasetReq)
    if err is not None:
        return get_error_argument_result(err)

    try:
        kb_id = request.args.get("id")
        name = args.get("name")
        if kb_id:
            kbs = KnowledgebaseService.get_kb_by_id(kb_id, tenant_id)
            if not kbs:
                return get_error_permission_result(message=f"User '{tenant_id}' lacks permission for dataset '{kb_id}'")
        if name:
            kbs = KnowledgebaseService.get_kb_by_name(name, tenant_id)
            if not kbs:
                return get_error_permission_result(message=f"User '{tenant_id}' lacks permission for dataset '{name}'")

        tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
        kbs = KnowledgebaseService.get_list(
            [m["tenant_id"] for m in tenants],
            tenant_id,
            args["page"],
            args["page_size"],
            args["orderby"],
            args["desc"],
            kb_id,
            name,
        )

        response_data_list = []
        for kb in kbs:
            response_data_list.append(remap_dictionary_keys(kb))
        return get_result(data=response_data_list)
    except OperationalError as e:
        logging.exception(e)
        return get_error_data_result(message="Database operation failed")
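
# Illustrative sketch (comment-only): listing datasets over HTTP with the query
# parameters documented above. URL, prefix, and API key are assumed placeholders;
# parameter names and defaults come from the docstring.
#
#   import requests
#
#   resp = requests.get(
#       "http://localhost:9380/api/v1/datasets",              # assumed server address and prefix
#       headers={"Authorization": "Bearer <YOUR_API_KEY>"},
#       params={"page": 1, "page_size": 30, "orderby": "create_time", "desc": "true"},
#   )
#   print(resp.json())  # datasets visible to the caller across the tenants they have joined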