disable_segments_from_index_task.py

import logging
import time

import click
from celery import shared_task  # type: ignore

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import Dataset, DocumentSegment
from models.dataset import Document as DatasetDocument
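

# Celery registers this function as a shared task and routes it to the
# dedicated "dataset" queue, keeping indexing work off the default worker queue.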
@shared_task(queue="dataset")
def disable_segments_from_index_task(segment_ids: list, dataset_id: str, document_id: str):
    """
    Asynchronously disable segments by removing them from the dataset's index.

    :param segment_ids: list of segment ids
    :param dataset_id: dataset id
    :param document_id: document id

    Usage: disable_segments_from_index_task.delay(segment_ids, dataset_id, document_id)
    """
    start_at = time.perf_counter()

    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
    if not dataset:
        logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
        db.session.close()
        return
    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
    if not dataset_document:
        logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
        db.session.close()
        return
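
    # Only documents that are enabled, not archived, and fully indexed have
    # segments present in the index; anything else is skipped as a no-op.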
    if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
        logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan"))
        db.session.close()
        return
    # Build the index processor that matches the document's doc form.
    index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()
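
    # Scope the lookup to the given dataset and document so stray segment ids
    # cannot disable segments belonging to another dataset.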
    segments = (
        db.session.query(DocumentSegment)
        .filter(
            DocumentSegment.id.in_(segment_ids),
            DocumentSegment.dataset_id == dataset_id,
            DocumentSegment.document_id == document_id,
        )
        .all()
    )
    if not segments:
        db.session.close()
        return
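
    # Each segment carries the node id under which it was written to the
    # index; clean() removes those nodes (including their keyword entries).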
    try:
        index_node_ids = [segment.index_node_id for segment in segments]
        index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)

        end_at = time.perf_counter()
        logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green"))
    except Exception:
        # Roll back the disable: the index clean failed, so re-enable the
        # segments in the database to keep them consistent with the index.
        db.session.query(DocumentSegment).filter(
            DocumentSegment.id.in_(segment_ids),
            DocumentSegment.dataset_id == dataset_id,
            DocumentSegment.document_id == document_id,
        ).update(
            {
                "disabled_at": None,
                "disabled_by": None,
                "enabled": True,
            }
        )
        db.session.commit()
    finally:
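        # Remove each segment's Redis indexing-cache key so the segment is no
        # longer marked as being processed, whether the clean succeeded or not.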
        for segment in segments:
            indexing_cache_key = "segment_{}_indexing".format(segment.id)
            redis_client.delete(indexing_cache_key)
        db.session.close()
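

# Usage sketch (illustrative; the caller-side names `segments`, `dataset`, and
# `document` are assumptions, only the .delay() call is documented above):
# a service disabling segments would update their enabled flag, commit, and
# then enqueue this task, e.g.:
#
#     disable_segments_from_index_task.delay(
#         segment_ids=[segment.id for segment in segments],
#         dataset_id=dataset.id,
#         document_id=document.id,
#     )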