You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

remove_document_from_index_task.py 2.7KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475
  1. import logging
  2. import time
  3. import click
  4. from celery import shared_task # type: ignore
  5. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  6. from extensions.ext_database import db
  7. from extensions.ext_redis import redis_client
  8. from libs.datetime_utils import naive_utc_now
  9. from models.dataset import Document, DocumentSegment
  10. @shared_task(queue="dataset")
  11. def remove_document_from_index_task(document_id: str):
  12. """
  13. Async Remove document from index
  14. :param document_id: document id
  15. Usage: remove_document_from_index.delay(document_id)
  16. """
  17. logging.info(click.style(f"Start remove document segments from index: {document_id}", fg="green"))
  18. start_at = time.perf_counter()
  19. document = db.session.query(Document).where(Document.id == document_id).first()
  20. if not document:
  21. logging.info(click.style(f"Document not found: {document_id}", fg="red"))
  22. db.session.close()
  23. return
  24. if document.indexing_status != "completed":
  25. logging.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red"))
  26. db.session.close()
  27. return
  28. indexing_cache_key = f"document_{document.id}_indexing"
  29. try:
  30. dataset = document.dataset
  31. if not dataset:
  32. raise Exception("Document has no dataset")
  33. index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
  34. segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).all()
  35. index_node_ids = [segment.index_node_id for segment in segments]
  36. if index_node_ids:
  37. try:
  38. index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
  39. except Exception:
  40. logging.exception("clean dataset %s from index failed", dataset.id)
  41. # update segment to disable
  42. db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
  43. {
  44. DocumentSegment.enabled: False,
  45. DocumentSegment.disabled_at: naive_utc_now(),
  46. DocumentSegment.disabled_by: document.disabled_by,
  47. DocumentSegment.updated_at: naive_utc_now(),
  48. }
  49. )
  50. db.session.commit()
  51. end_at = time.perf_counter()
  52. logging.info(
  53. click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green")
  54. )
  55. except Exception:
  56. logging.exception("remove document from index failed")
  57. if not document.archived:
  58. document.enabled = True
  59. db.session.commit()
  60. finally:
  61. redis_client.delete(indexing_cache_key)
  62. db.session.close()