You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

remove_document_from_index_task.py 2.7KB

2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475
  1. import logging
  2. import time
  3. import click
  4. from celery import shared_task
  5. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  6. from extensions.ext_database import db
  7. from extensions.ext_redis import redis_client
  8. from libs.datetime_utils import naive_utc_now
  9. from models.dataset import Document, DocumentSegment
  10. logger = logging.getLogger(__name__)
  11. @shared_task(queue="dataset")
  12. def remove_document_from_index_task(document_id: str):
  13. """
  14. Async Remove document from index
  15. :param document_id: document id
  16. Usage: remove_document_from_index.delay(document_id)
  17. """
  18. logger.info(click.style(f"Start remove document segments from index: {document_id}", fg="green"))
  19. start_at = time.perf_counter()
  20. document = db.session.query(Document).where(Document.id == document_id).first()
  21. if not document:
  22. logger.info(click.style(f"Document not found: {document_id}", fg="red"))
  23. db.session.close()
  24. return
  25. if document.indexing_status != "completed":
  26. logger.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red"))
  27. db.session.close()
  28. return
  29. indexing_cache_key = f"document_{document.id}_indexing"
  30. try:
  31. dataset = document.dataset
  32. if not dataset:
  33. raise Exception("Document has no dataset")
  34. index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
  35. segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).all()
  36. index_node_ids = [segment.index_node_id for segment in segments]
  37. if index_node_ids:
  38. try:
  39. index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
  40. except Exception:
  41. logger.exception("clean dataset %s from index failed", dataset.id)
  42. # update segment to disable
  43. db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
  44. {
  45. DocumentSegment.enabled: False,
  46. DocumentSegment.disabled_at: naive_utc_now(),
  47. DocumentSegment.disabled_by: document.disabled_by,
  48. DocumentSegment.updated_at: naive_utc_now(),
  49. }
  50. )
  51. db.session.commit()
  52. end_at = time.perf_counter()
  53. logger.info(click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green"))
  54. except Exception:
  55. logger.exception("remove document from index failed")
  56. if not document.archived:
  57. document.enabled = True
  58. db.session.commit()
  59. finally:
  60. redis_client.delete(indexing_cache_key)
  61. db.session.close()