You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

remove_document_from_index_task.py 2.8KB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465666768697071727374757677
  1. import datetime
  2. import logging
  3. import time
  4. import click
  5. from celery import shared_task # type: ignore
  6. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  7. from extensions.ext_database import db
  8. from extensions.ext_redis import redis_client
  9. from models.dataset import Document, DocumentSegment
  10. @shared_task(queue="dataset")
  11. def remove_document_from_index_task(document_id: str):
  12. """
  13. Async Remove document from index
  14. :param document_id: document id
  15. Usage: remove_document_from_index.delay(document_id)
  16. """
  17. logging.info(click.style("Start remove document segments from index: {}".format(document_id), fg="green"))
  18. start_at = time.perf_counter()
  19. document = db.session.query(Document).filter(Document.id == document_id).first()
  20. if not document:
  21. logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
  22. db.session.close()
  23. return
  24. if document.indexing_status != "completed":
  25. logging.info(click.style("Document is not completed, remove is not allowed: {}".format(document_id), fg="red"))
  26. db.session.close()
  27. return
  28. indexing_cache_key = "document_{}_indexing".format(document.id)
  29. try:
  30. dataset = document.dataset
  31. if not dataset:
  32. raise Exception("Document has no dataset")
  33. index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
  34. segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).all()
  35. index_node_ids = [segment.index_node_id for segment in segments]
  36. if index_node_ids:
  37. try:
  38. index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
  39. except Exception:
  40. logging.exception(f"clean dataset {dataset.id} from index failed")
  41. # update segment to disable
  42. db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).update(
  43. {
  44. DocumentSegment.enabled: False,
  45. DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  46. DocumentSegment.disabled_by: document.disabled_by,
  47. DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  48. }
  49. )
  50. db.session.commit()
  51. end_at = time.perf_counter()
  52. logging.info(
  53. click.style(
  54. "Document removed from index: {} latency: {}".format(document.id, end_at - start_at), fg="green"
  55. )
  56. )
  57. except Exception:
  58. logging.exception("remove document from index failed")
  59. if not document.archived:
  60. document.enabled = True
  61. db.session.commit()
  62. finally:
  63. redis_client.delete(indexing_cache_key)
  64. db.session.close()