add_document_to_index_task.py

import datetime
import logging
import time

import click
from celery import shared_task  # type: ignore

from core.rag.index_processor.constant.index_type import IndexType
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import ChildDocument, Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DatasetAutoDisableLog, DocumentSegment
from models.dataset import Document as DatasetDocument


@shared_task(queue="dataset")
def add_document_to_index_task(dataset_document_id: str):
    """
    Async task: add a completed document's segments back to the dataset index.

    :param dataset_document_id: id of the DatasetDocument to add to the index
    Usage: add_document_to_index_task.delay(dataset_document_id)
    """
    logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
    start_at = time.perf_counter()

    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document_id).first()
    if not dataset_document:
        logging.info(click.style("Document not found: {}".format(dataset_document_id), fg="red"))
        db.session.close()
        return

    # Only documents that have finished indexing can be added back to the index.
    if dataset_document.indexing_status != "completed":
        return
    indexing_cache_key = "document_{}_indexing".format(dataset_document.id)

    try:
        dataset = dataset_document.dataset
        if not dataset:
            raise Exception(f"Document {dataset_document.id} dataset {dataset_document.dataset_id} doesn't exist.")

        # Fetch the document's disabled-but-fully-indexed segments, in document order.
        segments = (
            db.session.query(DocumentSegment)
            .filter(
                DocumentSegment.document_id == dataset_document.id,
                DocumentSegment.enabled == False,  # noqa: E712
                DocumentSegment.status == "completed",
            )
            .order_by(DocumentSegment.position.asc())
            .all()
        )
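
        # These segments were disabled (e.g. by the auto-disable job, judging by
        # the DatasetAutoDisableLog cleanup below) but had already reached
        # "completed" status, so they can be re-added to the index directly.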

        documents = []
        for segment in segments:
            document = Document(
                page_content=segment.content,
                metadata={
                    "doc_id": segment.index_node_id,
                    "doc_hash": segment.index_node_hash,
                    "document_id": segment.document_id,
                    "dataset_id": segment.dataset_id,
                },
            )
            # For parent-child indexes, attach the segment's child chunks so
            # they are indexed together with their parent document.
            if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
                child_chunks = segment.get_child_chunks()
                if child_chunks:
                    child_documents = []
                    for child_chunk in child_chunks:
                        child_document = ChildDocument(
                            page_content=child_chunk.content,
                            metadata={
                                "doc_id": child_chunk.index_node_id,
                                "doc_hash": child_chunk.index_node_hash,
                                "document_id": segment.document_id,
                                "dataset_id": segment.dataset_id,
                            },
                        )
                        child_documents.append(child_document)
                    document.children = child_documents
            documents.append(document)
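
        # Resulting shape for a parent-child index (illustrative sketch, not
        # actual output):
        #   Document(page_content=<segment text>,
        #            metadata={"doc_id": ..., "doc_hash": ..., ...},
        #            children=[ChildDocument(page_content=<chunk text>, ...), ...])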
        index_type = dataset.doc_form
        index_processor = IndexProcessorFactory(index_type).init_index_processor()
        index_processor.load(dataset, documents)

        # Delete any auto-disable log entries for this document.
        db.session.query(DatasetAutoDisableLog).filter(
            DatasetAutoDisableLog.document_id == dataset_document.id
        ).delete()

        # Re-enable the segments now that they are back in the index.
        db.session.query(DocumentSegment).filter(DocumentSegment.document_id == dataset_document.id).update(
            {
                DocumentSegment.enabled: True,
                DocumentSegment.disabled_at: None,
                DocumentSegment.disabled_by: None,
                DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            }
        )
        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style(
                "Document added to index: {} latency: {}".format(dataset_document.id, end_at - start_at), fg="green"
            )
        )
    except Exception as e:
        logging.exception("add document to index failed")
        # On failure, disable the document and record the error for the UI.
        dataset_document.enabled = False
        dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        dataset_document.indexing_status = "error"
        dataset_document.error = str(e)
        db.session.commit()
    finally:
        # Always clear the "indexing in progress" cache flag.
        redis_client.delete(indexing_cache_key)
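
For context, a minimal sketch of how a caller might enqueue this task. The task
deletes the document_{id}_indexing Redis key in its finally block, which implies
the dispatching code sets that key as an in-progress marker beforehand; the
helper name, import path, and 600-second TTL below are illustrative assumptions,
not part of this file.

    # caller_sketch.py -- illustrative only (assumed import path and TTL)
    from extensions.ext_redis import redis_client
    from tasks.add_document_to_index_task import add_document_to_index_task

    def enqueue_document_indexing(dataset_document_id: str) -> None:
        # Mark the document as "indexing" so duplicate requests can be skipped;
        # add_document_to_index_task clears this key in its finally block.
        indexing_cache_key = "document_{}_indexing".format(dataset_document_id)
        redis_client.setex(indexing_cache_key, 600, 1)
        # Enqueue on the "dataset" Celery queue (per the task's decorator).
        add_document_to_index_task.delay(dataset_document_id)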