You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

enable_segment_to_index_task.py 3.7KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100
  1. import datetime
  2. import logging
  3. import time
  4. import click
  5. from celery import shared_task # type: ignore
  6. from core.rag.index_processor.constant.index_type import IndexType
  7. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  8. from core.rag.models.document import ChildDocument, Document
  9. from extensions.ext_database import db
  10. from extensions.ext_redis import redis_client
  11. from models.dataset import DocumentSegment
  12. @shared_task(queue="dataset")
  13. def enable_segment_to_index_task(segment_id: str):
  14. """
  15. Async enable segment to index
  16. :param segment_id:
  17. Usage: enable_segment_to_index_task.delay(segment_id)
  18. """
  19. logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green"))
  20. start_at = time.perf_counter()
  21. segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
  22. if not segment:
  23. logging.info(click.style("Segment not found: {}".format(segment_id), fg="red"))
  24. db.session.close()
  25. return
  26. if segment.status != "completed":
  27. logging.info(click.style("Segment is not completed, enable is not allowed: {}".format(segment_id), fg="red"))
  28. db.session.close()
  29. return
  30. indexing_cache_key = "segment_{}_indexing".format(segment.id)
  31. try:
  32. document = Document(
  33. page_content=segment.content,
  34. metadata={
  35. "doc_id": segment.index_node_id,
  36. "doc_hash": segment.index_node_hash,
  37. "document_id": segment.document_id,
  38. "dataset_id": segment.dataset_id,
  39. },
  40. )
  41. dataset = segment.dataset
  42. if not dataset:
  43. logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan"))
  44. return
  45. dataset_document = segment.document
  46. if not dataset_document:
  47. logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan"))
  48. return
  49. if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
  50. logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan"))
  51. return
  52. index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()
  53. if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
  54. child_chunks = segment.get_child_chunks()
  55. if child_chunks:
  56. child_documents = []
  57. for child_chunk in child_chunks:
  58. child_document = ChildDocument(
  59. page_content=child_chunk.content,
  60. metadata={
  61. "doc_id": child_chunk.index_node_id,
  62. "doc_hash": child_chunk.index_node_hash,
  63. "document_id": segment.document_id,
  64. "dataset_id": segment.dataset_id,
  65. },
  66. )
  67. child_documents.append(child_document)
  68. document.children = child_documents
  69. # save vector index
  70. index_processor.load(dataset, [document])
  71. end_at = time.perf_counter()
  72. logging.info(
  73. click.style("Segment enabled to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green")
  74. )
  75. except Exception as e:
  76. logging.exception("enable segment to index failed")
  77. segment.enabled = False
  78. segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  79. segment.status = "error"
  80. segment.error = str(e)
  81. db.session.commit()
  82. finally:
  83. redis_client.delete(indexing_cache_key)
  84. db.session.close()