
retry_document_indexing_task.py

import logging
import time

import click
from celery import shared_task

from core.indexing_runner import IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from services.feature_service import FeatureService

logger = logging.getLogger(__name__)


@shared_task(queue="dataset")
def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
    """
    Async retry of document indexing.

    :param dataset_id: ID of the dataset the documents belong to
    :param document_ids: IDs of the documents to re-index

    Usage: retry_document_indexing_task.delay(dataset_id, document_ids)
    """
    documents: list[Document] = []
    start_at = time.perf_counter()
    try:
        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
        if not dataset:
            logger.info(click.style(f"Dataset not found: {dataset_id}", fg="red"))
            return
        tenant_id = dataset.tenant_id
        for document_id in document_ids:
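            # Redis flag assumed to be set by the caller when the retry was
            # requested; this task clears it once the document finishes or fails.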
            retry_indexing_cache_key = f"document_{document_id}_is_retried"
            # check document limit
            features = FeatureService.get_features(tenant_id)
            try:
                if features.billing.enabled:
                    vector_space = features.vector_space
                    if 0 < vector_space.limit <= vector_space.size:
                        raise ValueError(
                            "Your total number of documents plus the number of uploads has exceeded "
                            "the limit of your subscription."
                        )
            except Exception as e:
                document = (
                    db.session.query(Document)
                    .where(Document.id == document_id, Document.dataset_id == dataset_id)
                    .first()
                )
                if document:
                    document.indexing_status = "error"
                    document.error = str(e)
                    document.stopped_at = naive_utc_now()
                    db.session.add(document)
                    db.session.commit()
                redis_client.delete(retry_indexing_cache_key)
                return

            logger.info(click.style(f"Start retry document: {document_id}", fg="green"))
            document = (
                db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
            )
            if not document:
                logger.info(click.style(f"Document not found: {document_id}", fg="yellow"))
                return
            try:
                # clean old data
                index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()

                segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
                if segments:
                    index_node_ids = [segment.index_node_id for segment in segments]
                    # delete from vector index
                    index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
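
                    # then drop the stale segment rows themselves so the
                    # indexing runner can rebuild them from scratch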
                    for segment in segments:
                        db.session.delete(segment)
                    db.session.commit()

                document.indexing_status = "parsing"
                document.processing_started_at = naive_utc_now()
                db.session.add(document)
                db.session.commit()

                indexing_runner = IndexingRunner()
                indexing_runner.run([document])
                redis_client.delete(retry_indexing_cache_key)
            except Exception as ex:
                document.indexing_status = "error"
                document.error = str(ex)
                document.stopped_at = naive_utc_now()
                db.session.add(document)
                db.session.commit()
                logger.info(click.style(str(ex), fg="yellow"))
                redis_client.delete(retry_indexing_cache_key)
                logger.exception("retry_document_indexing_task failed, document_id: %s", document_id)

        end_at = time.perf_counter()
        logger.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
    except Exception as e:
        logger.exception(
            "retry_document_indexing_task failed, dataset_id: %s, document_ids: %s", dataset_id, document_ids
        )
        raise e
    finally:
        db.session.close()
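

For context, a minimal caller sketch. The task assumes the dispatching service has already set a per-document "is_retried" flag in Redis, matching the retry_indexing_cache_key convention above. The function name retry_documents, the import path tasks.retry_document_indexing_task, and the 600-second TTL are illustrative assumptions, not part of this file.

    from extensions.ext_redis import redis_client
    from tasks.retry_document_indexing_task import retry_document_indexing_task

    def retry_documents(dataset_id: str, document_ids: list[str]) -> None:
        # Mark each document as "retry in progress" so duplicate retries can be
        # detected; the task deletes these keys when it finishes or errors out.
        for document_id in document_ids:
            redis_client.setex(f"document_{document_id}_is_retried", 600, 1)
        # Enqueue the Celery task on the "dataset" queue.
        retry_document_indexing_task.delay(dataset_id, document_ids)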