
task_broker.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
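"""
Task broker: a long-running loop that turns newly uploaded documents into
chunk-level Task rows for the task executors, then folds per-task progress
back into document-level status.
"""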
import logging
import os
import random
import time
from datetime import datetime

import pandas as pd

from api.db import FileType, TaskStatus
from api.db.db_models import Task
from api.db.db_utils import bulk_insert_into_db
from api.db.services.document_service import DocumentService
from api.db.services.task_service import TaskService
from api.settings import database_logger
from api.utils import get_format_time, get_uuid
from api.utils.file_utils import get_project_base_directory
from deepdoc.parser import PdfParser
from deepdoc.parser.excel_parser import HuExcelParser
from rag.settings import cron_logger
from rag.utils import MINIO, findMaxTm
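

# Fetch documents uploaded after timestamp `tm` as a DataFrame; an empty
# frame means there is nothing to dispatch this round.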
def collect(tm):
    docs = DocumentService.get_newly_uploaded(tm)
    if len(docs) == 0:
        return pd.DataFrame()
    docs = pd.DataFrame(docs)
    mtm = docs["update_time"].max()
    cron_logger.info("TOTAL:{}, To:{}".format(len(docs), mtm))
    return docs
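

# Mark a document as dispatched: a tiny non-zero progress value makes the
# UI show it as started, and `process_begin_at` anchors the duration that
# update_progress() reports later.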
def set_dispatching(docid):
    try:
        DocumentService.update_by_id(
            docid, {"progress": random.random() / 100.,
                    "progress_msg": "Task dispatched...",
                    "process_begin_at": get_format_time()
                    })
    except Exception as e:
        cron_logger.error("set_dispatching:({}), {}".format(docid, str(e)))
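

# One dispatch round: find documents newer than the high-water mark stored
# in rag/res/broker.tm, split each into chunk tasks (page ranges for PDFs,
# row ranges for spreadsheets, a single task otherwise), bulk-insert the
# tasks, and append each document's update_time to the .tm file so it is
# not picked up again.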
def dispatch():
    # The .tm file stores the max update_time seen so far; findMaxTm reads
    # it back so already-dispatched documents are skipped by collect().
    tm_fnm = os.path.join(
        get_project_base_directory(),
        "rag/res",
        "broker.tm")
    tm = findMaxTm(tm_fnm)
    rows = collect(tm)
    if len(rows) == 0:
        return

    tmf = open(tm_fnm, "a+")
    for _, r in rows.iterrows():
        # Drop any stale tasks left over from a previous dispatch of this doc.
        try:
            tsks = TaskService.query(doc_id=r["id"])
            if tsks:
                for t in tsks:
                    TaskService.delete_by_id(t.id)
        except Exception as e:
            cron_logger.exception(e)

        def new_task():
            nonlocal r
            return {
                "id": get_uuid(),
                "doc_id": r["id"]
            }

        tsks = []
        try:
            if r["type"] == FileType.PDF.value:
                do_layout = r["parser_config"].get("layout_recognize", True)
                pages = PdfParser.total_page_number(
                    r["name"], MINIO.get(r["kb_id"], r["location"]))
                # Default chunk size is 12 pages; "paper" documents use 22,
                # while "one" (whole-document) parsing or disabled layout
                # recognition collapses everything into a single task.
                page_size = r["parser_config"].get("task_page_size", 12)
                if r["parser_id"] == "paper":
                    page_size = r["parser_config"].get("task_page_size", 22)
                if r["parser_id"] == "one":
                    page_size = 1000000000
                if not do_layout:
                    page_size = 1000000000
                page_ranges = r["parser_config"].get("pages")
                if not page_ranges:
                    page_ranges = [(1, 100000)]
                for s, e in page_ranges:
                    # Convert 1-based inclusive page ranges to 0-based.
                    s -= 1
                    s = max(0, s)
                    e = min(e - 1, pages)
                    for p in range(s, e, page_size):
                        task = new_task()
                        task["from_page"] = p
                        task["to_page"] = min(p + page_size, e)
                        tsks.append(task)
            elif r["parser_id"] == "table":
                # Spreadsheets are split into tasks of 3000 rows each.
                rn = HuExcelParser.row_number(
                    r["name"], MINIO.get(
                        r["kb_id"], r["location"]))
                for i in range(0, rn, 3000):
                    task = new_task()
                    task["from_page"] = i
                    task["to_page"] = min(i + 3000, rn)
                    tsks.append(task)
            else:
                tsks.append(new_task())

            bulk_insert_into_db(Task, tsks, True)
            set_dispatching(r["id"])
        except Exception as e:
            cron_logger.exception(e)
        # Advance the high-water mark even if task creation failed, so one
        # bad document cannot wedge the broker.
        tmf.write(str(r["update_time"]) + "\n")
    tmf.close()
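

# Roll per-task progress up to the document: average the task progress
# values, mark the document FAIL if any task reported -1, DONE once every
# task has finished, and record the elapsed processing time.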
def update_progress():
    docs = DocumentService.get_unfinished_docs()
    for d in docs:
        try:
            tsks = TaskService.query(doc_id=d["id"], order_by=Task.create_time)
            if not tsks:
                continue
            msg = []
            prg = 0
            finished = True
            bad = 0
            status = TaskStatus.RUNNING.value
            for t in tsks:
                # Progress semantics: -1 = failed, [0, 1) = running, 1 = done.
                if 0 <= t.progress < 1:
                    finished = False
                prg += t.progress if t.progress >= 0 else 0
                msg.append(t.progress_msg)
                if t.progress == -1:
                    bad += 1
            prg /= len(tsks)
            if finished and bad:
                prg = -1
                status = TaskStatus.FAIL.value
            elif finished:
                status = TaskStatus.DONE.value
            msg = "\n".join(msg)
            info = {
                # "process_duation" (sic) is kept as-is: the key must match
                # the column name defined in db_models.
                "process_duation": datetime.timestamp(
                    datetime.now()) - d["process_begin_at"].timestamp(),
                "run": status}
            if prg != 0:
                info["progress"] = prg
            if msg:
                info["progress_msg"] = msg
            DocumentService.update_by_id(d["id"], info)
        except Exception as e:
            cron_logger.error("fetch task exception:" + str(e))
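

# Entry point: quiet peewee's logger down to the API's database log level,
# then alternate between dispatching new documents and refreshing progress.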
if __name__ == "__main__":
    peewee_logger = logging.getLogger('peewee')
    peewee_logger.propagate = False
    peewee_logger.addHandler(database_logger.handlers[0])
    peewee_logger.setLevel(database_logger.level)

    while True:
        dispatch()
        time.sleep(1)
        update_progress()