#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import random
import time
from datetime import datetime

import pandas as pd

from api.db import FileType, TaskStatus
from api.db.db_models import Task
from api.db.db_models import init_database_tables as init_web_db
from api.db.db_utils import bulk_insert_into_db
from api.db.init_data import init_web_data
from api.db.services.document_service import DocumentService
from api.db.services.task_service import TaskService
from api.settings import database_logger
from api.utils import get_format_time, get_uuid
from api.utils.file_utils import get_project_base_directory
from deepdoc.parser import PdfParser
from deepdoc.parser.excel_parser import HuExcelParser
from rag.settings import cron_logger
from rag.utils import MINIO, findMaxTm
from rag.utils.redis_conn import REDIS_CONN
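

# Task broker: polls for newly uploaded documents, fans each one out into
# parser tasks sized for parallel workers, and rolls per-task progress back
# up onto the owning document.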
def collect(tm):
    # Fetch documents uploaded since the last recorded timestamp.
    docs = DocumentService.get_newly_uploaded(tm)
    if len(docs) == 0:
        return pd.DataFrame()
    docs = pd.DataFrame(docs)
    mtm = docs["update_time"].max()
    cron_logger.info("TOTAL:{}, To:{}".format(len(docs), mtm))
    return docs
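

# Mark a document as dispatched with a near-zero progress value so the UI
# shows activity before any worker has picked up its tasks.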
def set_dispatching(docid):
    try:
        DocumentService.update_by_id(
            docid, {"progress": random.random() * 1 / 100.,
                    "progress_msg": "Task dispatched...",
                    "process_begin_at": get_format_time()
                    })
    except Exception as e:
        cron_logger.error("set_dispatching:({}), {}".format(docid, str(e)))
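

# Collect new documents and fan each one out into Task rows: PDFs are split
# into page ranges, spreadsheets into row ranges, and everything else becomes
# a single task.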
def dispatch():
    tm_fnm = os.path.join(
        get_project_base_directory(),
        "rag/res",
        "broker.tm")
    tm = findMaxTm(tm_fnm)
    rows = collect(tm)
    if len(rows) == 0:
        return

    tmf = open(tm_fnm, "a+")
    for _, r in rows.iterrows():
        try:
            # Re-dispatching a document: drop any stale tasks first.
            tsks = TaskService.query(doc_id=r["id"])
            if tsks:
                for t in tsks:
                    TaskService.delete_by_id(t.id)
        except Exception as e:
            cron_logger.exception(e)

        def new_task():
            nonlocal r
            return {
                "id": get_uuid(),
                "doc_id": r["id"]
            }

        tsks = []
        try:
            file_bin = MINIO.get(r["kb_id"], r["location"])
            # Cache the raw file in Redis so workers can skip a MinIO round trip.
            if REDIS_CONN.is_alive():
                try:
                    REDIS_CONN.set("{}/{}".format(r["kb_id"], r["location"]),
                                   file_bin, 12 * 60)
                except Exception as e:
                    cron_logger.warning("Put into redis[EXCEPTION]:" + str(e))

            if r["type"] == FileType.PDF.value:
                do_layout = r["parser_config"].get("layout_recognize", True)
                pages = PdfParser.total_page_number(r["name"], file_bin)
                page_size = r["parser_config"].get("task_page_size", 12)
                if r["parser_id"] == "paper":
                    page_size = r["parser_config"].get("task_page_size", 22)
                if r["parser_id"] == "one":
                    page_size = 1000000000
                if not do_layout:
                    # Without layout recognition the whole PDF goes to one task.
                    page_size = 1000000000
                page_ranges = r["parser_config"].get("pages")
                if not page_ranges:
                    page_ranges = [(1, 100000)]
                for s, e in page_ranges:
                    s -= 1
                    s = max(0, s)
                    e = min(e - 1, pages)
                    for p in range(s, e, page_size):
                        task = new_task()
                        task["from_page"] = p
                        task["to_page"] = min(p + page_size, e)
                        tsks.append(task)
            elif r["parser_id"] == "table":
                # Spreadsheets are chunked 3000 rows at a time.
                rn = HuExcelParser.row_number(r["name"], file_bin)
                for i in range(0, rn, 3000):
                    task = new_task()
                    task["from_page"] = i
                    task["to_page"] = min(i + 3000, rn)
                    tsks.append(task)
            else:
                tsks.append(new_task())

            bulk_insert_into_db(Task, tsks, True)
            set_dispatching(r["id"])
        except Exception as e:
            cron_logger.exception(e)

        # Record the high-water mark so this document is not collected again.
        tmf.write(str(r["update_time"]) + "\n")
    tmf.close()
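

# Aggregate per-task progress back onto each unfinished document and decide
# whether it is still running, done, or failed.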
def update_progress():
    docs = DocumentService.get_unfinished_docs()
    for d in docs:
        try:
            tsks = TaskService.query(doc_id=d["id"], order_by=Task.create_time)
            if not tsks:
                continue
            msg = []
            prg = 0
            finished = True
            bad = 0
            status = TaskStatus.RUNNING.value
            for t in tsks:
                if 0 <= t.progress < 1:
                    finished = False
                prg += t.progress if t.progress >= 0 else 0
                msg.append(t.progress_msg)
                if t.progress == -1:
                    bad += 1
            prg /= len(tsks)
            if finished and bad:
                # Any failed task (progress == -1) fails the whole document.
                prg = -1
                status = TaskStatus.FAIL.value
            elif finished:
                status = TaskStatus.DONE.value

            msg = "\n".join(msg)
            info = {
                # NB: "process_duation" (sic) matches the field name the
                # Document model expects, so it is kept as-is.
                "process_duation": datetime.timestamp(datetime.now()) -
                d["process_begin_at"].timestamp(),
                "run": status}
            if prg != 0:
                info["progress"] = prg
            if msg:
                info["progress_msg"] = msg
            DocumentService.update_by_id(d["id"], info)
        except Exception as e:
            cron_logger.error("fetch task exception:" + str(e))
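

# Entry point: route peewee's logging through the web app's database logger,
# initialize the database, then loop forever dispatching and updating.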
if __name__ == "__main__":
    peewee_logger = logging.getLogger('peewee')
    peewee_logger.propagate = False
    peewee_logger.addHandler(database_logger.handlers[0])
    peewee_logger.setLevel(database_logger.level)
    # init db
    init_web_db()
    init_web_data()

    while True:
        dispatch()
        time.sleep(1)
        update_progress()