
redis_conn.py

#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import json
import time
import uuid

import valkey as redis

from rag import settings
from rag.utils import singleton


class Payload:
    """A single message read from a Redis stream, with acknowledgement support."""

    def __init__(self, consumer, queue_name, group_name, msg_id, message):
        self.__consumer = consumer
        self.__queue_name = queue_name
        self.__group_name = group_name
        self.__msg_id = msg_id
        self.__message = json.loads(message["message"])

    def ack(self):
        try:
            self.__consumer.xack(self.__queue_name, self.__group_name, self.__msg_id)
            return True
        except Exception as e:
            logging.warning("[EXCEPTION]ack" + str(self.__queue_name) + "||" + str(e))
        return False

    def get_message(self):
        return self.__message


@singleton
class RedisDB:
    """Thin wrapper around a valkey/redis connection that reopens it after errors."""

    def __init__(self):
        self.REDIS = None
        self.config = settings.REDIS
        self.__open__()

    def __open__(self):
        try:
            self.REDIS = redis.StrictRedis(
                host=self.config["host"].split(":")[0],
                port=int(self.config.get("host", ":6379").split(":")[1]),
                db=int(self.config.get("db", 1)),
                password=self.config.get("password"),
                decode_responses=True,
            )
        except Exception:
            logging.warning("Redis can't be connected.")
        return self.REDIS

    def health(self):
        # Round-trip a short-lived key to verify the connection is usable.
        self.REDIS.ping()
        a, b = "xx", "yy"
        self.REDIS.set(a, b, 3)
        if self.REDIS.get(a) == b:
            return True

    def is_alive(self):
        return self.REDIS is not None

    def exist(self, k):
        if not self.REDIS:
            return
        try:
            return self.REDIS.exists(k)
        except Exception as e:
            logging.warning("RedisDB.exist " + str(k) + " got exception: " + str(e))
            self.__open__()

    def get(self, k):
        if not self.REDIS:
            return
        try:
            return self.REDIS.get(k)
        except Exception as e:
            logging.warning("RedisDB.get " + str(k) + " got exception: " + str(e))
            self.__open__()

    def set_obj(self, k, obj, exp=3600):
        try:
            self.REDIS.set(k, json.dumps(obj, ensure_ascii=False), exp)
            return True
        except Exception as e:
            logging.warning("RedisDB.set_obj " + str(k) + " got exception: " + str(e))
            self.__open__()
        return False

    def set(self, k, v, exp=3600):
        try:
            self.REDIS.set(k, v, exp)
            return True
        except Exception as e:
            logging.warning("RedisDB.set " + str(k) + " got exception: " + str(e))
            self.__open__()
        return False

    def sadd(self, key: str, member: str):
        try:
            self.REDIS.sadd(key, member)
            return True
        except Exception as e:
            logging.warning("RedisDB.sadd " + str(key) + " got exception: " + str(e))
            self.__open__()
        return False

    def srem(self, key: str, member: str):
        try:
            self.REDIS.srem(key, member)
            return True
        except Exception as e:
            logging.warning("RedisDB.srem " + str(key) + " got exception: " + str(e))
            self.__open__()
        return False

    def smembers(self, key: str):
        try:
            res = self.REDIS.smembers(key)
            return res
        except Exception as e:
            logging.warning(
                "RedisDB.smembers " + str(key) + " got exception: " + str(e)
            )
            self.__open__()
        return None

    def zadd(self, key: str, member: str, score: float):
        try:
            self.REDIS.zadd(key, {member: score})
            return True
        except Exception as e:
            logging.warning("RedisDB.zadd " + str(key) + " got exception: " + str(e))
            self.__open__()
        return False

    def zcount(self, key: str, min: float, max: float):
        try:
            res = self.REDIS.zcount(key, min, max)
            return res
        except Exception as e:
            logging.warning("RedisDB.zcount " + str(key) + " got exception: " + str(e))
            self.__open__()
        return 0

    def zpopmin(self, key: str, count: int):
        try:
            res = self.REDIS.zpopmin(key, count)
            return res
        except Exception as e:
            logging.warning("RedisDB.zpopmin " + str(key) + " got exception: " + str(e))
            self.__open__()
        return None

    def zrangebyscore(self, key: str, min: float, max: float):
        try:
            res = self.REDIS.zrangebyscore(key, min, max)
            return res
        except Exception as e:
            logging.warning(
                "RedisDB.zrangebyscore " + str(key) + " got exception: " + str(e)
            )
            self.__open__()
        return None

    def transaction(self, key, value, exp=3600):
        try:
            pipeline = self.REDIS.pipeline(transaction=True)
            pipeline.set(key, value, exp, nx=True)
            pipeline.execute()
            return True
        except Exception as e:
            logging.warning(
                "RedisDB.transaction " + str(key) + " got exception: " + str(e)
            )
            self.__open__()
        return False

    def queue_product(self, queue, message, exp=settings.SVR_QUEUE_RETENTION) -> bool:
        # Try up to three times before giving up on publishing to the stream.
        for _ in range(3):
            try:
                payload = {"message": json.dumps(message)}
                pipeline = self.REDIS.pipeline()
                pipeline.xadd(queue, payload)
                # pipeline.expire(queue, exp)
                pipeline.execute()
                return True
            except Exception as e:
                logging.exception(
                    "RedisDB.queue_product " + str(queue) + " got exception: " + str(e)
                )
        return False

    def queue_consumer(
        self, queue_name, group_name, consumer_name, msg_id=b">"
    ) -> Payload:
        try:
            # Create the consumer group on first use.
            group_info = self.REDIS.xinfo_groups(queue_name)
            if not any(e["name"] == group_name for e in group_info):
                self.REDIS.xgroup_create(queue_name, group_name, id="0", mkstream=True)
            args = {
                "groupname": group_name,
                "consumername": consumer_name,
                "count": 1,
                "block": 10000,
                "streams": {queue_name: msg_id},
            }
            messages = self.REDIS.xreadgroup(**args)
            if not messages:
                return None
            stream, element_list = messages[0]
            msg_id, payload = element_list[0]
            res = Payload(self.REDIS, queue_name, group_name, msg_id, payload)
            return res
        except Exception as e:
            if "key" in str(e):
                # The stream does not exist yet; nothing to consume.
                pass
            else:
                logging.exception(
                    "RedisDB.queue_consumer "
                    + str(queue_name)
                    + " got exception: "
                    + str(e)
                )
        return None

    def get_unacked_for(self, consumer_name, queue_name, group_name):
        try:
            group_info = self.REDIS.xinfo_groups(queue_name)
            if not any(e["name"] == group_name for e in group_info):
                return
            pendings = self.REDIS.xpending_range(
                queue_name,
                group_name,
                min=0,
                max=10000000000000,
                count=1,
                consumername=consumer_name,
            )
            if not pendings:
                return
            msg_id = pendings[0]["message_id"]
            msg = self.REDIS.xrange(queue_name, min=msg_id, count=1)
            _, payload = msg[0]
            return Payload(self.REDIS, queue_name, group_name, msg_id, payload)
        except Exception as e:
            if "key" in str(e):
                return
            logging.exception(
                "RedisDB.get_unacked_for " + consumer_name + " got exception: " + str(e)
            )
            self.__open__()

    def queue_info(self, queue, group_name) -> dict | None:
        try:
            groups = self.REDIS.xinfo_groups(queue)
            for group in groups:
                if group["name"] == group_name:
                    return group
        except Exception as e:
            logging.warning(
                "RedisDB.queue_info " + str(queue) + " got exception: " + str(e)
            )
        return None


REDIS_CONN = RedisDB()


class RedisDistributedLock:
    """Best-effort distributed lock built on SETNX; also usable as a context manager."""

    def __init__(self, lock_key, timeout=10):
        self.lock_key = lock_key
        self.lock_value = str(uuid.uuid4())
        self.timeout = timeout

    @staticmethod
    def clean_lock(lock_key):
        REDIS_CONN.REDIS.delete(lock_key)

    def acquire_lock(self):
        # Poll once per second until the lock is taken or the timeout elapses.
        end_time = time.time() + self.timeout
        while time.time() < end_time:
            if REDIS_CONN.REDIS.setnx(self.lock_key, self.lock_value):
                return True
            time.sleep(1)
        return False

    def release_lock(self):
        # Only delete the key if this instance still holds the lock.
        if REDIS_CONN.REDIS.get(self.lock_key) == self.lock_value:
            REDIS_CONN.REDIS.delete(self.lock_key)

    def __enter__(self):
        self.acquire_lock()

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.release_lock()