import re
import json
import time
import copy
import elasticsearch
from elasticsearch import Elasticsearch
from elasticsearch_dsl import UpdateByQuery, Search, Index
from rag.settings import es_logger
from rag import settings
from rag.utils import singleton

es_logger.info("Elasticsearch version: " + str(elasticsearch.__version__))
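

# HuEs is a process-wide singleton wrapping the official Elasticsearch
# client for the rag package. Write helpers (upsert/bulk/rm/update) retry
# on timeouts, reconnect on other errors, and report failure through their
# return values; read helpers (search/get) re-raise after three attempts.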
@singleton
class HuEs:
    def __init__(self):
        self.info = {}
        self.conn()
        self.idxnm = settings.ES.get("index_name", "")
        if not self.es.ping():
            raise Exception("Can't connect to ES cluster")

    def conn(self):
        for _ in range(10):
            try:
                self.es = Elasticsearch(
                    settings.ES["hosts"].split(","),
                    timeout=600
                )
                if self.es:
                    self.info = self.es.info()
                    es_logger.info("Connect to es.")
                    break
            except Exception as e:
                es_logger.error("Fail to connect to es: " + str(e))
                time.sleep(1)

    def version(self):
        # Despite the name, this reports whether the cluster major version is
        # at least 7; pre-7 call sites still have to pass an explicit doc_type.
        v = self.info.get("version", {"number": "5.6"})
        v = v["number"].split(".")[0]
        return int(v) >= 7
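
    # Upsert documents one at a time through the update API with
    # doc_as_upsert; df is an iterable of dicts, each carrying its target
    # "id". Returns True only if every document eventually went through.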
    def upsert(self, df, idxnm=""):
        res = []
        for d in df:
            id = d["id"]
            del d["id"]
            d = {"doc": d, "doc_as_upsert": "true"}
            T = False
            for _ in range(10):
                try:
                    if not self.version():
                        r = self.es.update(
                            index=(self.idxnm if not idxnm else idxnm),
                            body=d,
                            id=id,
                            doc_type="doc",
                            refresh=False,
                            retry_on_conflict=100)
                    else:
                        r = self.es.update(
                            index=(self.idxnm if not idxnm else idxnm),
                            body=d,
                            id=id,
                            refresh=False,
                            retry_on_conflict=100)
                    es_logger.info("Successfully upsert: %s" % id)
                    T = True
                    break
                except Exception as e:
                    es_logger.warning("Fail to index: " +
                                      json.dumps(d, ensure_ascii=False) + str(e))
                    if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                        time.sleep(3)
                        continue
                    self.conn()
                    T = False
            if not T:
                res.append(d)
                es_logger.error(
                    "Fail to index: " +
                    re.sub(r"[\r\n]", "", json.dumps(d, ensure_ascii=False)))
                d["id"] = id
                d["_index"] = self.idxnm
        if not res:
            return True
        return False
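
    # Bulk upsert: pairs an "update" action with a doc_as_upsert partial
    # document for each entry and sends them through the _bulk API. Returns a
    # list of "id:error" strings for failed items (an empty list is success).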
    def bulk(self, df, idx_nm=None):
        ids, acts = {}, []
        for d in df:
            id = d["id"] if "id" in d else d["_id"]
            ids[id] = copy.deepcopy(d)
            ids[id]["_index"] = self.idxnm if not idx_nm else idx_nm
            if "id" in d:
                del d["id"]
            if "_id" in d:
                del d["_id"]
            acts.append(
                {"update": {"_id": id, "_index": ids[id]["_index"]},
                 "retry_on_conflict": 100})
            acts.append({"doc": d, "doc_as_upsert": "true"})

        res = []
        for _ in range(100):
            try:
                if elasticsearch.__version__[0] < 8:
                    r = self.es.bulk(
                        index=(self.idxnm if not idx_nm else idx_nm),
                        body=acts,
                        refresh=False,
                        timeout="600s")
                else:
                    r = self.es.bulk(
                        index=(self.idxnm if not idx_nm else idx_nm),
                        operations=acts,
                        refresh=False,
                        timeout="600s")
                if re.search(r"False", str(r["errors"]), re.IGNORECASE):
                    return res
                for it in r["items"]:
                    if "error" in it["update"]:
                        res.append(str(it["update"]["_id"]) +
                                   ":" + str(it["update"]["error"]))
                return res
            except Exception as e:
                es_logger.warning("Fail to bulk: " + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
                self.conn()
        return res
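
    # Like bulk(), but each entry supplies its own scripted update: d["raw"]
    # is the original document and d["script"] the body of the update action.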
    def bulk4script(self, df):
        ids, acts = {}, []
        for d in df:
            id = d["id"]
            ids[id] = copy.deepcopy(d["raw"])
            acts.append({"update": {"_id": id, "_index": self.idxnm}})
            acts.append(d["script"])
            es_logger.info("bulk upsert: %s" % id)

        res = []
        for _ in range(10):
            try:
                if not self.version():
                    r = self.es.bulk(
                        index=self.idxnm,
                        body=acts,
                        refresh=False,
                        timeout="600s",
                        doc_type="doc")
                else:
                    r = self.es.bulk(
                        index=self.idxnm,
                        body=acts,
                        refresh=False,
                        timeout="600s")
                if re.search(r"False", str(r["errors"]), re.IGNORECASE):
                    return res
                for it in r["items"]:
                    if "error" in it["update"]:
                        res.append(str(it["update"]["_id"]))
                return res
            except Exception as e:
                es_logger.warning("Fail to bulk: " + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
                self.conn()
        return res
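
    # Delete one document by id; a not_found response is treated as success.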
    def rm(self, d):
        for _ in range(10):
            try:
                if not self.version():
                    r = self.es.delete(
                        index=self.idxnm,
                        id=d["id"],
                        doc_type="doc",
                        refresh=True)
                else:
                    r = self.es.delete(
                        index=self.idxnm,
                        id=d["id"],
                        refresh=True,
                        doc_type="_doc")
                es_logger.info("Remove %s" % d["id"])
                return True
            except Exception as e:
                es_logger.warning("Fail to delete: " + str(d) + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
                if re.search(r"(not_found)", str(e), re.IGNORECASE):
                    return True
                self.conn()
        es_logger.error("Fail to delete: " + str(d))
        return False
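
    # Run a search. q may be a raw body dict or an elasticsearch-dsl query
    # object (wrapped via Search().query(q)). Retries up to three times on
    # timeouts, then raises.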
    def search(self, q, idxnm=None, src=False, timeout="2s"):
        if not isinstance(q, dict):
            q = Search().query(q).to_dict()
        for i in range(3):
            try:
                res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
                                     body=q,
                                     timeout=timeout,
                                     # search_type="dfs_query_then_fetch",
                                     track_total_hits=True,
                                     _source=src)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                return res
            except Exception as e:
                es_logger.error(
                    "ES search exception: " + str(e) + "【Q】:" + str(q))
                if str(e).find("Timeout") >= 0:
                    continue
                raise e
        es_logger.error("ES search timeout for 3 times!")
        raise Exception("ES search timeout.")

    def get(self, doc_id, idxnm=None):
        for i in range(3):
            try:
                res = self.es.get(index=(self.idxnm if not idxnm else idxnm),
                                  id=doc_id)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                return res
            except Exception as e:
                es_logger.error(
                    "ES get exception: " + str(e) + "【Q】:" + doc_id)
                if str(e).find("Timeout") >= 0:
                    continue
                raise e
        es_logger.error("ES get timeout for 3 times!")
        raise Exception("ES get timeout.")
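
    # Update-by-query: builds a painless script that assigns every key of d
    # from params, then applies it to all documents matching q.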
    def updateByQuery(self, q, d):
        ubq = UpdateByQuery(index=self.idxnm).using(self.es).query(q)
        scripts = ""
        for k, v in d.items():
            scripts += "ctx._source.%s = params.%s;" % (str(k), str(k))
        ubq = ubq.script(source=scripts, params=d)
        ubq = ubq.params(refresh=False)
        ubq = ubq.params(slices=5)
        ubq = ubq.params(conflicts="proceed")
        for i in range(3):
            try:
                r = ubq.execute()
                return True
            except Exception as e:
                es_logger.error("ES updateByQuery exception: " +
                                str(e) + "【Q】:" + str(q.to_dict()))
                if str(e).find("Timeout") >= 0 or str(e).find("Conflict") >= 0:
                    continue
                self.conn()
        return False

    def updateScriptByQuery(self, q, scripts, idxnm=None):
        ubq = UpdateByQuery(
            index=self.idxnm if not idxnm else idxnm).using(self.es).query(q)
        ubq = ubq.script(source=scripts)
        ubq = ubq.params(refresh=True)
        ubq = ubq.params(slices=5)
        ubq = ubq.params(conflicts="proceed")
        for i in range(3):
            try:
                r = ubq.execute()
                return True
            except Exception as e:
                es_logger.error("ES updateScriptByQuery exception: " +
                                str(e) + "【Q】:" + str(q.to_dict()))
                if str(e).find("Timeout") >= 0 or str(e).find("Conflict") >= 0:
                    continue
                self.conn()
        return False

    def deleteByQuery(self, query, idxnm=""):
        for i in range(3):
            try:
                r = self.es.delete_by_query(
                    index=idxnm if idxnm else self.idxnm,
                    body=Search().query(query).to_dict())
                return True
            except Exception as e:
                es_logger.error("ES deleteByQuery exception: " +
                                str(e) + "【Q】:" + str(query.to_dict()))
                if str(e).find("NotFoundError") >= 0:
                    return True
                if str(e).find("Timeout") >= 0 or str(e).find("Conflict") >= 0:
                    continue
        return False
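
    # Update a single document; script is serialized as the request body, so
    # it may be either a partial-doc or a scripted update payload.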
    def update(self, id, script, routing=None):
        for i in range(3):
            try:
                if not self.version():
                    r = self.es.update(
                        index=self.idxnm,
                        id=id,
                        body=json.dumps(script, ensure_ascii=False),
                        doc_type="doc",
                        routing=routing,
                        refresh=False)
                else:
                    r = self.es.update(
                        index=self.idxnm,
                        id=id,
                        body=json.dumps(script, ensure_ascii=False),
                        routing=routing,
                        refresh=False)  # doc_type is implicit on ES >= 7
                return True
            except Exception as e:
                es_logger.error(
                    "ES update exception: " + str(e) + " id:" + str(id) +
                    ", version:" + str(self.version()) +
                    json.dumps(script, ensure_ascii=False))
                if str(e).find("Timeout") >= 0:
                    continue
        return False

    def indexExist(self, idxnm):
        s = Index(idxnm if idxnm else self.idxnm, self.es)
        for i in range(3):
            try:
                return s.exists()
            except Exception as e:
                es_logger.error("ES indexExist exception: " + str(e))
                if str(e).find("Timeout") >= 0 or str(e).find("Conflict") >= 0:
                    continue
        return False

    def docExist(self, docid, idxnm=None):
        for i in range(3):
            try:
                return self.es.exists(index=(idxnm if idxnm else self.idxnm),
                                      id=docid)
            except Exception as e:
                es_logger.error("ES docExist exception: " + str(e))
                if str(e).find("Timeout") >= 0 or str(e).find("Conflict") >= 0:
                    continue
        return False

    def createIdx(self, idxnm, mapping):
        try:
            if elasticsearch.__version__[0] < 8:
                return self.es.indices.create(idxnm, body=mapping)
            from elasticsearch.client import IndicesClient
            return IndicesClient(self.es).create(index=idxnm,
                                                 settings=mapping["settings"],
                                                 mappings=mapping["mappings"])
        except Exception as e:
            es_logger.error("ES create index error %s ---- %s" % (idxnm, str(e)))

    def deleteIdx(self, idxnm):
        try:
            return self.es.indices.delete(idxnm, allow_no_indices=True)
        except Exception as e:
            es_logger.error("ES delete index error %s ---- %s" % (idxnm, str(e)))

    def getTotal(self, res):
        if isinstance(res["hits"]["total"], dict):
            return res["hits"]["total"]["value"]
        return res["hits"]["total"]

    def getDocIds(self, res):
        return [d["_id"] for d in res["hits"]["hits"]]

    def getSource(self, res):
        rr = []
        for d in res["hits"]["hits"]:
            d["_source"]["id"] = d["_id"]
            d["_source"]["_score"] = d["_score"]
            rr.append(d["_source"])
        return rr
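
    # Generator over all documents matching q via the scroll API; yields one
    # page of hits at a time until a scroll round-trip comes back empty.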
    def scrollIter(self, pagesize=100, scroll_time='2m', q=None):
        # Avoid a mutable default argument; fall back to "everything, newest
        # first" when no query is given.
        if q is None:
            q = {"query": {"match_all": {}},
                 "sort": [{"updated_at": {"order": "desc"}}]}
        for _ in range(100):
            try:
                page = self.es.search(
                    index=self.idxnm,
                    scroll=scroll_time,
                    size=pagesize,
                    body=q,
                    _source=None
                )
                break
            except Exception as e:
                es_logger.error("ES scrolling fail. " + str(e))
                time.sleep(3)

        sid = page['_scroll_id']
        scroll_size = page['hits']['total']["value"]
        es_logger.info("[TOTAL]%d" % scroll_size)
        # Start scrolling
        while scroll_size > 0:
            yield page["hits"]["hits"]
            for _ in range(100):
                try:
                    page = self.es.scroll(scroll_id=sid, scroll=scroll_time)
                    break
                except Exception as e:
                    es_logger.error("ES scrolling fail. " + str(e))
                    time.sleep(3)
            # Update the scroll ID
            sid = page['_scroll_id']
            # Get the number of results returned by the last scroll
            scroll_size = len(page['hits']['hits'])


ELASTICSEARCH = HuEs()
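
# A minimal usage sketch (assumes settings.ES points at a reachable cluster
# with an existing index, and that documents carry the "updated_at" field
# used by the default scroll sort):
#
#     res = ELASTICSEARCH.search({"query": {"match_all": {}}})
#     print(ELASTICSEARCH.getTotal(res), ELASTICSEARCH.getDocIds(res))
#     for hits in ELASTICSEARCH.scrollIter(pagesize=500):
#         ...  # each `hits` is a list of raw ES hit dicts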