
import logging
import re
import json
import time
import os
import copy

from elasticsearch import Elasticsearch, NotFoundError
from elasticsearch_dsl import UpdateByQuery, Q, Search, Index
from elastic_transport import ConnectionTimeout
import polars as pl

from rag import settings
from rag.utils import singleton
from api.utils.file_utils import get_project_base_directory
from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr, MatchTextExpr, MatchDenseExpr, \
    FusionExpr
from rag.nlp import is_english, rag_tokenizer

ATTEMPT_TIME = 2

logger = logging.getLogger('ragflow.es_conn')


@singleton
class ESConnection(DocStoreConnection):
    def __init__(self):
        self.info = {}
        logger.info(f"Use Elasticsearch {settings.ES['hosts']} as the doc engine.")
        # Retry the connection a few times: ES may still be starting up.
        for _ in range(ATTEMPT_TIME):
            try:
                self.es = Elasticsearch(
                    settings.ES["hosts"].split(","),
                    basic_auth=(settings.ES["username"], settings.ES["password"])
                    if "username" in settings.ES and "password" in settings.ES else None,
                    verify_certs=False,
                    timeout=600
                )
                if self.es:
                    self.info = self.es.info()
                    break
            except Exception as e:
                logger.warning(f"{str(e)}. Waiting for Elasticsearch {settings.ES['hosts']} to become healthy.")
                time.sleep(5)
        if not self.es.ping():
            msg = f"Elasticsearch {settings.ES['hosts']} didn't become healthy after {ATTEMPT_TIME} attempts."
            logger.error(msg)
            raise Exception(msg)
        v = self.info.get("version", {"number": "8.11.3"})
        v = v["number"].split(".")[0]
        if int(v) < 8:
            msg = f"Elasticsearch version must be greater than or equal to 8, current version: {v}"
            logger.error(msg)
            raise Exception(msg)
        fp_mapping = os.path.join(get_project_base_directory(), "conf", "mapping.json")
        if not os.path.exists(fp_mapping):
            msg = f"Elasticsearch mapping file not found at {fp_mapping}"
            logger.error(msg)
            raise Exception(msg)
        with open(fp_mapping, "r") as f:
            self.mapping = json.load(f)
        logger.info(f"Elasticsearch {settings.ES['hosts']} is healthy.")

    """
    Database operations
    """

    def dbType(self) -> str:
        return "elasticsearch"

    def health(self) -> dict:
        health_dict = dict(self.es.cluster.health())
        health_dict["type"] = "elasticsearch"
        return health_dict

    """
    Table operations
    """

    def createIdx(self, indexName: str, knowledgebaseId: str, vectorSize: int):
        if self.indexExist(indexName, knowledgebaseId):
            return True
        try:
            from elasticsearch.client import IndicesClient
            return IndicesClient(self.es).create(index=indexName,
                                                 settings=self.mapping["settings"],
                                                 mappings=self.mapping["mappings"])
        except Exception:
            logger.exception("ESConnection.createIdx error %s" % (indexName))

    def deleteIdx(self, indexName: str, knowledgebaseId: str):
        if len(knowledgebaseId) > 0:
            # The index must stay alive after any kb deletion, since all kbs under this tenant share one index.
            return
        try:
            self.es.indices.delete(index=indexName, allow_no_indices=True)
        except NotFoundError:
            pass
        except Exception:
            logger.exception("ESConnection.deleteIdx error %s" % (indexName))

    def indexExist(self, indexName: str, knowledgebaseId: str) -> bool:
        s = Index(indexName, self.es)
        for i in range(ATTEMPT_TIME):
            try:
                return s.exists()
            except Exception as e:
                logger.exception("ESConnection.indexExist got exception")
                # Retry only on transient errors; anything else falls through to False.
                if "Timeout" in str(e) or "Conflict" in str(e):
                    continue
        return False

    """
    CRUD operations
    """

    def search(self, selectFields: list[str], highlightFields: list[str], condition: dict, matchExprs: list[MatchExpr],
               orderBy: OrderByExpr, offset: int, limit: int, indexNames: str | list[str],
               knowledgebaseIds: list[str]) -> list[dict] | pl.DataFrame:
        """
        Refers to https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
        """
        if isinstance(indexNames, str):
            indexNames = indexNames.split(",")
        assert isinstance(indexNames, list) and len(indexNames) > 0
        assert "_id" not in condition

        # Translate the filter condition into a bool query.
        bqry = Q("bool", must=[])
        condition["kb_id"] = knowledgebaseIds
        for k, v in condition.items():
            if k == "available_int":
                if v == 0:
                    bqry.filter.append(Q("range", available_int={"lt": 1}))
                else:
                    bqry.filter.append(
                        Q("bool", must_not=Q("range", available_int={"lt": 1})))
                continue
            if not v:
                continue
            if isinstance(v, list):
                bqry.filter.append(Q("terms", **{k: v}))
            elif isinstance(v, str) or isinstance(v, int):
                bqry.filter.append(Q("term", **{k: v}))
            else:
                raise Exception(
                    f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")

        s = Search()
        vector_similarity_weight = 0.5
        for m in matchExprs:
            if isinstance(m, FusionExpr) and m.method == "weighted_sum" and "weights" in m.fusion_params:
                assert len(matchExprs) == 3 \
                    and isinstance(matchExprs[0], MatchTextExpr) \
                    and isinstance(matchExprs[1], MatchDenseExpr) \
                    and isinstance(matchExprs[2], FusionExpr)
                # weights is a comma-separated pair: "<text weight>,<vector weight>"
                weights = m.fusion_params["weights"]
                vector_similarity_weight = float(weights.split(",")[1])
        for m in matchExprs:
            if isinstance(m, MatchTextExpr):
                minimum_should_match = m.extra_options.get("minimum_should_match", 0.0)
                if isinstance(minimum_should_match, float):
                    minimum_should_match = str(int(minimum_should_match * 100)) + "%"
                bqry.must.append(Q("query_string", fields=m.fields,
                                   type="best_fields", query=m.matching_text,
                                   minimum_should_match=minimum_should_match,
                                   boost=1))
                # Down-weight the text score so text and vector scores combine with the requested fusion weights.
                bqry.boost = 1.0 - vector_similarity_weight
            elif isinstance(m, MatchDenseExpr):
                assert bqry is not None
                similarity = 0.0
                if "similarity" in m.extra_options:
                    similarity = m.extra_options["similarity"]
                s = s.knn(m.vector_column_name,
                          m.topn,
                          m.topn * 2,  # num_candidates: consider twice as many candidates as results
                          query_vector=list(m.embedding_data),
                          filter=bqry.to_dict(),
                          similarity=similarity,
                          )

        if bqry:
            bqry.should.append(Q("rank_feature", field="pagerank_fea", linear={}, boost=10))
            s = s.query(bqry)
        for field in highlightFields:
            s = s.highlight(field)

        if orderBy:
            orders = list()
            for field, order in orderBy.fields:
                order = "asc" if order == 0 else "desc"
                orders.append({field: {"order": order, "unmapped_type": "float",
                                       "mode": "avg", "numeric_type": "double"}})
            s = s.sort(*orders)

        if limit > 0:
            # elasticsearch_dsl slices map [start:stop] to from_=start, size=stop-start.
            s = s[offset:offset + limit]

        q = s.to_dict()
        logger.debug(f"ESConnection.search {str(indexNames)} query: " + json.dumps(q))

        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.search(index=indexNames,
                                     body=q,
                                     timeout="600s",
                                     # search_type="dfs_query_then_fetch",
                                     track_total_hits=True,
                                     _source=True)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                logger.debug(f"ESConnection.search {str(indexNames)} res: " + str(res))
                return res
            except Exception as e:
                logger.exception(f"ESConnection.search {str(indexNames)} query: " + str(q))
                if "Timeout" in str(e):
                    continue
                raise e
        logger.error(f"ESConnection.search timed out after {ATTEMPT_TIME} attempts!")
        raise Exception("ESConnection.search timeout.")
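
    # For reference, a hybrid request built above combines a bool query with a
    # kNN clause, roughly like the sketch below (illustrative only; the vector
    # field name "q_1024_vec" and the numbers are made-up examples, not taken
    # from this file):
    #
    #   {"query": {"bool": {"must": [{"query_string": {...}}],
    #                       "filter": [...], "boost": 0.95}},
    #    "knn": {"field": "q_1024_vec", "k": 64, "num_candidates": 128,
    #            "filter": {...}, "similarity": 0.1}}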

    def get(self, chunkId: str, indexName: str, knowledgebaseIds: list[str]) -> dict | None:
        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.get(index=indexName, id=chunkId, source=True)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                chunk = res["_source"]
                chunk["id"] = chunkId
                return chunk
            except NotFoundError:
                return None
            except Exception as e:
                logger.exception(f"ESConnection.get({chunkId}) got exception")
                if "Timeout" in str(e):
                    continue
                raise e
        logger.error(f"ESConnection.get timed out after {ATTEMPT_TIME} attempts!")
        raise Exception("ESConnection.get timeout.")

    def insert(self, documents: list[dict], indexName: str, knowledgebaseId: str) -> list[str]:
        # Refers to https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
        operations = []
        for d in documents:
            assert "_id" not in d
            assert "id" in d
            # The caller-facing "id" becomes the ES document _id; it must not live in _source.
            d_copy = copy.deepcopy(d)
            meta_id = d_copy.pop("id", "")
            operations.append(
                {"index": {"_index": indexName, "_id": meta_id}})
            operations.append(d_copy)

        res = []
        for _ in range(ATTEMPT_TIME):
            try:
                res = []
                r = self.es.bulk(index=indexName, operations=operations,
                                 refresh=False, timeout="60s")
                # r["errors"] is False when every item succeeded; return an empty error list.
                if not r["errors"]:
                    return res
                for item in r["items"]:
                    for action in ["create", "delete", "index", "update"]:
                        if action in item and "error" in item[action]:
                            res.append(str(item[action]["_id"]) + ":" + str(item[action]["error"]))
                return res
            except Exception as e:
                logger.warning("ESConnection.insert got exception: " + str(e))
                res = [str(e)]
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
        return res
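
    # For reference, the `operations` payload above alternates an action line
    # and a source line per document, mirroring the bulk API's NDJSON layout
    # (sketch only; the index name, id, and fields are made up):
    #
    #   [{"index": {"_index": "ragflow_tenant", "_id": "chunk-0"}},
    #    {"kb_id": "kb-0", "content_with_weight": "..."},
    #    ...]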

    def update(self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str) -> bool:
        doc = copy.deepcopy(newValue)
        doc.pop("id", None)
        if "id" in condition and isinstance(condition["id"], str):
            # Update a specific single document.
            chunkId = condition["id"]
            for i in range(ATTEMPT_TIME):
                try:
                    self.es.update(index=indexName, id=chunkId, doc=doc)
                    return True
                except Exception as e:
                    logger.exception(
                        f"ESConnection.update(index={indexName}, id={chunkId}, doc={json.dumps(condition, ensure_ascii=False)}) got exception")
                    if "Timeout" in str(e):
                        continue
            return False
        else:
            # Update a condition-matched, possibly multi-document set via update-by-query.
            bqry = Q("bool")
            for k, v in condition.items():
                if not isinstance(k, str) or not v:
                    continue
                if k == "exist":
                    bqry.filter.append(Q("exists", field=v))
                    continue
                if isinstance(v, list):
                    bqry.filter.append(Q("terms", **{k: v}))
                elif isinstance(v, str) or isinstance(v, int):
                    bqry.filter.append(Q("term", **{k: v}))
                else:
                    raise Exception(
                        f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")
            # Compile the new values into a Painless script, one statement per field.
            scripts = []
            for k, v in newValue.items():
                if k == "remove":
                    scripts.append(f"ctx._source.remove('{v}');")
                    continue
                if (not isinstance(k, str) or not v) and k != "available_int":
                    continue
                if isinstance(v, str):
                    scripts.append(f"ctx._source.{k} = '{v}'")
                elif isinstance(v, int):
                    scripts.append(f"ctx._source.{k} = {v}")
                else:
                    raise Exception(
                        f"newValue `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str.")
            ubq = UpdateByQuery(index=indexName).using(self.es).query(bqry)
            ubq = ubq.script(source="; ".join(scripts))
            ubq = ubq.params(refresh=True)
            ubq = ubq.params(slices=5)
            ubq = ubq.params(conflicts="proceed")
            for i in range(ATTEMPT_TIME):
                try:
                    _ = ubq.execute()
                    return True
                except Exception as e:
                    logger.error("ESConnection.update got exception: " + str(e))
                    if "Timeout" in str(e) or "Conflict" in str(e):
                        continue
            return False
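
    # For reference, a newValue of {"available_int": 0, "remove": "pagerank_fea"}
    # would compile to a Painless script along these lines (illustrative only):
    #
    #   ctx._source.available_int = 0; ctx._source.remove('pagerank_fea');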

    def delete(self, condition: dict, indexName: str, knowledgebaseId: str) -> int:
        qry = None
        assert "_id" not in condition
        if "id" in condition:
            chunk_ids = condition["id"]
            if not isinstance(chunk_ids, list):
                chunk_ids = [chunk_ids]
            qry = Q("ids", values=chunk_ids)
        else:
            qry = Q("bool")
            for k, v in condition.items():
                if isinstance(v, list):
                    qry.must.append(Q("terms", **{k: v}))
                elif isinstance(v, str) or isinstance(v, int):
                    qry.must.append(Q("term", **{k: v}))
                else:
                    raise Exception("Condition value must be int, str or list.")
        logger.debug("ESConnection.delete query: " + json.dumps(qry.to_dict()))
        for _ in range(ATTEMPT_TIME):
            try:
                res = self.es.delete_by_query(
                    index=indexName,
                    body=Search().query(qry).to_dict(),
                    refresh=True)
                return res["deleted"]
            except Exception as e:
                logger.warning("ESConnection.delete got exception: " + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
                if re.search(r"(not_found)", str(e), re.IGNORECASE):
                    return 0
        return 0

    """
    Helper functions for search result
    """

    def getTotal(self, res):
        if isinstance(res["hits"]["total"], dict):
            return res["hits"]["total"]["value"]
        return res["hits"]["total"]

    def getChunkIds(self, res):
        return [d["_id"] for d in res["hits"]["hits"]]

    def __getSource(self, res):
        rr = []
        for d in res["hits"]["hits"]:
            d["_source"]["id"] = d["_id"]
            d["_source"]["_score"] = d["_score"]
            rr.append(d["_source"])
        return rr

    def getFields(self, res, fields: list[str]) -> dict[str, dict]:
        res_fields = {}
        if not fields:
            return {}
        for d in self.__getSource(res):
            m = {n: d.get(n) for n in fields if d.get(n) is not None}
            for n, v in m.items():
                if isinstance(v, list):
                    m[n] = v
                    continue
                if not isinstance(v, str):
                    m[n] = str(m[n])
                # if n.find("tks") > 0:
                #     m[n] = rmSpace(m[n])
            if m:
                res_fields[d["id"]] = m
        return res_fields

    def getHighlight(self, res, keywords: list[str], fieldnm: str):
        ans = {}
        for d in res["hits"]["hits"]:
            hlts = d.get("highlight")
            if not hlts:
                continue
            txt = "...".join([a for a in list(hlts.items())[0][1]])
            if not is_english(txt.split()):
                ans[d["_id"]] = txt
                continue

            # For English text, re-highlight sentence by sentence so only
            # sentences that actually contain a keyword are kept.
            txt = d["_source"][fieldnm]
            txt = re.sub(r"[\r\n]", " ", txt, flags=re.IGNORECASE | re.MULTILINE)
            txts = []
            for t in re.split(r"[.?!;\n]", txt):
                for w in keywords:
                    t = re.sub(r"(^|[ .?/'\"\(\)!,:;-])(%s)([ .?/'\"\(\)!,:;-])" % re.escape(w),
                               r"\1<em>\2</em>\3", t,
                               flags=re.IGNORECASE | re.MULTILINE)
                if not re.search(r"<em>[^<>]+</em>", t, flags=re.IGNORECASE | re.MULTILINE):
                    continue
                txts.append(t)
            ans[d["_id"]] = "...".join(txts) if txts else "...".join([a for a in list(hlts.items())[0][1]])
        return ans

    def getAggregation(self, res, fieldnm: str):
        agg_field = "aggs_" + fieldnm
        if "aggregations" not in res or agg_field not in res["aggregations"]:
            return list()
        bkts = res["aggregations"][agg_field]["buckets"]
        return [(b["key"], b["doc_count"]) for b in bkts]
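
    # For reference, getAggregation expects the caller to have requested a
    # terms aggregation named "aggs_<field>", e.g. (sketch; field name and
    # values are made up):
    #
    #   "aggregations": {"aggs_docnm_kwd": {"buckets": [
    #       {"key": "manual.pdf", "doc_count": 12}, ...]}}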

    """
    SQL
    """

    def sql(self, sql: str, fetch_size: int, format: str):
        logger.debug(f"ESConnection.sql get sql: {sql}")
        sql = re.sub(r"[ `]+", " ", sql)
        sql = sql.replace("%", "")
        # Rewrite LIKE/= predicates on tokenized (*_tks / *_ltks) fields into
        # full-text MATCH() calls, tokenizing the literal the same way the
        # field was indexed; e.g. content_ltks = 'foo' becomes
        # MATCH(content_ltks, '<tokenized foo>', 'operator=OR;minimum_should_match=30%').
        replaces = []
        for r in re.finditer(r" ([a-z_]+_l?tks)( like | ?= ?)'([^']+)'", sql):
            fld, v = r.group(1), r.group(3)
            match = " MATCH({}, '{}', 'operator=OR;minimum_should_match=30%') ".format(
                fld, rag_tokenizer.fine_grained_tokenize(rag_tokenizer.tokenize(v)))
            replaces.append(
                ("{}{}'{}'".format(
                    r.group(1),
                    r.group(2),
                    r.group(3)),
                 match))

        for p, r in replaces:
            sql = sql.replace(p, r, 1)
        logger.debug(f"ESConnection.sql to es: {sql}")

        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.sql.query(body={"query": sql, "fetch_size": fetch_size}, format=format,
                                        request_timeout="2s")
                return res
            except ConnectionTimeout:
                logger.exception("ESConnection.sql timeout")
                continue
            except Exception:
                logger.exception("ESConnection.sql got exception")
                return None
        logger.error(f"ESConnection.sql timed out after {ATTEMPT_TIME} attempts!")
        return None
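

# Minimal usage sketch (not part of the original module; left commented out so
# importing this file has no side effects). Assumes settings.ES points at a
# reachable cluster and conf/mapping.json exists; the index and kb ids below
# are made up for illustration.
#
#   from rag.utils.es_conn import ESConnection
#
#   conn = ESConnection()               # singleton; retries until ES answers ping
#   print(conn.health())                # cluster health plus {"type": "elasticsearch"}
#   conn.createIdx("ragflow_demo", "", vectorSize=1024)
#   errors = conn.insert([{"id": "chunk-0", "kb_id": "kb-0"}], "ragflow_demo", "kb-0")
#   assert not errors                   # insert returns a list of error strings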