#
#  Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
import logging
import re
import json
import time
import os
import copy

from elasticsearch import Elasticsearch, NotFoundError
from elasticsearch_dsl import UpdateByQuery, Q, Search, Index
from elastic_transport import ConnectionTimeout

from rag import settings
from rag.settings import TAG_FLD, PAGERANK_FLD
from rag.utils import singleton, get_float
from api.utils.file_utils import get_project_base_directory
from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr, MatchTextExpr, MatchDenseExpr, \
    FusionExpr
from rag.nlp import is_english, rag_tokenizer

ATTEMPT_TIME = 2

logger = logging.getLogger('ragflow.es_conn')


@singleton
class ESConnection(DocStoreConnection):
    def __init__(self):
        self.info = {}
        logger.info(f"Use Elasticsearch {settings.ES['hosts']} as the doc engine.")
        for _ in range(ATTEMPT_TIME):
            try:
                if self._connect():
                    break
            except Exception as e:
                logger.warning(f"{str(e)}. Waiting for Elasticsearch {settings.ES['hosts']} to be healthy.")
                time.sleep(5)
        if not self.es.ping():
            msg = f"Elasticsearch {settings.ES['hosts']} is still unhealthy after {ATTEMPT_TIME} attempts."
            logger.error(msg)
            raise Exception(msg)
        v = self.info.get("version", {"number": "8.11.3"})
        v = v["number"].split(".")[0]
        if int(v) < 8:
            msg = f"Elasticsearch version must be greater than or equal to 8, current version: {v}"
            logger.error(msg)
            raise Exception(msg)
        fp_mapping = os.path.join(get_project_base_directory(), "conf", "mapping.json")
        if not os.path.exists(fp_mapping):
            msg = f"Elasticsearch mapping file not found at {fp_mapping}"
            logger.error(msg)
            raise Exception(msg)
        self.mapping = json.load(open(fp_mapping, "r"))
        logger.info(f"Elasticsearch {settings.ES['hosts']} is healthy.")

    def _connect(self):
        self.es = Elasticsearch(
            settings.ES["hosts"].split(","),
            basic_auth=(settings.ES["username"], settings.ES[
                "password"]) if "username" in settings.ES and "password" in settings.ES else None,
            verify_certs=False,
            timeout=600
        )
        if self.es:
            self.info = self.es.info()
            return True
        return False
  75. """
  76. Database operations
  77. """
  78. def dbType(self) -> str:
  79. return "elasticsearch"
  80. def health(self) -> dict:
  81. health_dict = dict(self.es.cluster.health())
  82. health_dict["type"] = "elasticsearch"
  83. return health_dict
  84. """
  85. Table operations
  86. """
  87. def createIdx(self, indexName: str, knowledgebaseId: str, vectorSize: int):
  88. if self.indexExist(indexName, knowledgebaseId):
  89. return True
  90. try:
  91. from elasticsearch.client import IndicesClient
  92. return IndicesClient(self.es).create(index=indexName,
  93. settings=self.mapping["settings"],
  94. mappings=self.mapping["mappings"])
  95. except Exception:
  96. logger.exception("ESConnection.createIndex error %s" % (indexName))

    def deleteIdx(self, indexName: str, knowledgebaseId: str):
        if len(knowledgebaseId) > 0:
            # The index needs to stay alive after any kb deletion, since all kbs under this tenant share one index.
            return
        try:
            self.es.indices.delete(index=indexName, allow_no_indices=True)
        except NotFoundError:
            pass
        except Exception:
            logger.exception("ESConnection.deleteIdx error %s" % (indexName))

    def indexExist(self, indexName: str, knowledgebaseId: str = None) -> bool:
        s = Index(indexName, self.es)
        for i in range(ATTEMPT_TIME):
            try:
                return s.exists()
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                time.sleep(3)
                self._connect()
                continue
            except Exception as e:
                logger.exception(e)
                break
        return False
  121. """
  122. CRUD operations
  123. """
  124. def search(
  125. self, selectFields: list[str],
  126. highlightFields: list[str],
  127. condition: dict,
  128. matchExprs: list[MatchExpr],
  129. orderBy: OrderByExpr,
  130. offset: int,
  131. limit: int,
  132. indexNames: str | list[str],
  133. knowledgebaseIds: list[str],
  134. aggFields: list[str] = [],
  135. rank_feature: dict | None = None
  136. ):
  137. """
  138. Refers to https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
  139. """
  140. if isinstance(indexNames, str):
  141. indexNames = indexNames.split(",")
  142. assert isinstance(indexNames, list) and len(indexNames) > 0
  143. assert "_id" not in condition
  144. bqry = Q("bool", must=[])
  145. condition["kb_id"] = knowledgebaseIds
  146. for k, v in condition.items():
  147. if k == "available_int":
  148. if v == 0:
  149. bqry.filter.append(Q("range", available_int={"lt": 1}))
  150. else:
  151. bqry.filter.append(
  152. Q("bool", must_not=Q("range", available_int={"lt": 1})))
  153. continue
  154. if not v:
  155. continue
  156. if isinstance(v, list):
  157. bqry.filter.append(Q("terms", **{k: v}))
  158. elif isinstance(v, str) or isinstance(v, int):
  159. bqry.filter.append(Q("term", **{k: v}))
  160. else:
  161. raise Exception(
  162. f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")
        s = Search()
        vector_similarity_weight = 0.5
        for m in matchExprs:
            if isinstance(m, FusionExpr) and m.method == "weighted_sum" and "weights" in m.fusion_params:
                assert len(matchExprs) == 3 and isinstance(matchExprs[0], MatchTextExpr) \
                    and isinstance(matchExprs[1], MatchDenseExpr) \
                    and isinstance(matchExprs[2], FusionExpr)
                weights = m.fusion_params["weights"]
                vector_similarity_weight = get_float(weights.split(",")[1])
        for m in matchExprs:
            if isinstance(m, MatchTextExpr):
                minimum_should_match = m.extra_options.get("minimum_should_match", 0.0)
                if isinstance(minimum_should_match, float):
                    minimum_should_match = str(int(minimum_should_match * 100)) + "%"
                bqry.must.append(Q("query_string", fields=m.fields,
                                   type="best_fields", query=m.matching_text,
                                   minimum_should_match=minimum_should_match,
                                   boost=1))
                bqry.boost = 1.0 - vector_similarity_weight
            elif isinstance(m, MatchDenseExpr):
                assert (bqry is not None)
                similarity = 0.0
                if "similarity" in m.extra_options:
                    similarity = m.extra_options["similarity"]
                s = s.knn(m.vector_column_name,
                          m.topn,
                          m.topn * 2,
                          query_vector=list(m.embedding_data),
                          filter=bqry.to_dict(),
                          similarity=similarity,
                          )
        if bqry and rank_feature:
            for fld, sc in rank_feature.items():
                if fld != PAGERANK_FLD:
                    fld = f"{TAG_FLD}.{fld}"
                bqry.should.append(Q("rank_feature", field=fld, linear={}, boost=sc))
        if bqry:
            s = s.query(bqry)
        for field in highlightFields:
            s = s.highlight(field)
        if orderBy:
            orders = list()
            for field, order in orderBy.fields:
                order = "asc" if order == 0 else "desc"
                if field in ["page_num_int", "top_int"]:
                    order_info = {"order": order, "unmapped_type": "float",
                                  "mode": "avg", "numeric_type": "double"}
                elif field.endswith("_int") or field.endswith("_flt"):
                    order_info = {"order": order, "unmapped_type": "float"}
                else:
                    order_info = {"order": order, "unmapped_type": "text"}
                orders.append({field: order_info})
            s = s.sort(*orders)
        for fld in aggFields:
            s.aggs.bucket(f'aggs_{fld}', 'terms', field=fld, size=1000000)
        if limit > 0:
            s = s[offset:offset + limit]
        q = s.to_dict()
        logger.debug(f"ESConnection.search {str(indexNames)} query: " + json.dumps(q))
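        # For orientation: with one MatchTextExpr, one MatchDenseExpr and a
        # "weighted_sum" FusionExpr, `q` is roughly the hybrid body sketched
        # below (hedged; field names, sizes and weights are illustrative):
        #   {"query": {"bool": {"must": [{"query_string": {...}}],
        #                       "filter": [{"terms": {"kb_id": [...]}}],
        #                       "boost": 0.05}},
        #    "knn": {"field": "q_1024_vec", "k": 64, "num_candidates": 128,
        #            "query_vector": [...], "filter": {...}, "similarity": 0.1},
        #    "from": 0, "size": 30}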
        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.search(index=indexNames,
                                     body=q,
                                     timeout="600s",
                                     # search_type="dfs_query_then_fetch",
                                     track_total_hits=True,
                                     _source=True)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                logger.debug(f"ESConnection.search {str(indexNames)} res: " + str(res))
                return res
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                self._connect()
                continue
            except Exception as e:
                logger.exception(f"ESConnection.search {str(indexNames)} query: " + str(q) + str(e))
                raise e
        logger.error(f"ESConnection.search timeout for {ATTEMPT_TIME} times!")
        raise Exception("ESConnection.search timeout.")

    def get(self, chunkId: str, indexName: str, knowledgebaseIds: list[str]) -> dict | None:
        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.get(index=(indexName),
                                  id=chunkId, source=True, )
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                chunk = res["_source"]
                chunk["id"] = chunkId
                return chunk
            except NotFoundError:
                return None
            except Exception as e:
                logger.exception(f"ESConnection.get({chunkId}) got exception")
                raise e
        logger.error(f"ESConnection.get timeout for {ATTEMPT_TIME} times!")
        raise Exception("ESConnection.get timeout.")

    def insert(self, documents: list[dict], indexName: str, knowledgebaseId: str = None) -> list[str]:
        # Refers to https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
        operations = []
        for d in documents:
            assert "_id" not in d
            assert "id" in d
            d_copy = copy.deepcopy(d)
            d_copy["kb_id"] = knowledgebaseId
            meta_id = d_copy.pop("id", "")
            operations.append(
                {"index": {"_index": indexName, "_id": meta_id}})
            operations.append(d_copy)
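        # The list built above alternates action and source entries, mirroring
        # the bulk API's NDJSON pairing, e.g. (illustrative values):
        #   {"index": {"_index": "ragflow_tenant", "_id": "chunk-1"}}
        #   {"kb_id": "kb-1", "content_with_weight": "...", ...}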
        res = []
        for _ in range(ATTEMPT_TIME):
            try:
                res = []
                r = self.es.bulk(index=(indexName), operations=operations,
                                 refresh=False, timeout="60s")
                # bulk reports errors=False when every item succeeded, so the
                # empty error list can be returned straight away
                if re.search(r"False", str(r["errors"]), re.IGNORECASE):
                    return res
                for item in r["items"]:
                    for action in ["create", "delete", "index", "update"]:
                        if action in item and "error" in item[action]:
                            res.append(str(item[action]["_id"]) + ":" + str(item[action]["error"]))
                return res
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                time.sleep(3)
                self._connect()
                continue
            except Exception as e:
                res.append(str(e))
                logger.warning("ESConnection.insert got exception: " + str(e))
        return res

    def update(self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str) -> bool:
        doc = copy.deepcopy(newValue)
        doc.pop("id", None)
        condition["kb_id"] = knowledgebaseId
        if "id" in condition and isinstance(condition["id"], str):
            # update a single specific document
            chunkId = condition["id"]
            for i in range(ATTEMPT_TIME):
                for k in doc.keys():
                    if "feas" != k.split("_")[-1]:
                        continue
                    try:
                        self.es.update(index=indexName, id=chunkId, script=f"ctx._source.remove(\"{k}\");")
                    except Exception:
                        logger.exception(f"ESConnection.update(index={indexName}, id={chunkId}, doc={json.dumps(condition, ensure_ascii=False)}) got exception")
                try:
                    self.es.update(index=indexName, id=chunkId, doc=doc)
                    return True
                except Exception as e:
                    logger.exception(
                        f"ESConnection.update(index={indexName}, id={chunkId}, doc={json.dumps(condition, ensure_ascii=False)}) got exception: " + str(e))
                    break
            return False
        # update possibly multiple documents matched by a query
        bqry = Q("bool")
        for k, v in condition.items():
            if not isinstance(k, str) or not v:
                continue
            if k == "exists":
                bqry.filter.append(Q("exists", field=v))
                continue
            if isinstance(v, list):
                bqry.filter.append(Q("terms", **{k: v}))
            elif isinstance(v, str) or isinstance(v, int):
                bqry.filter.append(Q("term", **{k: v}))
            else:
                raise Exception(
                    f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")
        scripts = []
        params = {}
        for k, v in newValue.items():
            if k == "remove":
                if isinstance(v, str):
                    scripts.append(f"ctx._source.remove('{v}');")
                if isinstance(v, dict):
                    for kk, vv in v.items():
                        scripts.append(f"int i=ctx._source.{kk}.indexOf(params.p_{kk});ctx._source.{kk}.remove(i);")
                        params[f"p_{kk}"] = vv
                continue
            if k == "add":
                if isinstance(v, dict):
                    for kk, vv in v.items():
                        scripts.append(f"ctx._source.{kk}.add(params.pp_{kk});")
                        params[f"pp_{kk}"] = vv.strip()
                continue
            if (not isinstance(k, str) or not v) and k != "available_int":
                continue
            if isinstance(v, str):
                v = re.sub(r"(['\n\r]|\\.)", " ", v)
                params[f"pp_{k}"] = v
                scripts.append(f"ctx._source.{k}=params.pp_{k};")
            elif isinstance(v, int) or isinstance(v, float):
                scripts.append(f"ctx._source.{k}={v};")
            elif isinstance(v, list):
                scripts.append(f"ctx._source.{k}=params.pp_{k};")
                params[f"pp_{k}"] = json.dumps(v, ensure_ascii=False)
            else:
                raise Exception(
                    f"newValue `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, float, str or list.")
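        # A hedged example of what the loop above produces (field names are
        # hypothetical): newValue={"available_int": 0, "remove": "tag_feas"}
        # compiles to a painless script along the lines of
        #   ctx._source.available_int=0;ctx._source.remove('tag_feas');
        # which is then run below via update-by-query with conflicts="proceed".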
        ubq = UpdateByQuery(
            index=indexName).using(
            self.es).query(bqry)
        ubq = ubq.script(source="".join(scripts), params=params)
        ubq = ubq.params(refresh=True)
        ubq = ubq.params(slices=5)
        ubq = ubq.params(conflicts="proceed")
        for _ in range(ATTEMPT_TIME):
            try:
                _ = ubq.execute()
                return True
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                time.sleep(3)
                self._connect()
                continue
            except Exception as e:
                logger.error("ESConnection.update got exception: " + str(e) + "\n".join(scripts))
                break
        return False

    def delete(self, condition: dict, indexName: str, knowledgebaseId: str) -> int:
        qry = None
        assert "_id" not in condition
        condition["kb_id"] = knowledgebaseId
        if "id" in condition:
            chunk_ids = condition["id"]
            if not isinstance(chunk_ids, list):
                chunk_ids = [chunk_ids]
            if not chunk_ids:  # when chunk_ids is empty, delete all
                qry = Q("match_all")
            else:
                qry = Q("ids", values=chunk_ids)
        else:
            qry = Q("bool")
            for k, v in condition.items():
                if k == "exists":
                    qry.filter.append(Q("exists", field=v))
                elif k == "must_not":
                    if isinstance(v, dict):
                        for kk, vv in v.items():
                            if kk == "exists":
                                qry.must_not.append(Q("exists", field=vv))
                elif isinstance(v, list):
                    qry.must.append(Q("terms", **{k: v}))
                elif isinstance(v, str) or isinstance(v, int):
                    qry.must.append(Q("term", **{k: v}))
                else:
                    raise Exception("Condition value must be int, str or list.")
        logger.debug("ESConnection.delete query: " + json.dumps(qry.to_dict()))
        for _ in range(ATTEMPT_TIME):
            try:
                res = self.es.delete_by_query(
                    index=indexName,
                    body=Search().query(qry).to_dict(),
                    refresh=True)
                return res["deleted"]
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                time.sleep(3)
                self._connect()
                continue
            except Exception as e:
                logger.warning("ESConnection.delete got exception: " + str(e))
                if re.search(r"(not_found)", str(e), re.IGNORECASE):
                    return 0
        return 0
  430. """
  431. Helper functions for search result
  432. """
  433. def getTotal(self, res):
  434. if isinstance(res["hits"]["total"], type({})):
  435. return res["hits"]["total"]["value"]
  436. return res["hits"]["total"]
  437. def getChunkIds(self, res):
  438. return [d["_id"] for d in res["hits"]["hits"]]
  439. def __getSource(self, res):
  440. rr = []
  441. for d in res["hits"]["hits"]:
  442. d["_source"]["id"] = d["_id"]
  443. d["_source"]["_score"] = d["_score"]
  444. rr.append(d["_source"])
  445. return rr
  446. def getFields(self, res, fields: list[str]) -> dict[str, dict]:
  447. res_fields = {}
  448. if not fields:
  449. return {}
  450. for d in self.__getSource(res):
  451. m = {n: d.get(n) for n in fields if d.get(n) is not None}
  452. for n, v in m.items():
  453. if isinstance(v, list):
  454. m[n] = v
  455. continue
  456. if n == "available_int" and isinstance(v, (int, float)):
  457. m[n] = v
  458. continue
  459. if not isinstance(v, str):
  460. m[n] = str(m[n])
  461. # if n.find("tks") > 0:
  462. # m[n] = rmSpace(m[n])
  463. if m:
  464. res_fields[d["id"]] = m
  465. return res_fields
  466. def getHighlight(self, res, keywords: list[str], fieldnm: str):
  467. ans = {}
  468. for d in res["hits"]["hits"]:
  469. hlts = d.get("highlight")
  470. if not hlts:
  471. continue
  472. txt = "...".join([a for a in list(hlts.items())[0][1]])
  473. if not is_english(txt.split()):
  474. ans[d["_id"]] = txt
  475. continue
  476. txt = d["_source"][fieldnm]
  477. txt = re.sub(r"[\r\n]", " ", txt, flags=re.IGNORECASE | re.MULTILINE)
  478. txts = []
  479. for t in re.split(r"[.?!;\n]", txt):
  480. for w in keywords:
  481. t = re.sub(r"(^|[ .?/'\"\(\)!,:;-])(%s)([ .?/'\"\(\)!,:;-])" % re.escape(w), r"\1<em>\2</em>\3", t,
  482. flags=re.IGNORECASE | re.MULTILINE)
  483. if not re.search(r"<em>[^<>]+</em>", t, flags=re.IGNORECASE | re.MULTILINE):
  484. continue
  485. txts.append(t)
  486. ans[d["_id"]] = "...".join(txts) if txts else "...".join([a for a in list(hlts.items())[0][1]])
  487. return ans
  488. def getAggregation(self, res, fieldnm: str):
  489. agg_field = "aggs_" + fieldnm
  490. if "aggregations" not in res or agg_field not in res["aggregations"]:
  491. return list()
  492. bkts = res["aggregations"][agg_field]["buckets"]
  493. return [(b["key"], b["doc_count"]) for b in bkts]
  494. """
  495. SQL
  496. """
  497. def sql(self, sql: str, fetch_size: int, format: str):
  498. logger.debug(f"ESConnection.sql get sql: {sql}")
  499. sql = re.sub(r"[ `]+", " ", sql)
  500. sql = sql.replace("%", "")
  501. replaces = []
  502. for r in re.finditer(r" ([a-z_]+_l?tks)( like | ?= ?)'([^']+)'", sql):
  503. fld, v = r.group(1), r.group(3)
  504. match = " MATCH({}, '{}', 'operator=OR;minimum_should_match=30%') ".format(
  505. fld, rag_tokenizer.fine_grained_tokenize(rag_tokenizer.tokenize(v)))
  506. replaces.append(
  507. ("{}{}'{}'".format(
  508. r.group(1),
  509. r.group(2),
  510. r.group(3)),
  511. match))
  512. for p, r in replaces:
  513. sql = sql.replace(p, r, 1)
  514. logger.debug(f"ESConnection.sql to es: {sql}")
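        # Hedged example of the rewrite above (field and value illustrative):
        #   ... where content_ltks like '%rainfall%'
        # becomes, after the tokenizer expands the literal, roughly
        #   ... where MATCH(content_ltks, 'rainfall ...',
        #              'operator=OR;minimum_should_match=30%')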
        for i in range(ATTEMPT_TIME):
            try:
                res = self.es.sql.query(body={"query": sql, "fetch_size": fetch_size}, format=format,
                                        request_timeout="2s")
                return res
            except ConnectionTimeout:
                logger.exception("ES request timeout")
                time.sleep(3)
                self._connect()
                continue
            except Exception:
                logger.exception("ESConnection.sql got exception")
                break
        logger.error(f"ESConnection.sql timeout for {ATTEMPT_TIME} times!")
        return None
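

# A minimal smoke-test sketch, assuming Elasticsearch is reachable with the
# hosts/credentials configured in rag.settings; the index name is hypothetical.
if __name__ == "__main__":
    conn = ESConnection()
    print(conn.dbType(), conn.health())
    print("index exists:", conn.indexExist("ragflow_example_index"))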