import copy
import json
import logging
import os
import re
import time

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Q, Search, UpdateByQuery
from elastic_transport import ConnectionTimeout

# NOTE: `from rag import settings` and the module-level logger are assumptions;
# the fragments only show bare `settings.ES[...]` and `logger.*` references.
from rag import settings
from rag.utils import singleton
from api.utils.file_utils import get_project_base_directory
import polars as pl
from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr, MatchTextExpr, MatchDenseExpr, \
    FusionExpr
from rag.nlp import is_english, rag_tokenizer

logger = logging.getLogger(__name__)


@singleton
class ESConnection(DocStoreConnection):
    def __init__(self):
        try:
            self.es = Elasticsearch(
                settings.ES["hosts"].split(","),
                basic_auth=(settings.ES["username"], settings.ES["password"]) if "username" in settings.ES and "password" in settings.ES else None,
                verify_certs=False,
                timeout=600
            )
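        # The matching `except` is missing from the fragments. A minimal sketch,
        # assuming a failed connection should be logged and re-raised; the
        # original may retry with a backoff instead.
        except Exception:
            logger.exception("ESConnection: cannot connect to Elasticsearch at %s", settings.ES.get("hosts"))
            raise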

    """
    Database operations
    """

    def dbType(self) -> str:
        return "elasticsearch"

    """
    Table operations
    """

    def createIdx(self, indexName: str, knowledgebaseId: str, vectorSize: int):
        if self.indexExist(indexName, knowledgebaseId):
            return True
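        # The rest of createIdx is missing. A minimal sketch, assuming the index
        # schema ships with the project; the mapping path `conf/mapping.json` is
        # an assumption, and `vectorSize` would be applied there if needed.
        fp_mapping = os.path.join(get_project_base_directory(), "conf", "mapping.json")
        if not os.path.exists(fp_mapping):
            raise Exception(f"Mapping file not found at {fp_mapping}")
        schema = json.load(open(fp_mapping))
        return self.es.indices.create(index=indexName, settings=schema["settings"], mappings=schema["mappings"])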

    """
    CRUD operations
    """

    def search(self, selectFields: list[str], highlightFields: list[str], condition: dict, matchExprs: list[MatchExpr],
               orderBy: OrderByExpr, offset: int, limit: int, indexNames: str | list[str],
               knowledgebaseIds: list[str]) -> list[dict] | pl.DataFrame:
        """
        Refers to https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
        """
        # `s = Search()` does not appear in the fragments but is required before
        # the chained `s.knn(...)`/`s.query(...)` calls below.
        s = Search()
        bqry = None
        vector_similarity_weight = 0.5
        for m in matchExprs:
            if isinstance(m, FusionExpr) and m.method == "weighted_sum" and "weights" in m.fusion_params:
                assert len(matchExprs) == 3 and isinstance(matchExprs[0], MatchTextExpr) \
                    and isinstance(matchExprs[1], MatchDenseExpr) and isinstance(matchExprs[2], FusionExpr)
                weights = m.fusion_params["weights"]
                vector_similarity_weight = float(weights.split(",")[1])
        for m in matchExprs:
            if isinstance(m, MatchTextExpr):
                # This guard and the "0%" default are missing from the fragments;
                # assumed from the `elif MatchDenseExpr` branch below.
                minimum_should_match = "0%"
                if "minimum_should_match" in m.extra_options:
                    minimum_should_match = str(int(m.extra_options["minimum_should_match"] * 100)) + "%"
                bqry = Q("bool",
                         must=Q("query_string", fields=m.fields,
                                type="best_fields", query=m.matching_text,
                                minimum_should_match=minimum_should_match,
                                boost=1),
                         boost=1.0 - vector_similarity_weight,
                         )
            elif isinstance(m, MatchDenseExpr):
                assert bqry is not None
                similarity = 0.0
                if "similarity" in m.extra_options:
                    similarity = m.extra_options["similarity"]
                s = s.knn(m.vector_column_name,
                          m.topn,
                          m.topn * 2,
                          query_vector=list(m.embedding_data),
                          filter=bqry.to_dict(),
                          similarity=similarity,
                          )

        if condition:
            if not bqry:
                bqry = Q("bool", must=[])
            for k, v in condition.items():
                if not isinstance(k, str) or not v:
                    continue
                if isinstance(v, list):
                    bqry.filter.append(Q("terms", **{k: v}))
                elif isinstance(v, str) or isinstance(v, int):
                    bqry.filter.append(Q("term", **{k: v}))
                else:
                    raise Exception(
                        f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")

        if bqry:
            s = s.query(bqry)
        for field in highlightFields:
            s = s.highlight(field)

        if orderBy:
            # The `if orderBy:` guard and `orders` initialisation are missing
            # from the fragments but are required by the appends below (assumed).
            orders = list()
            for field, order in orderBy.fields:
                order = "asc" if order == 0 else "desc"
                orders.append({field: {"order": order, "unmapped_type": "float",
                                       "mode": "avg", "numeric_type": "double"}})
            s = s.sort(*orders)

        if limit > 0:
            s = s[offset:limit]
        q = s.to_dict()
        logger.info("ESConnection.search [Q]: " + json.dumps(q))

        for i in range(3):
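            # The body of this retry loop is missing. A minimal sketch, assuming
            # the same retry-on-timeout pattern as the other methods here; the
            # `timeout`/`track_total_hits`/`_source` arguments are assumptions.
            try:
                res = self.es.search(index=indexNames, body=q, timeout="600s", track_total_hits=True, _source=True)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                return res
            except Exception as e:
                logger.exception("ESConnection.search [Q]: " + json.dumps(q))
                if str(e).find("Timeout") > 0:
                    continue
                raise e
        raise Exception("ESConnection.search timeout.")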

    def get(self, chunkId: str, indexName: str, knowledgebaseIds: list[str]) -> dict | None:
        # The `def` line is missing from the fragments; the signature follows the
        # imported DocStoreConnection interface and is partly an assumption.
        for i in range(3):
            try:
                res = self.es.get(index=indexName,
                                  id=chunkId, source=True)
                if str(res.get("timed_out", "")).lower() == "true":
                    raise Exception("Es Timeout.")
                if not res.get("found"):
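                    return None
                # Sketch of the missing tail, assuming the `_source` document is
                # returned with its id attached, and timeouts are retried.
                chunk = res["_source"]
                chunk["id"] = chunkId
                return chunk
            except Exception as e:
                logger.exception(f"ES failed to get(index={indexName}, id={chunkId})")
                if str(e).find("Timeout") > 0:
                    continue
                raise e
        raise Exception("ESConnection.get timeout.")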
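
    def insert(self, documents: list[dict], indexName: str, knowledgebaseId: str = None) -> list[str]:
        # The signature and this preamble are missing from the fragments. A
        # sketch assuming each document carries its id in an `id` field, packed
        # into the pairwise action/source format `es.bulk` expects below.
        operations = []
        for d in documents:
            assert "id" in d and "_id" not in d
            d_copy = copy.deepcopy(d)
            meta_id = d_copy.pop("id", "")
            operations.append({"index": {"_index": indexName, "_id": meta_id}})
            operations.append(d_copy)
        res = []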
        for _ in range(100):
            try:
                r = self.es.bulk(index=indexName, operations=operations,
                                 refresh=False, timeout="600s")
                if re.search(r"False", str(r["errors"]), re.IGNORECASE):
                    return res
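                # Sketch of the missing tail: collect per-item errors and retry
                # on timeouts (the exact bulk-item error shape is an assumption).
                for item in r["items"]:
                    for action in ("create", "delete", "index", "update"):
                        if action in item and "error" in item[action]:
                            res.append(str(item[action]["_id"]) + ":" + str(item[action]["error"]))
                return res
            except Exception as e:
                logger.warning("ESConnection.insert got exception: " + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
        return res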
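
    def update(self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str) -> bool:
        # The signature and setup are missing from the fragments. A sketch
        # assuming a single-document fast path when `condition` carries an id,
        # with the update-by-query path in the `else:` branch below.
        doc = copy.deepcopy(newValue)
        doc.pop("id", None)
        if "id" in condition and isinstance(condition["id"], str):
            chunkId = condition["id"]
            for i in range(3):
                try: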
                    self.es.update(index=indexName, id=chunkId, doc=doc)
                    return True
                except Exception as e:
                    logger.exception(
                        f"ES failed to update(index={indexName}, id={chunkId}, doc={json.dumps(condition, ensure_ascii=False)})")
                    if str(e).find("Timeout") > 0:
                        continue
        else:
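            # The head of this update-by-query branch is missing; a sketch that
            # mirrors the condition handling in `search` above.
            bqry = Q("bool")
            for k, v in condition.items():
                if not isinstance(k, str) or not v:
                    continue
                if isinstance(v, list):
                    bqry.filter.append(Q("terms", **{k: v}))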
                elif isinstance(v, str) or isinstance(v, int):
                    bqry.filter.append(Q("term", **{k: v}))
                else:
                    raise Exception(
                        f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")
            scripts = []
            for k, v in newValue.items():
                if not isinstance(k, str) or not v:
                    continue
                if isinstance(v, str):
                    # The str branch is missing from the fragments; a quoted
                    # painless assignment is assumed, implied by the elif below.
                    scripts.append(f"ctx._source.{k} = '{v}'")
                elif isinstance(v, int):
                    scripts.append(f"ctx._source.{k} = {v}")
                else:
                    raise Exception(
                        f"newValue `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str.")
            ubq = UpdateByQuery(index=indexName).using(self.es).query(bqry)
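            # Sketch of the missing execution tail, assuming the scripts are
            # joined into a single painless source and timeouts/conflicts retried.
            ubq = ubq.script(source="; ".join(scripts))
            ubq = ubq.params(refresh=True, conflicts="proceed")
            for i in range(3):
                try:
                    _ = ubq.execute()
                    return True
                except Exception as e:
                    logger.error("ES update exception: " + str(e) + " [Q]: " + str(bqry.to_dict()))
                    if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
                        continue
        return False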
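
    def delete(self, condition: dict, indexName: str, knowledgebaseId: str) -> int:
        # The signature and filter construction are missing from the fragments.
        # A sketch mirroring the condition handling used elsewhere in this class;
        # the id fast path and the retry count are assumptions.
        if "id" in condition:
            chunk_ids = condition["id"]
            if not isinstance(chunk_ids, list):
                chunk_ids = [chunk_ids]
            qry = Q("ids", values=chunk_ids)
        else:
            qry = Q("bool")
            for k, v in condition.items():
                if isinstance(v, list):
                    qry.must.append(Q("terms", **{k: v}))
                elif isinstance(v, str) or isinstance(v, int):
                    qry.must.append(Q("term", **{k: v}))
                else:
                    raise Exception(
                        f"Condition `{str(k)}={str(v)}` value type is {str(type(v))}, expected to be int, str or list.")
        logger.info("ESConnection.delete [Q]: " + json.dumps(qry.to_dict()))
        for _ in range(10):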
            try:
                res = self.es.delete_by_query(
                    index=indexName,
                    body=Search().query(qry).to_dict(),
                    refresh=True)
                return res["deleted"]
            except Exception as e:
                # The except body is missing; assumed to retry on timeouts and
                # treat "not found" as zero deletions.
                logger.warning("ESConnection.delete got exception: " + str(e))
                if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                    time.sleep(3)
                    continue
                if re.search(r"(not_found)", str(e), re.IGNORECASE):
                    return 0
        return 0

    """
    Helper functions for search result
    """

    def getTotal(self, res):
        if isinstance(res["hits"]["total"], dict):
            return res["hits"]["total"]["value"]
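        # Assumed fallback when `total` is a bare number rather than an object.
        return res["hits"]["total"]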
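
    def getHighlight(self, res, keywords: list[str], fieldnm: str):
        # The head of this helper is missing from the fragments. A sketch
        # assuming it walks the ES highlight sections and, for English text,
        # falls back to tagging the keywords by hand over the raw field.
        ans = {}
        for d in res["hits"]["hits"]:
            hlts = d.get("highlight")
            if not hlts:
                continue
            txt = "...".join([a for a in list(hlts.items())[0][1]])
            if not is_english(txt.split(" ")):
                ans[d["_id"]] = txt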
                continue

            txt = d["_source"][fieldnm]
            txt = re.sub(r"[\r\n]", " ", txt, flags=re.IGNORECASE | re.MULTILINE)
            txts = []
            for t in re.split(r"[.?!;\n]", txt):
                for w in keywords:
                    t = re.sub(r"(^|[ .?/'\"\(\)!,:;-])(%s)([ .?/'\"\(\)!,:;-])" % re.escape(w), r"\1<em>\2</em>\3", t,
                               flags=re.IGNORECASE | re.MULTILINE)
                if not re.search(r"<em>[^<>]+</em>", t, flags=re.IGNORECASE | re.MULTILINE):
                    continue
                txts.append(t)
            ans[d["_id"]] = "...".join(txts) if txts else "...".join([a for a in list(hlts.items())[0][1]])
        return ans
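
    def getAggregation(self, res, fieldnm: str):
        # The `def` line and guard are missing; the `aggs_` prefix and the empty
        # fallback are assumptions based on the bucket access below.
        agg_field = "aggs_" + fieldnm
        if "aggregations" not in res or agg_field not in res["aggregations"]:
            return list()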
        bkts = res["aggregations"][agg_field]["buckets"]
        return [(b["key"], b["doc_count"]) for b in bkts]

    """
    SQL
    """

    def sql(self, sql: str, fetch_size: int, format: str):
        logger.info(f"ESConnection.sql get sql: {sql}")
        sql = re.sub(r"[ `]+", " ", sql)
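        # The head of the tokenised-field rewrite is missing from the fragments.
        # A sketch assuming it finds `<field>_tks`-style predicates and rewrites
        # them as ES MATCH() expressions; the regex, the MATCH options and the
        # rag_tokenizer calls are assumptions.
        replaces = []
        for r in re.finditer(r" ([a-z_]+_l?tks)( like | ?= ?)'([^']+)'", sql):
            fld, v = r.group(1), r.group(3)
            match = " MATCH({}, '{}', 'operator=OR;minimum_should_match=30%') ".format(
                fld, rag_tokenizer.fine_grained_tokenize(rag_tokenizer.tokenize(v)))
            replaces.append(
                ("{}{}'{}'".format(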
                    r.group(1),
                    r.group(2),
                    r.group(3)),
                 match))

        for p, r in replaces:
            sql = sql.replace(p, r, 1)

        for i in range(3):
            try:
                res = self.es.sql.query(body={"query": sql, "fetch_size": fetch_size}, format=format,
                                        request_timeout="2s")
                return res
            except ConnectionTimeout:
                logger.exception("ESConnection.sql timeout [Q]: " + sql)
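                continue
        # Assumed tail: give up after the retries above and signal failure with None.
        return None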