
Fetch chunk by batches. (#4177)

### What problem does this PR solve?

#4173

### Type of change

- [x] Performance Improvement
tags/v0.15.1
Kevin Hu, 10 months ago
Commit 31d67c850e
2 files changed, 12 insertions(+), 5 deletions(-)
  1. rag/nlp/search.py (+11, -4)
  2. rag/utils/es_conn.py (+1, -1)

rag/nlp/search.py (+11, -4)

@@ -70,7 +70,7 @@ class Dealer:
         pg = int(req.get("page", 1)) - 1
         topk = int(req.get("topk", 1024))
         ps = int(req.get("size", topk))
-        offset, limit = pg * ps, (pg + 1) * ps
+        offset, limit = pg * ps, ps

         src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd", "position_int",
                                  "doc_id", "page_num_int", "top_int", "create_timestamp_flt", "knowledge_graph_kwd", "question_kwd", "question_tks",
@@ -380,6 +380,13 @@ class Dealer:

     def chunk_list(self, doc_id: str, tenant_id: str, kb_ids: list[str], max_count=1024, fields=["docnm_kwd", "content_with_weight", "img_id"]):
         condition = {"doc_id": doc_id}
-        res = self.dataStore.search(fields, [], condition, [], OrderByExpr(), 0, max_count, index_name(tenant_id), kb_ids)
-        dict_chunks = self.dataStore.getFields(res, fields)
-        return dict_chunks.values()
+        res = []
+        bs = 128
+        for p in range(0, max_count, bs):
+            es_res = self.dataStore.search(fields, [], condition, [], OrderByExpr(), p, bs, index_name(tenant_id), kb_ids)
+            dict_chunks = self.dataStore.getFields(es_res, fields)
+            if dict_chunks:
+                res.extend(dict_chunks.values())
+            if len(dict_chunks.values()) < bs:
+                break
+        return res
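
The rewritten `chunk_list` pages through results 128 at a time instead of requesting up to `max_count` hits in a single call, stopping early as soon as a short page signals the end of the result set. A self-contained sketch of the same batching pattern, with a hypothetical `search_page(offset, batch_size)` callable standing in for the `dataStore.search` / `getFields` pair:

```python
from typing import Callable

def fetch_in_batches(search_page: Callable[[int, int], list[dict]],
                     max_count: int = 1024,
                     batch_size: int = 128) -> list[dict]:
    """Collect up to max_count records by paging batch_size hits at a time."""
    collected: list[dict] = []
    for offset in range(0, max_count, batch_size):
        batch = search_page(offset, batch_size)
        if batch:
            collected.extend(batch)
        if len(batch) < batch_size:  # short (or empty) page: no more results
            break
    return collected

# Usage with a fake, in-memory "index" of 300 records:
records = [{"id": i} for i in range(300)]
assert fetch_in_batches(lambda off, bs: records[off:off + bs]) == records
```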

rag/utils/es_conn.py (+1, -1)

@@ -196,7 +196,7 @@ class ESConnection(DocStoreConnection):
             s = s.sort(*orders)

         if limit > 0:
-            s = s[offset:limit]
+            s = s[offset:offset+limit]
         q = s.to_dict()
         logger.debug(f"ESConnection.search {str(indexNames)} query: " + json.dumps(q))
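
The one-line change in `ESConnection.search` follows from the new meaning of `limit`: since it is now a count, the exclusive end of the slice has to be `offset + limit`. The same arithmetic on a plain Python list shows why `s[offset:limit]` would return nothing for any batch after the first:

```python
hits = list(range(300))    # stand-in for a result set
offset, limit = 128, 128   # second batch requested by chunk_list

old_slice = hits[offset:limit]           # end index 128 equals the start, so it is empty
new_slice = hits[offset:offset + limit]  # hits 128..255

assert old_slice == []
assert new_slice == list(range(128, 256))
```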

