
add new model gpt-3-turbo (#352)

### What problem does this PR solve?


Issue link: #351

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
tags/v0.1.0 · KevinHuSh · 1 year ago · commit 4608cccd05
2 changed files with 15 additions and 4 deletions

  1. api/apps/conversation_app.py  +9 −4
  2. api/db/init_data.py  +6 −0

api/apps/conversation_app.py (+9 / −4)

```diff
     embd_mdl = LLMBundle(dialog.tenant_id, LLMType.EMBEDDING)
     chat_mdl = LLMBundle(dialog.tenant_id, LLMType.CHAT, dialog.llm_id)
+    prompt_config = dialog.prompt_config
     field_map = KnowledgebaseService.get_field_map(dialog.kb_ids)
     # try to use sql if field mapping is good to go
     if field_map:
         chat_logger.info("Use SQL to retrieval:{}".format(questions[-1]))
-        ans = use_sql(questions[-1], field_map, dialog.tenant_id, chat_mdl)
+        ans = use_sql(questions[-1], field_map, dialog.tenant_id, chat_mdl, prompt_config.get("quote", True))
         if ans: return ans
-    prompt_config = dialog.prompt_config
     for p in prompt_config["parameters"]:
         if p["key"] == "knowledge":
             continue

         d for d in kbinfos["doc_aggs"] if d["doc_id"] in idx]
     if not recall_docs: recall_docs = kbinfos["doc_aggs"]
     kbinfos["doc_aggs"] = recall_docs
     for c in kbinfos["chunks"]:
         if c.get("vector"):
             del c["vector"]
     return {"answer": answer, "reference": kbinfos}

-def use_sql(question, field_map, tenant_id, chat_mdl):
+def use_sql(question, field_map, tenant_id, chat_mdl, quota=True):
     sys_prompt = "你是一个DBA。你需要这对以下表的字段结构,根据用户的问题列表,写出最后一个问题对应的SQL。"
     user_promt = """
 表名:{};

     # compose markdown table
     clmns = "|" + "|".join([re.sub(r"(/.*|([^()]+))", "", field_map.get(tbl["columns"][i]["name"],
                             tbl["columns"][i]["name"])) for i in clmn_idx]) + ("|Source|" if docid_idx and docid_idx else "|")
     line = "|" + "|".join(["------" for _ in range(len(clmn_idx))]) + \
         ("|------|" if docid_idx and docid_idx else "")
     rows = ["|" +
             "|".join([rmSpace(str(r[i])) for i in clmn_idx]).replace("None", " ") +
             "|" for r in tbl["rows"]]
-    rows = "\n".join([r + f" ##{ii}$$ |" for ii, r in enumerate(rows)])
+    if quota:
+        rows = "\n".join([r + f" ##{ii}$$ |" for ii, r in enumerate(rows)])
+    else: rows = "\n".join([r + f" ##{ii}$$ |" for ii, r in enumerate(rows)])
     rows = re.sub(r"T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+Z)?\|", "|", rows)
     if not docid_idx or not docnm_idx:
```
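
(The Chinese system prompt in `use_sql` reads, roughly: "You are a DBA. Based on the field structure of the tables below and the user's list of questions, write the SQL for the last question.")

Below is a minimal, hypothetical sketch, not part of the PR, of the behavior the new `quota` flag is meant to gate: when quoting is enabled, each markdown table row gets a trailing `##<row-index>$$` marker that the answer renderer can later resolve into a per-row source citation. The helper name `format_rows` is invented for illustration; note that in the committed hunk both branches of `if quota:` still append the marker.

```python
# Hypothetical helper, not from the repository: it only illustrates the
# intended effect of use_sql(..., quota=...) on the generated markdown rows.
def format_rows(raw_rows, quote=True):
    """Join markdown table rows, optionally appending citation markers."""
    if quote:
        # "##<i>$$" is the marker syntax the diff uses for quoting sources
        return "\n".join(r + f" ##{i}$$ |" for i, r in enumerate(raw_rows))
    return "\n".join(raw_rows)


if __name__ == "__main__":
    rows = ["|Alice|30|", "|Bob|25|"]
    print(format_rows(rows, quote=True))   # rows with ##0$$ / ##1$$ markers
    print(format_rows(rows, quote=False))  # plain rows, no markers
```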

api/db/init_data.py (+6 / −0)

```diff
     "max_tokens": 8191,
     "model_type": LLMType.CHAT.value
 }, {
+    "fid": factory_infos[0]["name"],
+    "llm_name": "gpt-4-turbo",
+    "tags": "LLM,CHAT,8K",
+    "max_tokens": 8191,
+    "model_type": LLMType.CHAT.value
+},{
     "fid": factory_infos[0]["name"],
     "llm_name": "gpt-4-32k",
     "tags": "LLM,CHAT,32K",
```
