Browse Source

Fix: missing graph resolution and community extraction in graphrag tasks (#7586)

### What problem does this PR solve?

Information about whether to apply graph resolution and community extraction is
stored in `task["kb_parser_config"]`. However, the previous code got
`graphrag_conf` from `task["parser_config"]`, making `with_resolution`
and `with_community` always false.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
- [ ] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):
tags/v0.19.0
alkscr 5 months ago
parent
commit
4ae8f87754
No account linked to committer's email address
2 changed files with 3 additions and 3 deletions
  1. 1
    1
      graphrag/general/index.py
  2. 2
    2
      rag/svr/task_executor.py

+ 1
- 1
graphrag/general/index.py View File

@@ -88,7 +88,7 @@ async def run_graphrag(
)
assert new_graph is not None

if not with_resolution or not with_community:
if not with_resolution and not with_community:
return

if with_resolution:

+ 2
- 2
rag/svr/task_executor.py View File

@@ -537,9 +537,9 @@ async def do_handle_task(task):
elif task.get("task_type", "") == "graphrag":
global task_limiter
task_limiter = trio.CapacityLimiter(2)
graphrag_conf = task_parser_config.get("graphrag", {})
if not graphrag_conf.get("use_graphrag", False):
if not task_parser_config.get("graphrag", {}).get("use_graphrag", False):
return
graphrag_conf = task["kb_parser_config"].get("graphrag", {})
start_ts = timer()
chat_model = LLMBundle(task_tenant_id, LLMType.CHAT, llm_name=task_llm_id, lang=task_language)
with_resolution = graphrag_conf.get("resolution", False)

Loading…
Cancel
Save