Browse Source

feat: LLM prompt Jinja2 template now supports more variables (#24944)

tags/1.8.1
17hz 2 months ago
parent
commit
044f96bd93
No account linked to the committer's email address

+1 -1
web/app/components/workflow/nodes/llm/panel.tsx View File

@@ -140,7 +140,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
         <ConfigPrompt
           readOnly={readOnly}
           nodeId={id}
-          filterVar={filterInputVar}
+          filterVar={isShowVars ? filterJinja2InputVar : filterInputVar}
           isChatModel={isChatModel}
           isChatApp={isChatMode}
           isShowContext
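
For context, the ternary above selects which variable filter the prompt editor receives. A minimal TypeScript sketch of that pattern follows, assuming simplified stand-ins for Dify's types; only isShowVars, filterJinja2InputVar and filterInputVar come from the diff, everything else is illustrative.

// Sketch only: simplified Var shape and filter callback type, not Dify's actual props.
type Var = { variable: string; type: string }
type FilterVarFn = (v: Var) => boolean

// When the Jinja2 variable editor is shown, the broader Jinja2 filter is passed
// to the prompt config instead of the default input-variable filter.
function pickFilterVar(
  isShowVars: boolean,
  filterJinja2InputVar: FilterVarFn,
  filterInputVar: FilterVarFn,
): FilterVarFn {
  return isShowVars ? filterJinja2InputVar : filterInputVar
}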

+1 -1
web/app/components/workflow/nodes/llm/use-config.ts View File

@@ -308,7 +308,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   }, [])

   const filterJinja2InputVar = useCallback((varPayload: Var) => {
-    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber].includes(varPayload.type)
+    return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean, VarType.arrayObject, VarType.object, VarType.array, VarType.boolean].includes(varPayload.type)
   }, [])

   const filterMemoryPromptVar = useCallback((varPayload: Var) => {
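
The change above widens filterJinja2InputVar so that object, array, boolean and the remaining array element types can be referenced from the Jinja2 template. A minimal runnable sketch is below; the VarType values and Var shape are assumptions for illustration, not Dify's exact definitions.

// Sketch only: simplified enum/type definitions standing in for Dify's Var/VarType.
enum VarType {
  string = 'string',
  number = 'number',
  secret = 'secret',
  boolean = 'boolean',
  object = 'object',
  array = 'array',
  arrayString = 'array[string]',
  arrayNumber = 'array[number]',
  arrayBoolean = 'array[boolean]',
  arrayObject = 'array[object]',
  file = 'file',
}

type Var = { variable: string; type: VarType }

// After the change, every listed type passes the filter, so objects, arrays and
// booleans become selectable in the Jinja2 prompt editor; unlisted types (e.g. file) do not.
const filterJinja2InputVar = (varPayload: Var): boolean =>
  [
    VarType.number, VarType.string, VarType.secret,
    VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean,
    VarType.arrayObject, VarType.object, VarType.array, VarType.boolean,
  ].includes(varPayload.type)

// Usage: narrow a list of upstream variables to those the editor may offer.
const candidates: Var[] = [
  { variable: 'user_query', type: VarType.string },
  { variable: 'docs', type: VarType.arrayObject },
  { variable: 'avatar', type: VarType.file },
]
console.log(candidates.filter(filterJinja2InputVar).map(v => v.variable))
// -> [ 'user_query', 'docs' ]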
