Browse Source

feat: LLM prompt Jinja2 template now supports more variables (#24944)

tags/1.8.1
17hz 2 months ago
parent
commit
044f96bd93
No account linked to committer's email address

+ 1
- 1
web/app/components/workflow/nodes/llm/panel.tsx View File

<ConfigPrompt <ConfigPrompt
readOnly={readOnly} readOnly={readOnly}
nodeId={id} nodeId={id}
filterVar={filterInputVar}
filterVar={isShowVars ? filterJinja2InputVar : filterInputVar}
isChatModel={isChatModel} isChatModel={isChatModel}
isChatApp={isChatMode} isChatApp={isChatMode}
isShowContext isShowContext

+ 1
- 1
web/app/components/workflow/nodes/llm/use-config.ts View File

}, []) }, [])


const filterJinja2InputVar = useCallback((varPayload: Var) => { const filterJinja2InputVar = useCallback((varPayload: Var) => {
return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber].includes(varPayload.type)
return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean, VarType.arrayObject, VarType.object, VarType.array, VarType.boolean].includes(varPayload.type)
}, []) }, [])


const filterMemoryPromptVar = useCallback((varPayload: Var) => { const filterMemoryPromptVar = useCallback((varPayload: Var) => {

Loading…
Cancel
Save