
fix(web): optimize prompt change logic for LLM nodes (#20841) (#20865)

tags/1.4.2
HyaCinth · 4 months ago
commit fc6e2d14a5

web/app/components/workflow/nodes/llm/use-config.ts (+2 -2)

   }, [inputs, setInputs])
 
   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
-    const newInputs = produce(inputs, (draft) => {
+    const newInputs = produce(inputRef.current, (draft) => {
       draft.prompt_template = newPrompt
     })
     setInputs(newInputs)
-  }, [inputs, setInputs])
+  }, [setInputs])
 
   const handleMemoryChange = useCallback((newMemory?: Memory) => {
     const newInputs = produce(inputs, (draft) => {
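The point of the change: closing over the inputs state meant handlePromptChange was recreated on every inputs update, since inputs sat in the dependency array. Reading the latest value from inputRef.current instead lets the dependency shrink to [setInputs], so the callback keeps a stable identity across re-renders while still operating on fresh state. A minimal sketch of the pattern, assuming inputRef is a ref that setInputs keeps in sync (the wiring below and the Inputs type, with PromptItem simplified to string, are assumptions for illustration, not the hook's actual code):

import { useCallback, useRef, useState } from 'react'
import { produce } from 'immer'

type Inputs = { prompt_template: string }

function useConfigSketch(initial: Inputs) {
  const [inputs, doSetInputs] = useState(initial)
  // Ref mirrors the latest inputs so callbacks can read fresh state
  // without listing `inputs` in their dependency arrays.
  const inputRef = useRef(inputs)
  const setInputs = useCallback((next: Inputs) => {
    inputRef.current = next
    doSetInputs(next)
  }, [])

  // Stable identity: only recreated if setInputs changes, and it never does.
  const handlePromptChange = useCallback((newPrompt: string) => {
    const newInputs = produce(inputRef.current, (draft) => {
      draft.prompt_template = newPrompt
    })
    setInputs(newInputs)
  }, [setInputs])

  return { inputs, handlePromptChange }
}

With the state-closing version, every keystroke in the prompt editor produced a new handlePromptChange, forcing memoized children that receive it as a prop to re-render; the ref-based version avoids that churn.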

web/app/components/workflow/types.ts (+1 -1)

   hint?: string
   options?: string[]
   value_selector?: ValueSelector
-  hide: boolean
+  hide?: boolean
 } & Partial<UploadFileSetting>
 
 export type ModelConfig = {
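Loosening hide from required to optional likely accommodates input-variable configs that never set the flag: with hide?: boolean they still satisfy the type, and consumers can treat the absent value as falsy (e.g. if (!variable.hide) { /* render */ } — the variable accessor here is illustrative, not a name from the diff).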
