Commit 3823ae58 authored by Joel's avatar Joel

chore: rename prompt to prompt_template

parent 14d71fb5
......@@ -12,7 +12,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
},
},
variables: [],
prompt: [{
prompt_template: [{
role: PromptRole.system,
text: '',
}],
......
......@@ -160,7 +160,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
<ConfigPrompt
readOnly={readOnly}
isChatModel={isChatModel}
payload={inputs.prompt}
payload={inputs.prompt_template}
variables={inputs.variables.map(item => item.variable)}
onChange={handlePromptChange}
/>
......
......@@ -4,7 +4,7 @@ import type { CommonNodeType, Memory, ModelConfig, PromptItem, ValueSelector, Va
export type LLMNodeType = CommonNodeType & {
model: ModelConfig
variables: Variable[]
prompt: PromptItem[] | PromptItem
prompt_template: PromptItem[] | PromptItem
memory: Memory
context: {
enabled: boolean
......
......@@ -27,7 +27,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
draft.model.mode = model.mode!
const isModeChange = model.mode !== inputs.model.mode
if (isModeChange)
draft.prompt = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
draft.prompt_template = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
})
setInputs(newInputs)
}, [inputs, setInputs])
......@@ -65,7 +65,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
const newInputs = produce(inputs, (draft) => {
draft.prompt = newPrompt
draft.prompt_template = newPrompt
})
setInputs(newInputs)
}, [inputs, setInputs])
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment