Commit 3823ae58 authored by Joel

chore: rename prompt to prompt_template

parent 14d71fb5
@@ -12,7 +12,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
     },
   },
   variables: [],
-  prompt: [{
+  prompt_template: [{
     role: PromptRole.system,
     text: '',
   }],
...
@@ -160,7 +160,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
       <ConfigPrompt
         readOnly={readOnly}
         isChatModel={isChatModel}
-        payload={inputs.prompt}
+        payload={inputs.prompt_template}
         variables={inputs.variables.map(item => item.variable)}
         onChange={handlePromptChange}
       />
...
@@ -4,7 +4,7 @@ import type { CommonNodeType, Memory, ModelConfig, PromptItem, ValueSelector, Va
 export type LLMNodeType = CommonNodeType & {
   model: ModelConfig
   variables: Variable[]
-  prompt: PromptItem[] | PromptItem
+  prompt_template: PromptItem[] | PromptItem
   memory: Memory
   context: {
     enabled: boolean
...
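The renamed `prompt_template` field keeps the old `prompt` field's union shape: `PromptItem[]` when the model runs in chat mode (one entry per role-tagged message) and a single `PromptItem` in completion mode. A minimal sketch of how a consumer might narrow that union, using simplified stand-in types (the real `PromptItem` and `PromptRole` live in the workflow type modules):

```ts
// Simplified stand-ins for illustration; not the project's real definitions.
enum PromptRole { system = 'system', user = 'user', assistant = 'assistant' }
type PromptItem = { role?: PromptRole; text: string }

// Hypothetical helper: normalize prompt_template into a message list by
// narrowing the PromptItem[] | PromptItem union with Array.isArray.
function toMessages(promptTemplate: PromptItem[] | PromptItem): PromptItem[] {
  if (Array.isArray(promptTemplate))
    return promptTemplate // chat mode: already a list of role-tagged messages
  // completion mode: wrap the single text template as one user message
  return [{ role: PromptRole.user, text: promptTemplate.text }]
}
```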
@@ -27,7 +27,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
       draft.model.mode = model.mode!
       const isModeChange = model.mode !== inputs.model.mode
       if (isModeChange)
-        draft.prompt = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
+        draft.prompt_template = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
     })
     setInputs(newInputs)
   }, [inputs, setInputs])
...
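When the model mode flips between chat and completion, the hook resets `prompt_template` to the empty shape matching the new mode, since the two shapes are not interchangeable. The rule in isolation, as a sketch reusing the simplified types above (the helper name is illustrative, not part of this commit):

```ts
// Illustrative helper: the empty default prompt_template for each mode.
function emptyPromptTemplate(mode: 'chat' | 'completion'): PromptItem[] | PromptItem {
  return mode === 'chat'
    ? [{ role: PromptRole.system, text: '' }] // chat: one empty system message
    : { text: '' } // completion: one empty text template
}
```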
@@ -65,7 +65,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
   const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
     const newInputs = produce(inputs, (draft) => {
-      draft.prompt = newPrompt
+      draft.prompt_template = newPrompt
     })
     setInputs(newInputs)
   }, [inputs, setInputs])
...
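`handlePromptChange` keeps the same immer `produce` pattern as the rest of the hook: copy-on-write the inputs object with only the renamed field updated, then commit it via `setInputs`. A self-contained sketch of that update step, again with the simplified `PromptItem` from above (a plain function rather than the real React callback, so the update rule is easy to test):

```ts
import { produce } from 'immer'

type Inputs = { prompt_template: PromptItem[] | PromptItem }

// Hypothetical standalone version of the handler: returns the next state
// instead of calling a React state setter.
function withNewPrompt(inputs: Inputs, newPrompt: PromptItem[] | PromptItem): Inputs {
  return produce(inputs, (draft) => {
    draft.prompt_template = newPrompt // only the renamed field changes
  })
}
```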