Commit 3202f12c authored by Joel

feat: config prompt

parent 6448d71c
......@@ -32,8 +32,8 @@ const allMockData = {
[BlockEnum.End]: EndNodeMock,
}
const nodes = [
BlockEnum.Code/* 7 */, BlockEnum.Start/* 1 */, BlockEnum.QuestionClassifier/* 5 */, BlockEnum.DirectAnswer/* 2 */, BlockEnum.LLM/* 3 */, BlockEnum.KnowledgeRetrieval/* 4 */,
BlockEnum.IfElse/* 6 */, BlockEnum.TemplateTransform/* 8 */, BlockEnum.HttpRequest/* 9 */, BlockEnum.Tool/* 10 */,
BlockEnum.LLM/* 3 */, BlockEnum.Start/* 1 */, BlockEnum.QuestionClassifier/* 5 */, BlockEnum.DirectAnswer/* 2 */, BlockEnum.KnowledgeRetrieval/* 4 */,
BlockEnum.IfElse/* 6 */, BlockEnum.Code/* 7 */, BlockEnum.TemplateTransform/* 8 */, BlockEnum.HttpRequest/* 9 */, BlockEnum.Tool/* 10 */,
BlockEnum.VariableAssigner/* 11 */, BlockEnum.End/* 12 */,
].map((item, i) => {
const payload = allMockData[item]
......
......@@ -9,12 +9,16 @@ import PromptEditor from '@/app/components/base/prompt-editor'
import { Clipboard, ClipboardCheck } from '@/app/components/base/icons/src/vender/line/files'
import { Expand04 } from '@/app/components/base/icons/src/vender/solid/arrows'
import s from '@/app/components/app/configuration/config-prompt/style.module.css'
import { Trash03 } from '@/app/components/base/icons/src/vender/line/general'
type Props = {
title: string
title: string | JSX.Element
value: string
variables: string[]
onChange: (value: string) => void
readOnly?: boolean
showRemove?: boolean
onRemove?: () => void
}
const Editor: FC<Props> = ({
......@@ -22,6 +26,9 @@ const Editor: FC<Props> = ({
value,
variables,
onChange,
readOnly,
showRemove,
onRemove,
}) => {
const { t } = useTranslation()
......@@ -40,20 +47,27 @@ const Editor: FC<Props> = ({
return (
<div className={cn(s.gradientBorder, '!rounded-[9px] shadow-md')}>
<div className='rounded-lg bg-white'>
<div className='pt-1 pl-3 pr-1 flex justify-between h-6 items-center'>
<div className='pt-1 pl-3 pr-2 flex justify-between h-6 items-center'>
<div className='leading-4 text-xs font-semibold text-gray-700 uppercase'>{title}</div>
<div className='flex items-center'>
<div className='leading-[18px] text-xs font-medium text-gray-500'>{value.length}</div>
<div className='w-px h-3 ml-2 mr-3 bg-gray-200'></div>
<div className='w-px h-3 ml-2 mr-2 bg-gray-200'></div>
{/* Operations */}
<div className='flex items-center space-x-2'>
{showRemove && (
<Trash03 className='w-3.5 h-3.5 text-gray-500 cursor-pointer' onClick={onRemove} />
)}
{!isCopied
? (
<Clipboard className='mx-1 w-3.5 h-3.5 text-gray-500 cursor-pointer' onClick={handleCopy} />
<Clipboard className='w-3.5 h-3.5 text-gray-500 cursor-pointer' onClick={handleCopy} />
)
: (
<ClipboardCheck className='mx-1 w-3.5 h-3.5 text-gray-500' />
)
}
<Expand04 className='ml-2 mr-2 w-3.5 h-3.5 text-gray-500 cursor-pointer' onClick={toggleExpand} />
<Expand04 className='w-3.5 h-3.5 text-gray-500 cursor-pointer' onClick={toggleExpand} />
</div>
</div>
</div>
<PromptEditorHeightResizeWrap
......@@ -99,6 +113,7 @@ const Editor: FC<Props> = ({
}}
onChange={onChange}
onBlur={() => { }}
editable={!readOnly}
/>
</PromptEditorHeightResizeWrap>
</div>
......
'use client'
import type { FC } from 'react'
import React from 'react'
import React, { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import produce from 'immer'
import type { PromptItem } from '../../../types'
import { PromptRole } from '../../../types'
import Editor from '@/app/components/workflow/nodes/_base/components/prompt/editor'
import AddButton from '@/app/components/workflow/nodes/_base/components/add-button'
const i18nPrefix = 'workflow.nodes.llm'
// Props for the LLM-node prompt configuration panel.
type Props = {
  // NOTE(review): appears unused by this component — confirm with callers before removing.
  prompt: string
  // When true, every message editor is rendered read-only.
  readOnly: boolean
  // Chat models edit a list of role-tagged messages; completion models edit one prompt.
  isChatModel: boolean
  // PromptItem[] in chat mode, a single PromptItem in completion mode.
  payload: PromptItem | PromptItem[]
  // Variable names offered to the prompt editors for interpolation.
  variables: string[]
  // Called with the full, immutably-updated prompt payload on every edit.
  onChange: (payload: PromptItem | PromptItem[]) => void
}
/**
 * Prompt configuration for the workflow LLM node.
 *
 * Chat mode renders one editor per message (removable while more than one
 * remains) plus an "add message" button; completion mode renders a single
 * prompt editor. All updates are produced immutably via immer and pushed up
 * through `onChange`.
 */
const ConfigPrompt: FC<Props> = ({
  readOnly,
  isChatModel,
  payload,
  variables,
  onChange,
}) => {
  const { t } = useTranslation()

  // Returns an onChange handler bound to the chat message at `index`.
  const handleChatModePromptChange = useCallback((index: number) => {
    return (prompt: string) => {
      const newPrompt = produce(payload as PromptItem[], (draft) => {
        draft[index].text = prompt
      })
      onChange(newPrompt)
    }
  }, [onChange, payload])

  // Appends an empty message, alternating the role off the last entry.
  const handleAddPrompt = useCallback(() => {
    const newPrompt = produce(payload as PromptItem[], (draft) => {
      // Fix: guard against an empty list — `draft[draft.length - 1]` is
      // undefined there and the original `.role` access would throw.
      // An empty list now starts with a user message.
      const lastItem = draft[draft.length - 1]
      const isLastItemUser = lastItem?.role === PromptRole.user
      draft.push({ role: isLastItemUser ? PromptRole.system : PromptRole.user, text: '' })
    })
    onChange(newPrompt)
  }, [onChange, payload])

  // Returns a handler that deletes the chat message at `index`.
  const handleRemove = useCallback((index: number) => {
    return () => {
      const newPrompt = produce(payload as PromptItem[], (draft) => {
        draft.splice(index, 1)
      })
      onChange(newPrompt)
    }
  }, [onChange, payload])

  // Completion mode: the payload is a single PromptItem whose text is edited.
  const handleCompletionPromptChange = useCallback((prompt: string) => {
    const newPrompt = produce(payload as PromptItem, (draft) => {
      draft.text = prompt
    })
    onChange(newPrompt)
  }, [onChange, payload])

  return (
    <div>
      {isChatModel
        ? (
          <div>
            <div className='space-y-2'>
              {
                // NOTE(review): index keys plus removal can misattribute
                // editor state; messages carry no stable id — confirm.
                (payload as PromptItem[]).map((item, index) => {
                  return (
                    <Editor
                      key={index}
                      title={item.role === PromptRole.user ? 'User' : 'System'}
                      value={item.text}
                      onChange={handleChatModePromptChange(index)}
                      variables={variables}
                      readOnly={readOnly}
                      showRemove={(payload as PromptItem[]).length > 1}
                      onRemove={handleRemove(index)}
                    />
                  )
                })
              }
            </div>
            <AddButton
              className='mt-2'
              text={t(`${i18nPrefix}.addMessage`)}
              onClick={handleAddPrompt}
            />
          </div>
        )
        : (
          <div>
            <Editor
              title={<span className='capitalize'>{t(`${i18nPrefix}.prompt`)}</span>}
              value={(payload as PromptItem).text}
              onChange={handleCompletionPromptChange}
              variables={variables}
              readOnly={readOnly}
            />
          </div>
        )}
    </div>
  )
}
......
import type { NodeDefault } from '../../types'
import { type NodeDefault, PromptRole } from '../../types'
import type { LLMNodeType } from './types'
const nodeDefault: NodeDefault<LLMNodeType> = {
......@@ -12,7 +12,10 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
},
},
variables: [],
prompt: [],
prompt: [{
role: PromptRole.system,
text: '',
}],
context: {
enabled: false,
variable_selector: [],
......
import { BlockEnum } from '../../types'
import { BlockEnum, PromptRole } from '../../types'
import type { LLMNodeType } from './types'
import { Resolution } from '@/types/app'
......@@ -24,7 +24,12 @@ export const mockData: LLMNodeType = {
value_selector: ['bbb', 'b', 'c'],
},
],
prompt: [],
prompt: [
{
role: PromptRole.system,
text: '',
},
],
memory: {
role_prefix: {
user: 'user: ',
......
......@@ -6,6 +6,7 @@ import VarReferencePicker from '../_base/components/variable/var-reference-picke
import useConfig from './use-config'
import ResolutionPicker from './components/resolution-picker'
import type { LLMNodeType } from './types'
import ConfigPrompt from './components/config-prompt'
import VarList from '@/app/components/workflow/nodes/_base/components/variable/var-list'
import Field from '@/app/components/workflow/nodes/_base/components/field'
import AddButton from '@/app/components/base/button/add-button'
......@@ -26,21 +27,21 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
const {
inputs,
isChatModel,
isCompletionModel,
isShowVisionConfig,
handleModelChanged,
handleCompletionParamsChange,
handleVarListChange,
handleAddVariable,
handleContextVarChange,
handlePromptChange,
handleMemoryChange,
handleVisionResolutionChange,
} = useConfig(id, data)
const isChatApp = true // TODO: get from app context
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isCompletionModel = !isChatModel
return (
<div className='mt-2'>
......@@ -90,16 +91,20 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
</Field>
{/* Prompt */}
<Field
title={t(`${i18nPrefix}.prompt`)}
>
Prompt
</Field>
{model.name && (
<ConfigPrompt
readOnly={readOnly}
isChatModel={isChatModel}
payload={inputs.prompt}
variables={inputs.variables.map(item => item.variable)}
onChange={handlePromptChange}
/>
)}
{/* Memory examples */}
{isChatApp && isChatModel && (
{/* Memory examples. Wait for design */}
{/* {isChatApp && isChatModel && (
<div className='text-xs text-gray-300'>Memory examples(Designing)</div>
)}
)} */}
{/* Memory */}
{isChatApp && (
<>
......
import { useCallback } from 'react'
import produce from 'immer'
import useVarList from '../_base/hooks/use-var-list'
import type { Memory, ValueSelector } from '../../types'
import { type Memory, PromptRole, type ValueSelector } from '../../types'
import type { LLMNodeType } from './types'
import type { Resolution } from '@/types/app'
import { Resolution } from '@/types/app'
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelFeatureEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import useNodeCrud from '@/app/components/workflow/nodes/_base/hooks/use-node-crud'
import type { PromptItem } from '@/models/debug'
const useConfig = (id: string, payload: LLMNodeType) => {
const { inputs, setInputs } = useNodeCrud<LLMNodeType>(id, payload)
// model
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isCompletionModel = !isChatModel
const handleModelChanged = useCallback((model: { provider: string; modelId: string; mode?: string }) => {
const newInputs = produce(inputs, (draft) => {
draft.model.provider = model.provider
draft.model.name = model.modelId
draft.model.mode = model.mode!
const isModeChange = model.mode !== inputs.model.mode
if (isModeChange)
draft.prompt = model.mode === 'chat' ? [{ role: PromptRole.system, text: '' }] : { text: '' }
})
setInputs(newInputs)
}, [inputs, setInputs])
......@@ -53,6 +61,13 @@ const useConfig = (id: string, payload: LLMNodeType) => {
setInputs(newInputs)
}, [inputs, setInputs])
const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
const newInputs = produce(inputs, (draft) => {
draft.prompt = newPrompt
})
setInputs(newInputs)
}, [inputs, setInputs])
const handleMemoryChange = useCallback((newMemory: Memory) => {
const newInputs = produce(inputs, (draft) => {
draft.memory = newMemory
......@@ -62,6 +77,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
const handleVisionResolutionChange = useCallback((newResolution: Resolution) => {
const newInputs = produce(inputs, (draft) => {
if (!draft.vision.configs) {
draft.vision.configs = {
detail: Resolution.high,
}
}
draft.vision.configs.detail = newResolution
})
setInputs(newInputs)
......@@ -69,12 +89,15 @@ const useConfig = (id: string, payload: LLMNodeType) => {
return {
inputs,
isChatModel,
isCompletionModel,
isShowVisionConfig,
handleModelChanged,
handleCompletionParamsChange,
handleVarListChange,
handleAddVariable,
handleContextVarChange,
handlePromptChange,
handleMemoryChange,
handleVisionResolutionChange,
}
......
......@@ -96,6 +96,7 @@ const translation = {
context: 'context',
contextTooltip: 'You can import Knowledge as context',
prompt: 'prompt',
addMessage: 'Add Message',
vision: 'vision',
resolution: {
name: 'Resolution',
......
......@@ -96,6 +96,7 @@ const translation = {
context: '上下文',
contextTooltip: '您可以导入知识库作为上下文',
prompt: '提示词',
addMessage: '添加消息',
vision: '视觉',
resolution: {
name: '分辨率',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment