Commit ab6a01b4 authored by Joel

chore: handle llm model type

parent dce01cf0
@@ -7,7 +7,7 @@ import TooltipPlus from '@/app/components/base/tooltip-plus'
type Props = {
title: string
tooltip?: string
children: JSX.Element | string
children?: JSX.Element | string | null
operations?: JSX.Element
inline?: boolean
}
@@ -33,7 +33,7 @@ const Filed: FC<Props> = ({
</div>
{operations && <div>{operations}</div>}
</div>
<div>{children}</div>
{children && <div className={cn(!inline && 'mt-1')}>{children}</div>}
</div>
)
}
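
Review note on the Field component change: `children` is now optional and nullable, and the body wrapper `<div>` (which picks up `mt-1` spacing in non-inline layout) is only rendered when a caller actually passes content. A small usage sketch against the new `Props` type; the import path is the one used in the panel diff, everything else is illustrative:

```tsx
import type { FC } from 'react'
import Field from '@/app/components/workflow/nodes/_base/components/field'

// Illustrative wrapper; contextEnabled stands in for whatever flag a caller holds.
const Example: FC<{ contextEnabled: boolean }> = ({ contextEnabled }) => (
  <>
    {/* body rendered and wrapped in a div that picks up mt-1 (block layout) */}
    <Field title='Prompt'>Prompt</Field>
    {/* null children: Field now skips the body wrapper entirely instead of
        rendering an empty, margined div */}
    <Field title='Context'>
      {contextEnabled ? <div>Context</div> : null}
    </Field>
  </>
)

export default Example
```
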
@@ -9,7 +9,7 @@ export const mockLLMNodeData: LLMNodeData = {
model: {
provider: 'openai',
name: 'gpt-4',
mode: 'completion',
mode: 'chat',
completion_params: {
temperature: 0.7,
},
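
The mock node now defaults to a chat-mode model, which lines up with the `isChatMode` flag the panel derives below. For orientation, this is the shape the mock and the `useInput` assignments appear to assume for the model slice of `LLMNodeData` (a sketch inferred from the diff; the authoritative definition lives in `../../types`):

```tsx
// Inferred from mockLLMNodeData and from the assignments in the useInput hook;
// field names come from the diff, the exact union of modes is an assumption.
interface LLMNodeModelConfig {
  provider: string          // e.g. 'openai'
  name: string              // e.g. 'gpt-4'
  mode: string              // 'chat' | 'completion', resolved per selected model
  completion_params: {
    temperature: number
    [key: string]: unknown  // other provider-specific sampling params
  }
}
```
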
@@ -7,15 +7,21 @@ import Field from '@/app/components/workflow/nodes/_base/components/field'
import AddButton from '@/app/components/base/button/add-button'
import Split from '@/app/components/workflow/nodes/_base/components/split'
import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector'
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
import Switch from '@/app/components/base/switch'
const i18nPrefix = 'workflow.nodes.llm'
const Panel: FC = () => {
const { t } = useTranslation()
const { inputs, handleModelChanged } = useInput(mockLLMNodeData)
const {
textGenerationModelList,
} = useTextGenerationCurrentProviderAndModelAndModelList()
inputs,
handleModelChanged,
toggleContextEnabled,
} = useInput(mockLLMNodeData)
const modelMode = inputs.model.mode
const isChatMode = modelMode === 'chat'
const handleAddVariable = () => {
console.log('add variable')
}
@@ -49,11 +55,22 @@ const Panel: FC = () => {
<Field
title={t(`${i18nPrefix}.context`)}
operations={
<Switch
defaultValue={inputs.context.enabled}
onChange={toggleContextEnabled}
size='md'
/>
}
>
Context
{inputs.context.enabled
? (
<div>Context</div>
)
: null}
</Field>
<Field
title={t(`${i18nPrefix}.context`)}
title={t(`${i18nPrefix}.prompt`)}
>
Prompt
</Field>
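
Condensed, the panel change reads: the model mode is derived from the selected model instead of being hard-coded, the context toggle moves into the Field's `operations` slot, and the context body only renders while the toggle is on (the second Field also picks up the correct `prompt` i18n key). The sketch below re-states that wiring in one place; the `useInput`/mock import paths are assumptions, the other identifiers appear in the diff, and `isChatMode` is computed but not yet consumed in this commit.

```tsx
import type { FC } from 'react'
import { useTranslation } from 'react-i18next'
import Field from '@/app/components/workflow/nodes/_base/components/field'
import Switch from '@/app/components/base/switch'
// Assumed local paths; the diff does not show these two imports.
import useInput from './use-input'
import { mockLLMNodeData } from './mock'

const i18nPrefix = 'workflow.nodes.llm'

const Panel: FC = () => {
  const { t } = useTranslation()
  // the full destructure also pulls handleModelChanged for the ModelSelector,
  // which is omitted from this sketch
  const {
    inputs,
    toggleContextEnabled,
  } = useInput(mockLLMNodeData)

  // follows the selected model instead of a hard-coded value
  const isChatMode = inputs.model.mode === 'chat'

  return (
    <Field
      title={t(`${i18nPrefix}.context`)}
      operations={
        // defaultValue (not value): the Switch only reports toggles; the
        // source of truth for context.enabled stays inside useInput
        <Switch
          defaultValue={inputs.context.enabled}
          onChange={toggleContextEnabled}
          size='md'
        />
      }
    >
      {inputs.context.enabled ? <div>Context</div> : null}
    </Field>
  )
}

export default Panel
```

One thing to watch: because the Switch is seeded via `defaultValue` rather than a controlled `value`, it presumably keeps its own internal state and will not resync if `context.enabled` is changed from anywhere other than this toggle.
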
import { useCallback, useState } from 'react'
import produce from 'immer'
import type { LLMNodeData } from '../../types'
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
const useInput = (initInputs: LLMNodeData) => {
const {
textGenerationModelList,
} = useTextGenerationCurrentProviderAndModelAndModelList()
const [inputs, setInputs] = useState<LLMNodeData>(initInputs)
const handleModelChanged = useCallback((model: { provider: string; model: string }) => {
const targetProvider = textGenerationModelList.find(modelItem => modelItem.provider === model.provider)
const targetModelItem = targetProvider?.models.find(modelItem => modelItem.model === model.model)
const newInputs = produce(inputs, (draft) => {
draft.model.provider = model.provider
draft.model.name = model.model
draft.model.mode = targetModelItem?.model_properties.mode as string
})
setInputs(newInputs)
}, [inputs.model])
}, [inputs.model, textGenerationModelList])
const toggleContextEnabled = useCallback(() => {
const newInputs = produce(inputs, (draft) => {
draft.context.enabled = !draft.context.enabled
})
setInputs(newInputs)
}, [inputs.context.enabled])
return {
textGenerationModelList,
inputs,
setInputs: (key: string, payload: any) => {
setInputs({
...inputs,
[key]: payload,
} as LLMNodeData)
},
handleModelChanged,
toggleContextEnabled,
}
}
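
In the `useInput` hook, `handleModelChanged` now resolves the mode of the newly selected model by looking it up in `textGenerationModelList` (provider first, then model) and copying `model_properties.mode` onto the draft; the dependency array gains `textGenerationModelList` accordingly, and both the list and `toggleContextEnabled` are exposed to the panel. A sketch of that lookup and the list shape it assumes; the interfaces below are inferred from the two `.find()` calls, not taken from the provider hooks' real type definitions:

```tsx
// Shapes inferred from handleModelChanged; the real types in the
// model-provider hooks likely carry more fields than shown here.
interface ProviderModelItem {
  model: string                 // e.g. 'gpt-4'
  model_properties: {
    mode: string                // 'chat' | 'completion'
    [key: string]: unknown
  }
}

interface TextGenerationProvider {
  provider: string              // e.g. 'openai'
  models: ProviderModelItem[]
}

// The same lookup handleModelChanged performs, written as a plain function.
function resolveModelMode(
  list: TextGenerationProvider[],
  selection: { provider: string; model: string },
): string | undefined {
  const providerItem = list.find(item => item.provider === selection.provider)
  const modelItem = providerItem?.models.find(item => item.model === selection.model)
  return modelItem?.model_properties.mode
}
```

If the model cannot be found, the resolved mode is `undefined`; the hook's `as string` cast does not change that at runtime, so a downstream check like `isChatMode` simply evaluates to false.
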