Commit 16c24dcc authored by Joel

Merge branch 'feat/support-claude' into deploy/dev

parents 9ea413ba 6409b2e6
@@ -9,7 +9,7 @@ import ParamItem from './param-item'
 import Radio from '@/app/components/base/radio'
 import Panel from '@/app/components/base/panel'
 import type { CompletionParams } from '@/models/debug'
-import { AppType } from '@/types/app'
+import { AppType, ProviderType } from '@/types/app'
 import { TONE_LIST } from '@/config'
 import Toast from '@/app/components/base/toast'
 import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback'
@@ -17,7 +17,7 @@ import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback'
 export type IConifgModelProps = {
   mode: string
   modelId: string
-  setModelId: (id: string) => void
+  setModelId: (id: string, provider: ProviderType) => void
   completionParams: CompletionParams
   onCompletionParamsChange: (newParams: CompletionParams) => void
   disabled: boolean
@@ -29,12 +29,26 @@ const options = [
   { id: 'gpt-3.5-turbo', name: 'gpt-3.5-turbo', type: AppType.chat },
   { id: 'gpt-3.5-turbo-16k', name: 'gpt-3.5-turbo-16k', type: AppType.chat },
   { id: 'gpt-4', name: 'gpt-4', type: AppType.chat }, // 8k version
+  { id: 'claude-instant-1', name: 'claude-instant-1', type: AppType.chat, provider: ProviderType.anthropic }, // set 30k
+  { id: 'claude-2', name: 'claude-2', type: AppType.chat, provider: ProviderType.anthropic }, // set 30k
   { id: 'gpt-3.5-turbo', name: 'gpt-3.5-turbo', type: AppType.completion },
   { id: 'gpt-3.5-turbo-16k', name: 'gpt-3.5-turbo-16k', type: AppType.completion },
   { id: 'text-davinci-003', name: 'text-davinci-003', type: AppType.completion },
   { id: 'gpt-4', name: 'gpt-4', type: AppType.completion }, // 8k version
+  { id: 'claude-instant-1', name: 'claude-instant-1', type: AppType.completion, provider: ProviderType.anthropic }, // set 30k
+  { id: 'claude-2', name: 'claude-2', type: AppType.completion, provider: ProviderType.anthropic }, // set 30k
 ]
+
+const getMaxToken = (modelId: string) => {
+  if (['claude-instant-1', 'claude-2'].includes(modelId))
+    return 30 * 1000
+  if (['gpt-4', 'gpt-3.5-turbo-16k'].includes(modelId))
+    return 8000
+  return 4000
+}

 const ModelIcon = ({ className }: { className?: string }) => (
   <svg className={`w-4 h-4 ${className}`} width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
     <rect width="20" height="20" rx="6" fill="black" />
@@ -99,7 +113,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
       key: 'max_tokens',
       tip: t('common.model.params.maxTokenTip'),
       step: 100,
-      max: (modelId === 'gpt-4' || modelId === 'gpt-3.5-turbo-16k') ? 8000 : 4000,
+      max: getMaxToken(modelId),
     },
   ]
@@ -112,7 +126,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
     hideOption()
   }, triggerRef)

-  const handleSelectModel = (id: string) => {
+  const handleSelectModel = (id: string, provider = ProviderType.openai) => {
     return () => {
       if (id === 'gpt-4' && !canUseGPT4) {
         hideConfig()
@@ -130,7 +144,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
           max_tokens: 4000,
         })
       }
-      setModelId(id)
+      setModelId(id, provider)
     }
   }
@@ -227,7 +241,7 @@ const ConifgModel: FC<IConifgModelProps> = ({
       {isShowOption && (
         <div className={cn(isChatApp ? 'min-w-[159px]' : 'w-[179px]', 'absolute right-0 bg-gray-50 rounded-lg shadow')}>
           {availableModels.map(item => (
-            <div key={item.id} onClick={handleSelectModel(item.id)} className="flex items-center h-9 px-3 rounded-lg cursor-pointer hover:bg-gray-100">
+            <div key={item.id} onClick={handleSelectModel(item.id, item.provider)} className="flex items-center h-9 px-3 rounded-lg cursor-pointer hover:bg-gray-100">
              <ModelIcon className='shrink-0 mr-2' />
              <div className="text-sm gray-900 whitespace-nowrap">{item.name}</div>
            </div>
......
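Note on the hunks above: the hard-coded max_tokens cap is replaced by getMaxToken, and handleSelectModel defaults its new provider argument to ProviderType.openai so the existing OpenAI entries, which carry no provider field, keep resolving correctly. A minimal standalone sketch of the resulting caps (TypeScript; the function body is copied from the diff, the console checks are illustrative only):

// Same rule as the diff: Claude models get a 30k cap, gpt-4 and the
// 16k variant get 8000, everything else stays at 4000.
const getMaxToken = (modelId: string) => {
  if (['claude-instant-1', 'claude-2'].includes(modelId))
    return 30 * 1000
  if (['gpt-4', 'gpt-3.5-turbo-16k'].includes(modelId))
    return 8000
  return 4000
}

console.log(getMaxToken('claude-2')) // 30000
console.log(getMaxToken('gpt-4')) // 8000
console.log(getMaxToken('gpt-3.5-turbo')) // 4000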
@@ -16,6 +16,7 @@ import ConfigModel from '@/app/components/app/configuration/config-model'
 import Config from '@/app/components/app/configuration/config'
 import Debug from '@/app/components/app/configuration/debug'
 import Confirm from '@/app/components/base/confirm'
+import { ProviderType } from '@/types/app'
 import type { AppDetailResponse } from '@/models/app'
 import { ToastContext } from '@/app/components/base/toast'
 import { fetchTenantInfo } from '@/service/common'
@@ -67,7 +68,7 @@ const Configuration: FC = () => {
     frequency_penalty: 1, // -2-2
   })
   const [modelConfig, doSetModelConfig] = useState<ModelConfig>({
-    provider: 'openai',
+    provider: ProviderType.openai,
     model_id: 'gpt-3.5-turbo',
     configs: {
       prompt_template: '',
@@ -84,8 +85,9 @@ const Configuration: FC = () => {
     doSetModelConfig(newModelConfig)
   }

-  const setModelId = (modelId: string) => {
+  const setModelId = (modelId: string, provider: ProviderType) => {
     const newModelConfig = produce(modelConfig, (draft: any) => {
+      draft.provider = provider
       draft.model_id = modelId
     })
     setModelConfig(newModelConfig)
......
+export enum ProviderType {
+  openai = 'openai',
+  anthropic = 'anthropic',
+}
+
 export enum AppType {
   'chat' = 'chat',
   'completion' = 'completion',
......
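Taken together with the new ProviderType enum above, the selected provider now travels from the option entry (provider: ProviderType.anthropic on the Claude models) through handleSelectModel into the page-level setModelId, which writes both provider and model_id into the model config. A minimal sketch of that last step (standalone TypeScript; assumes immer is installed, as the diff's use of produce implies, and reduces ModelConfig to the two fields involved):

import { produce } from 'immer'

enum ProviderType {
  openai = 'openai',
  anthropic = 'anthropic',
}

type ModelConfig = { provider: ProviderType; model_id: string }

let modelConfig: ModelConfig = { provider: ProviderType.openai, model_id: 'gpt-3.5-turbo' }

// Mirrors the updated setModelId in the configuration page: provider and
// model id are written together in one immutable update.
const setModelId = (modelId: string, provider: ProviderType) => {
  modelConfig = produce(modelConfig, (draft) => {
    draft.provider = provider
    draft.model_id = modelId
  })
}

setModelId('claude-2', ProviderType.anthropic)
// modelConfig is now { provider: 'anthropic', model_id: 'claude-2' }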