Commit 0518da1e authored by Joel

feat: handle llm memory

parent 6f6f0322
import { MemoryRole } from '../../types'
import { BlockEnum } from '../../types'
import type { LLMNodeType } from './types'
import { Resolution } from '@/types/app'
export const mockData: LLMNodeType = {
title: 'Test',
desc: 'Test',
type: 'Test',
type: BlockEnum.LLM,
model: {
provider: 'openai',
name: 'gpt-4',
......@@ -26,7 +26,10 @@ export const mockData: LLMNodeType = {
],
prompt: [],
memory: {
role_prefix: MemoryRole.assistant,
role_prefix: {
user: 'user: ',
assistant: 'assistant: ',
},
window: {
enabled: false,
size: 0,
......
......@@ -26,9 +26,11 @@ const Panel: FC = () => {
handleContextVarChange,
handleMemoryChange,
} = useConfig(mockData)
const isChatApp = true // TODO: get from app context
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatMode = modelMode === 'chat'
const isChatModel = modelMode === 'chat'
const isCompletionModel = !isChatModel
return (
<div className='mt-2'>
......@@ -84,14 +86,18 @@ const Panel: FC = () => {
Prompt
</Field>
{/* */}
{isChatApp && isChatApp && (
<div className='text-xs text-gray-300'>Memory examples(Designing)</div>
)}
{/* Memory */}
{isChatMode && (
{isChatApp && (
<>
<MemoryConfig
readonly={readOnly}
payload={inputs.memory}
onChange={handleMemoryChange}
canSetRoleName
canSetRoleName={isCompletionModel}
/>
<Split />
</>
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment