Unverified Commit 07fe10d1 authored by Yeuoly

feat: upload image

parent e8210ef7
@@ -3,7 +3,7 @@ from typing import Generator, List, Optional, Tuple, Union, cast
 from core.application_queue_manager import ApplicationQueueManager, PublishFrom
 from core.entities.application_entities import AppOrchestrationConfigEntity, ModelConfigEntity, \
-    PromptTemplateEntity, ExternalDataVariableEntity, ApplicationGenerateEntity, InvokeFrom
+    PromptTemplateEntity, ExternalDataVariableEntity, ApplicationGenerateEntity, InvokeFrom, AgentEntity
 from core.file.file_obj import FileObj
 from core.memory.token_buffer_memory import TokenBufferMemory
 from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
@@ -57,6 +57,7 @@ class AppRunner:
         prompt_messages, stop = self.organize_prompt_messages(
             app_record=app_record,
             model_config=model_config,
+            agent_config=None,
             prompt_template_entity=prompt_template_entity,
             inputs=inputs,
             files=files,
@@ -116,6 +117,7 @@ class AppRunner:
                                  prompt_template_entity: PromptTemplateEntity,
                                  inputs: dict[str, str],
                                  files: list[FileObj],
+                                 agent_config: Optional[AgentEntity] = None,
                                  query: Optional[str] = None,
                                  context: Optional[str] = None,
                                  memory: Optional[TokenBufferMemory] = None) \
@@ -144,7 +146,8 @@ class AppRunner:
                 files=files,
                 context=context,
                 memory=memory,
-                model_config=model_config
+                model_config=model_config,
+                agent_config=agent_config
             )
         else:
             prompt_messages = prompt_transform.get_advanced_prompt(
...
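The hunks above thread a new optional agent_config parameter from the AppRunner entry point down to the prompt transform; the base runner passes None, so non-agent apps behave exactly as before. A minimal, self-contained sketch of that pattern (not part of this commit; AgentConfig is a hypothetical stand-in for the real AgentEntity):

```python
# Hypothetical, simplified illustration of the optional agent_config threading.
from dataclasses import dataclass
from typing import Optional


@dataclass
class AgentConfig:        # stand-in for the real AgentEntity
    strategy: str         # e.g. 'function_call', 'react', 'cot'


def get_prompt(query: str, agent_config: Optional[AgentConfig]) -> dict:
    # Placeholder for the prompt-transform layer: only the routing matters here.
    if agent_config is not None:
        return {'role': 'user', 'content': query, 'agent_strategy': agent_config.strategy}
    return {'role': 'user', 'content': query}


def organize_prompt_messages(query: str,
                             agent_config: Optional[AgentConfig] = None) -> dict:
    # Runner-level wrapper: forwards agent_config unchanged; None keeps the old behaviour.
    return get_prompt(query, agent_config)


print(organize_prompt_messages('hello'))                                  # non-agent path
print(organize_prompt_messages('hello', AgentConfig(strategy='react')))   # agent path
```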
@@ -80,6 +80,7 @@ class AssistantApplicationRunner(AppRunner):
         prompt_messages, _ = self.organize_prompt_messages(
             app_record=app_record,
             model_config=app_orchestration_config.model_config,
+            agent_config=app_orchestration_config.agent,
             prompt_template_entity=app_orchestration_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -148,6 +149,7 @@ class AssistantApplicationRunner(AppRunner):
         prompt_messages, _ = self.organize_prompt_messages(
             app_record=app_record,
             model_config=app_orchestration_config.model_config,
+            agent_config=app_orchestration_config.agent,
             prompt_template_entity=app_orchestration_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -188,6 +190,7 @@ class AssistantApplicationRunner(AppRunner):
         prompt_message, _ = self.organize_prompt_messages(
             app_record=app_record,
             model_config=app_orchestration_config.model_config,
+            agent_config=app_orchestration_config.agent,
             prompt_template_entity=app_orchestration_config.prompt_template,
             inputs=inputs,
             files=files,
...
@@ -37,7 +37,11 @@ class TokenBufferMemory:
         prompt_messages = []
         for message in messages:
             files = message.message_files
+            config = message.app_model_config
             if files:
-                file_objs = message_file_parser.transform_message_files(
-                    files, message.app_model_config
-                )
+                if config.agent_mode_dict.get('enabled', False) and config.agent_mode_dict.get('strategy') in ['function_call', 'react', 'cot']:
+                    prompt_messages.append(UserPromptMessage(content=message.query))
+                else:
+                    file_objs = message_file_parser.transform_message_files(
+                        files, message.app_model_config
+                    )
...
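The TokenBufferMemory hunk above changes how past turns are rebuilt: when the stored app config has agent mode enabled with a function_call/react/cot strategy, a user turn that had files attached is replayed as its plain text query; otherwise the files are transformed back into multimodal content as before. A self-contained sketch of that rule (hypothetical names; the real code reads message.app_model_config and uses message_file_parser):

```python
# Hypothetical, simplified sketch of the agent-mode check added to the memory rebuild.
from typing import Any


def rebuild_user_turn(query: str,
                      file_urls: list[str],
                      agent_mode: dict[str, Any]) -> dict:
    # agent_mode mimics app_model_config.agent_mode_dict from the diff above.
    agent_enabled = (
        agent_mode.get('enabled', False)
        and agent_mode.get('strategy') in ['function_call', 'react', 'cot']
    )
    if file_urls and not agent_enabled:
        # non-agent app: replay the turn as multimodal content (text + images)
        content = [{'type': 'text', 'text': query}]
        content += [{'type': 'image_url', 'image_url': url} for url in file_urls]
        return {'role': 'user', 'content': content}
    # agent app, or no files: replay the plain text query only
    return {'role': 'user', 'content': query}


print(rebuild_user_turn('what is in this image?',
                        ['https://example.com/a.png'],
                        {'enabled': True, 'strategy': 'function_call'}))
```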
@@ -5,7 +5,7 @@ import re
 from typing import List, Optional, Tuple, cast
 from core.entities.application_entities import (AdvancedCompletionPromptTemplateEntity, ModelConfigEntity,
-                                                PromptTemplateEntity)
+                                                PromptTemplateEntity, AgentEntity)
 from core.file.file_obj import FileObj
 from core.memory.token_buffer_memory import TokenBufferMemory
 from core.model_runtime.entities.message_entities import (AssistantPromptMessage, PromptMessage, PromptMessageRole,
@@ -62,7 +62,8 @@ class PromptTransform:
                    files: List[FileObj],
                    context: Optional[str],
                    memory: Optional[TokenBufferMemory],
-                   model_config: ModelConfigEntity) -> \
+                   model_config: ModelConfigEntity,
+                   agent_config: AgentEntity) -> \
             Tuple[List[PromptMessage], Optional[List[str]]]:
         app_mode = AppMode.value_of(app_mode)
         model_mode = ModelMode.value_of(model_config.mode)
@@ -84,7 +85,8 @@ class PromptTransform:
                 files=files,
                 context=context,
                 memory=memory,
-                model_config=model_config
+                model_config=model_config,
+                agent_config=agent_config
             )
         else:
             stops = prompt_rules.get('stops')
@@ -219,7 +221,8 @@ class PromptTransform:
             context: Optional[str],
             files: List[FileObj],
             memory: Optional[TokenBufferMemory],
-            model_config: ModelConfigEntity) -> List[PromptMessage]:
+            model_config: ModelConfigEntity,
+            agent_config: Optional[AgentEntity] = None) -> List[PromptMessage]:
         prompt_messages = []
         context_prompt_content = ''
@@ -256,10 +259,12 @@
                 )
         if files:
-            prompt_message_contents = [TextPromptMessageContent(data=query)]
-            for file in files:
-                prompt_message_contents.append(file.prompt_message_content)
-            prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
+            if agent_config and agent_config.strategy in AgentEntity.Strategy:
+                prompt_messages.append(UserPromptMessage(content=query))
+            else:
+                prompt_message_contents = [TextPromptMessageContent(data=query)]
+                for file in files:
+                    prompt_message_contents.append(file.prompt_message_content)
+                prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
             prompt_messages.append(UserPromptMessage(content=query))
...
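The final hunk applies the same rule to the current turn in PromptTransform: if files are attached and an agent config with a recognized strategy is present, the user message carries only the query text; otherwise the files are inlined as prompt message contents alongside the text, as before this commit. A self-contained sketch under the same assumptions (hypothetical simplified types; the real code builds TextPromptMessageContent and file.prompt_message_content objects):

```python
# Hypothetical, simplified sketch of the current-turn file gating in the prompt transform.
from dataclasses import dataclass
from typing import Optional


@dataclass
class AgentConfig:        # stand-in for the real AgentEntity
    strategy: str


def build_user_message(query: str,
                       file_contents: list[dict],
                       agent_config: Optional[AgentConfig]) -> dict:
    if file_contents:
        if agent_config is not None:
            # agent mode: files are handled elsewhere, keep the prompt text-only
            return {'role': 'user', 'content': query}
        # non-agent mode: inline the files next to the text
        return {'role': 'user', 'content': [{'type': 'text', 'text': query}] + file_contents}
    return {'role': 'user', 'content': query}


images = [{'type': 'image_url', 'image_url': 'https://example.com/a.png'}]
print(build_user_message('describe this image', images, None))
print(build_user_message('describe this image', images, AgentConfig(strategy='function_call')))
```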