Commit 297b33aa authored by takatost's avatar takatost

lint

parent 0d858cc0
from typing import Optional from typing import Optional
from core.entities.application_entities import PromptTemplateEntity, ModelConfigEntity, \ from core.entities.application_entities import (
AdvancedCompletionPromptTemplateEntity AdvancedCompletionPromptTemplateEntity,
ModelConfigEntity,
PromptTemplateEntity,
)
from core.file.file_obj import FileObj from core.file.file_obj import FileObj
from core.memory.token_buffer_memory import TokenBufferMemory from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageRole, UserPromptMessage, \ from core.model_runtime.entities.message_entities import (
SystemPromptMessage, AssistantPromptMessage, TextPromptMessageContent AssistantPromptMessage,
PromptMessage,
PromptMessageRole,
SystemPromptMessage,
TextPromptMessageContent,
UserPromptMessage,
)
from core.prompt.prompt_template import PromptTemplateParser from core.prompt.prompt_template import PromptTemplateParser
from core.prompt.prompt_transform import PromptTransform from core.prompt.prompt_transform import PromptTransform
from core.prompt.simple_prompt_transform import ModelMode from core.prompt.simple_prompt_transform import ModelMode
......
import enum import enum
import json import json
import os import os
from typing import Optional, Tuple from typing import Optional
from core.entities.application_entities import ( from core.entities.application_entities import (
ModelConfigEntity, ModelConfigEntity,
...@@ -85,7 +85,7 @@ class SimplePromptTransform(PromptTransform): ...@@ -85,7 +85,7 @@ class SimplePromptTransform(PromptTransform):
query: Optional[str] = None, query: Optional[str] = None,
context: Optional[str] = None, context: Optional[str] = None,
histories: Optional[str] = None, histories: Optional[str] = None,
) -> Tuple[str, dict]: ) -> tuple[str, dict]:
# get prompt template # get prompt template
prompt_template_config = self.get_prompt_template( prompt_template_config = self.get_prompt_template(
app_mode=app_mode, app_mode=app_mode,
...@@ -160,7 +160,7 @@ class SimplePromptTransform(PromptTransform): ...@@ -160,7 +160,7 @@ class SimplePromptTransform(PromptTransform):
files: list[FileObj], files: list[FileObj],
memory: Optional[TokenBufferMemory], memory: Optional[TokenBufferMemory],
model_config: ModelConfigEntity) \ model_config: ModelConfigEntity) \
-> Tuple[list[PromptMessage], Optional[list[str]]]: -> tuple[list[PromptMessage], Optional[list[str]]]:
prompt_messages = [] prompt_messages = []
# get prompt # get prompt
...@@ -193,7 +193,7 @@ class SimplePromptTransform(PromptTransform): ...@@ -193,7 +193,7 @@ class SimplePromptTransform(PromptTransform):
files: list[FileObj], files: list[FileObj],
memory: Optional[TokenBufferMemory], memory: Optional[TokenBufferMemory],
model_config: ModelConfigEntity) \ model_config: ModelConfigEntity) \
-> Tuple[list[PromptMessage], Optional[list[str]]]: -> tuple[list[PromptMessage], Optional[list[str]]]:
# get prompt # get prompt
prompt, prompt_rules = self.get_prompt_str_and_rules( prompt, prompt_rules = self.get_prompt_str_and_rules(
app_mode=AppMode.CHAT, app_mode=AppMode.CHAT,
......
...@@ -4,11 +4,12 @@ from typing import Optional ...@@ -4,11 +4,12 @@ from typing import Optional
from core.application_manager import ApplicationManager from core.application_manager import ApplicationManager
from core.entities.application_entities import ( from core.entities.application_entities import (
DatasetEntity, DatasetEntity,
DatasetRetrieveConfigEntity,
ExternalDataVariableEntity, ExternalDataVariableEntity,
FileUploadEntity, FileUploadEntity,
ModelConfigEntity, ModelConfigEntity,
PromptTemplateEntity, PromptTemplateEntity,
VariableEntity, DatasetRetrieveConfigEntity, VariableEntity,
) )
from core.model_runtime.entities.llm_entities import LLMMode from core.model_runtime.entities.llm_entities import LLMMode
from core.model_runtime.utils import helper from core.model_runtime.utils import helper
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment