ai-tech / dify · Commits · 695841a3

Commit 695841a3 (unverified), authored Oct 13, 2023 by Garfield Dai, committed by GitHub on Oct 13, 2023

Feat/advanced prompt enhancement (#1340)

parent 3efaa713
Showing 4 changed files, with 35 additions and 22 deletions:

- api/controllers/console/app/advanced_prompt_template.py (+1, −2)
- api/core/prompt/advanced_prompt_templates.py (+8, −4)
- api/services/advanced_prompt_template_service.py (+23, −16)
- api/services/app_model_config_service.py (+3, −0)
api/controllers/console/app/advanced_prompt_template.py

```diff
@@ -20,7 +20,6 @@ class AdvancedPromptTemplateList(Resource):
         parser.add_argument('model_name', type=str, required=True, location='args')
         args = parser.parse_args()
 
-        service = AdvancedPromptTemplateService()
-        return service.get_prompt(args)
+        return AdvancedPromptTemplateService.get_prompt(args)
 
 
 api.add_resource(AdvancedPromptTemplateList, '/app/prompt-templates')
\ No newline at end of file
```
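The controller drops the per-request service instantiation: with `get_prompt` now a classmethod (see the service diff below), the resource calls it directly on the class. A minimal sketch of exercising this endpoint follows; the host, port, and API prefix are assumptions, and only the `model_name` argument is visibly parsed in this hunk (the remaining query parameters are parsed on lines outside the diff but consumed by the service).

```python
# Illustrative request against the '/app/prompt-templates' resource.
# ASSUMPTIONS: base URL and route prefix; parameters other than
# 'model_name' are inferred from the service code, not this hunk.
import requests

resp = requests.get(
    "http://localhost:5001/console/api/app/prompt-templates",  # assumed prefix
    params={
        "app_mode": "chat",
        "model_mode": "completion",
        "model_name": "gpt-3.5-turbo",
        "has_context": "true",
    },
)
print(resp.json())
```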
api/core/prompt/advanced_prompt_templates.py

```diff
@@ -11,7 +11,8 @@ CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "Human",
             "assistant_prefix": "Assistant"
         }
-    }
+    },
+    "stop": ["Human:"]
 }
 
 CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -37,7 +38,8 @@ COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["Human:"]
 }
 
 BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
@@ -49,7 +51,8 @@ BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "用户",
             "assistant_prefix": "助手"
         }
-    }
+    },
+    "stop": ["用户:"]
 }
 
 BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -75,5 +78,6 @@ BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["用户:"]
 }
```
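Each of the four hunks attaches a top-level "stop" list to a completion-style template: "Human:" for the common templates and "用户:" ("User:") for the Baichuan ones, so generation halts before the model starts writing the next user turn. A hedged illustration of the effect such a stop sequence has downstream; this helper is not dify's implementation, just a sketch of the behavior.

```python
# Sketch of what a stop sequence does: truncate the completion at the
# first occurrence of any stop string, so the model never speaks the
# user's next turn. NOT dify's code; purely illustrative.
def apply_stop_sequences(text: str, stops: list[str]) -> str:
    for stop in stops:
        idx = text.find(stop)
        if idx != -1:
            text = text[:idx]
    return text

raw = "Sure, here is the answer.\nHuman: ask me something else"
print(apply_stop_sequences(raw, ["Human:"]))
# -> "Sure, here is the answer.\n"
```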
api/services/advanced_prompt_template_service.py

```diff
@@ -6,51 +6,58 @@ from core.prompt.advanced_prompt_templates import CHAT_APP_COMPLETION_PROMPT_CON
 
 class AdvancedPromptTemplateService:
 
-    def get_prompt(self, args: dict) -> dict:
+    @classmethod
+    def get_prompt(cls, args: dict) -> dict:
         app_mode = args['app_mode']
         model_mode = args['model_mode']
         model_name = args['model_name']
         has_context = args['has_context']
 
         if 'baichuan' in model_name:
-            return self.get_baichuan_prompt(app_mode, model_mode, has_context)
+            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
         else:
-            return self.get_common_prompt(app_mode, model_mode, has_context)
+            return cls.get_common_prompt(app_mode, model_mode, has_context)
 
-    def get_common_prompt(self, app_mode: str, model_mode: str, has_context: bool) -> dict:
+    @classmethod
+    def get_common_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
+        context_prompt = copy.deepcopy(CONTEXT)
+
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
 
-    def get_completion_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']
 
         return prompt_template
 
-    def get_chat_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']
 
         return prompt_template
 
-    def get_baichuan_prompt(self, app_mode: str, model_mode: str, has_context: bool) -> dict:
+    @classmethod
+    def get_baichuan_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
+        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)
+
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
\ No newline at end of file
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
\ No newline at end of file
```
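The service refactor does three things: every method becomes a @classmethod (matching the controller change above); the annotations are corrected so prompt_template is a dict and has_context a str (it arrives as the query-string value 'true', not a bool); and the CONTEXT / BAICHUAN_CONTEXT constants are deep-copied once per call, so mutating a returned template can never leak into the shared module-level constants. A hedged usage sketch; the import path is assumed from the file's location under api/, and the argument values are examples.

```python
# Calling the refactored service without instantiating it.
# ASSUMPTION: run from within the 'api' package so this import resolves.
from services.advanced_prompt_template_service import AdvancedPromptTemplateService

prompt_config = AdvancedPromptTemplateService.get_prompt({
    "app_mode": "chat",
    "model_mode": "completion",
    "model_name": "baichuan-13b",  # any name containing 'baichuan' selects the Baichuan templates
    "has_context": "true",         # a string, compared against 'true' inside the service
})

# With has_context == 'true', the (deep-copied) context prompt has been
# prepended to the template's completion text.
print(prompt_config["completion_prompt_config"]["prompt"]["text"])
```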
api/services/app_model_config_service.py

```diff
@@ -56,6 +56,9 @@ class AppModelConfigService:
             cp["stop"] = []
         elif not isinstance(cp["stop"], list):
             raise ValueError("stop in model.completion_params must be of list type")
 
+        if len(cp["stop"]) > 4:
+            raise ValueError("stop sequences must be less than 4")
+
         # Filter out extra parameters
         filtered_cp = {
```
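The new guard caps the number of stop sequences at four (the error message says "less than 4", but the check actually permits up to four). In isolation the validation behaves like the sketch below; validate_stop is a hypothetical name, and the initial `if` condition is an assumption, since the hunk starts at the `cp["stop"] = []` line.

```python
# Standalone sketch of the added validation. ASSUMPTIONS: the function
# name and the first 'if' condition; the rest mirrors the diff.
def validate_stop(cp: dict) -> dict:
    if "stop" not in cp or cp["stop"] is None:  # assumed guard, elided in the hunk
        cp["stop"] = []
    elif not isinstance(cp["stop"], list):
        raise ValueError("stop in model.completion_params must be of list type")

    if len(cp["stop"]) > 4:
        raise ValueError("stop sequences must be less than 4")

    return cp

print(validate_stop({"stop": ["Human:"]}))            # passes
# validate_stop({"stop": ["a", "b", "c", "d", "e"]})  # raises ValueError
```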