Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
D
dify
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ai-tech
dify
Commits
0c5892bc
Unverified
Commit
0c5892bc
authored
Jan 04, 2024
by
takatost
Committed by
GitHub
Jan 04, 2024
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
fix: zhipuai chatglm turbo prompts must alternate between user and assistant messages in sequence (#1899)
parent
91ff07fc
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
29 additions
and
5 deletions
+29
-5
llm.py
api/core/model_runtime/model_providers/zhipuai/llm/llm.py
+29
-5
No files found.
api/core/model_runtime/model_providers/zhipuai/llm/llm.py
View file @
0c5892bc
...
...
@@ -8,8 +8,9 @@ from typing import (
Union
)
from
core.model_runtime.entities.message_entities
import
PromptMessage
,
PromptMessageTool
,
UserPromptMessage
,
AssistantPromptMessage
,
\
SystemPromptMessage
from
core.model_runtime.entities.message_entities
import
PromptMessage
,
PromptMessageTool
,
UserPromptMessage
,
\
AssistantPromptMessage
,
\
SystemPromptMessage
,
PromptMessageRole
from
core.model_runtime.entities.llm_entities
import
LLMResult
,
LLMResultChunk
,
\
LLMResultChunkDelta
from
core.model_runtime.errors.validate
import
CredentialsValidateFailedError
...
...
@@ -111,16 +112,39 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel):
if
len
(
prompt_messages
)
==
0
:
raise
ValueError
(
'At least one message is required'
)
if
prompt_messages
[
0
]
.
role
.
value
==
'system'
:
if
prompt_messages
[
0
]
.
role
==
PromptMessageRole
.
SYSTEM
:
if
not
prompt_messages
[
0
]
.
content
:
prompt_messages
=
prompt_messages
[
1
:]
# resolve zhipuai model not support system message and user message, assistant message must be in sequence
new_prompt_messages
=
[]
for
prompt_message
in
prompt_messages
:
copy_prompt_message
=
prompt_message
.
copy
()
if
copy_prompt_message
.
role
in
[
PromptMessageRole
.
USER
,
PromptMessageRole
.
SYSTEM
,
PromptMessageRole
.
TOOL
]:
if
not
isinstance
(
copy_prompt_message
.
content
,
str
):
# not support image message
continue
if
new_prompt_messages
and
new_prompt_messages
[
-
1
]
.
role
==
PromptMessageRole
.
USER
:
new_prompt_messages
[
-
1
]
.
content
+=
"
\n\n
"
+
copy_prompt_message
.
content
else
:
if
copy_prompt_message
.
role
==
PromptMessageRole
.
USER
:
new_prompt_messages
.
append
(
copy_prompt_message
)
else
:
new_prompt_message
=
UserPromptMessage
(
content
=
copy_prompt_message
.
content
)
new_prompt_messages
.
append
(
new_prompt_message
)
else
:
if
new_prompt_messages
and
new_prompt_messages
[
-
1
]
.
role
==
PromptMessageRole
.
ASSISTANT
:
new_prompt_messages
[
-
1
]
.
content
+=
"
\n\n
"
+
copy_prompt_message
.
content
else
:
new_prompt_messages
.
append
(
copy_prompt_message
)
params
=
{
'model'
:
model
,
'prompt'
:
[{
'role'
:
prompt_message
.
role
.
value
if
prompt_message
.
role
.
value
!=
'system'
else
'user'
,
'role'
:
prompt_message
.
role
.
value
,
'content'
:
prompt_message
.
content
}
for
prompt_message
in
prompt_messages
],
}
for
prompt_message
in
new_
prompt_messages
],
**
model_parameters
}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment