ai-tech / dify · Commits · 2851a9f0

Unverified commit 2851a9f0, authored Oct 11, 2023 by takatost; committed by GitHub on Oct 11, 2023.
feat: optimize minimax llm call (#1312)
parent c536f85b

Showing 3 changed files with 287 additions and 12 deletions (+287, -12):
api/core/model_providers/models/llm/minimax_model.py (+10, -9)
api/core/model_providers/providers/minimax_provider.py (+4, -3)
api/core/third_party/langchain/llms/minimax_llm.py (+273, -0)
api/core/model_providers/models/llm/minimax_model.py
-import decimal
 from typing import List, Optional, Any
 
 from langchain.callbacks.manager import Callbacks
-from langchain.llms import Minimax
 from langchain.schema import LLMResult
 
 from core.model_providers.error import LLMBadRequestError
 from core.model_providers.models.llm.base import BaseLLM
-from core.model_providers.models.entity.message import PromptMessage, MessageType
+from core.model_providers.models.entity.message import PromptMessage
 from core.model_providers.models.entity.model_params import ModelMode, ModelKwargs
+from core.third_party.langchain.llms.minimax_llm import MinimaxChatLLM
 
 
 class MinimaxModel(BaseLLM):
-    model_mode: ModelMode = ModelMode.COMPLETION
+    model_mode: ModelMode = ModelMode.CHAT
 
     def _init_client(self) -> Any:
         provider_model_kwargs = self._to_model_kwargs_input(self.model_rules, self.model_kwargs)
-        return Minimax(
+        return MinimaxChatLLM(
             model=self.name,
-            model_kwargs={
-                'stream': False
-            },
+            streaming=self.streaming,
             callbacks=self.callbacks,
             **self.credentials,
             **provider_model_kwargs
@@ -49,7 +46,7 @@ class MinimaxModel(BaseLLM):
         :return:
         """
         prompts = self._get_prompt_from_messages(messages)
-        return max(self._client.get_num_tokens(prompts), 0)
+        return max(self._client.get_num_tokens_from_messages(prompts), 0)
 
     def get_currency(self):
         return 'RMB'
@@ -65,3 +62,7 @@ class MinimaxModel(BaseLLM):
             return LLMBadRequestError(f"Minimax: {str(ex)}")
         else:
             return ex
+
+    @property
+    def support_streaming(self):
+        return True
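Taken together, these hunks move the Minimax model from COMPLETION to CHAT mode, construct the client through the project's own MinimaxChatLLM wrapper instead of langchain.llms.Minimax, forward the streaming flag rather than hard-coding 'stream': False, count tokens with the chat-message API, and declare streaming support. For readers skimming the diff, here is a standalone rendering of the rewritten _init_client; the helper name is invented here, and MinimaxChatLLM's constructor arguments are inferred from this hunk rather than from the collapsed new file:

from typing import Any

# Inferred from the hunks above; the real implementation lives in
# api/core/third_party/langchain/llms/minimax_llm.py (collapsed below).
from core.third_party.langchain.llms.minimax_llm import MinimaxChatLLM


def init_minimax_chat_client(name: str, streaming: bool, callbacks: Any,
                             credentials: dict, provider_model_kwargs: dict) -> Any:
    # Mirrors MinimaxModel._init_client after this commit: streaming is passed
    # straight through instead of forcing model_kwargs={'stream': False}, and the
    # provider credentials plus the resolved model kwargs (temperature, max_tokens, ...)
    # are splatted into the wrapper's constructor.
    return MinimaxChatLLM(
        model=name,
        streaming=streaming,
        callbacks=callbacks,
        **credentials,
        **provider_model_kwargs,
    )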
api/core/model_providers/providers/minimax_provider.py
@@ -2,7 +2,7 @@ import json
 from json import JSONDecodeError
 from typing import Type
 
-from langchain.llms import Minimax
+from langchain.schema import HumanMessage
 
 from core.helper import encrypter
 from core.model_providers.models.base import BaseProviderModel
@@ -10,6 +10,7 @@ from core.model_providers.models.embedding.minimax_embedding import MinimaxEmbed
 from core.model_providers.models.entity.model_params import ModelKwargsRules, KwargRule, ModelType
 from core.model_providers.models.llm.minimax_model import MinimaxModel
 from core.model_providers.providers.base import BaseModelProvider, CredentialsValidateFailedError
+from core.third_party.langchain.llms.minimax_llm import MinimaxChatLLM
 from models.provider import ProviderType, ProviderQuotaType
@@ -98,14 +99,14 @@ class MinimaxProvider(BaseModelProvider):
                 'minimax_api_key': credentials['minimax_api_key'],
             }
 
-            llm = Minimax(
+            llm = MinimaxChatLLM(
                 model='abab5.5-chat',
                 max_tokens=10,
                 temperature=0.01,
                 **credential_kwargs
             )
 
-            llm("ping")
+            llm([HumanMessage(content='ping')])
         except Exception as ex:
             raise CredentialsValidateFailedError(str(ex))
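A note on the validation change: a LangChain chat model is called with a list of messages and returns a message object, which is why the plain-string ping becomes llm([HumanMessage(content='ping')]). Below is a rough, self-contained sketch of the same check; the function name is invented, and minimax_group_id is assumed to be the other credential key, since only minimax_api_key is visible in this hunk:

from langchain.schema import HumanMessage

from core.third_party.langchain.llms.minimax_llm import MinimaxChatLLM  # as imported above


def check_minimax_credentials(credentials: dict) -> None:
    # Keep the probe cheap: 10 output tokens, near-deterministic temperature.
    llm = MinimaxChatLLM(
        model='abab5.5-chat',
        max_tokens=10,
        temperature=0.01,
        minimax_api_key=credentials['minimax_api_key'],
        minimax_group_id=credentials['minimax_group_id'],  # assumed second credential key
    )
    # Chat models take a list of messages; any auth or network failure raises,
    # which the provider then wraps in CredentialsValidateFailedError.
    llm([HumanMessage(content='ping')])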
api/core/third_party/langchain/llms/minimax_llm.py (new file, mode 100644, +273 lines; diff is collapsed in this view)
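The new minimax_llm.py (273 lines) is collapsed here, so its contents are not reproduced. For orientation only, the sketch below shows the general shape of a LangChain chat-model subclass that the rest of the commit plugs into; apart from the constructor arguments already seen in the other files, everything in it (class name, endpoint, payload fields, response parsing) is an assumption and will differ from the real file:

from typing import Any, List, Optional

import requests
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.chat_models.base import SimpleChatModel
from langchain.schema import BaseMessage


class MinimaxChatSketch(SimpleChatModel):
    """Illustrative-only stand-in for the collapsed MinimaxChatLLM."""

    model: str = 'abab5.5-chat'
    minimax_group_id: str
    minimax_api_key: str

    @property
    def _llm_type(self) -> str:
        return 'minimax-chat-sketch'

    def _call(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        # Field names below are guesses at Minimax's chat-completion payload,
        # not copied from the commit.
        payload = {
            'model': self.model,
            'messages': [{'sender_type': 'USER', 'text': m.content} for m in messages],
        }
        resp = requests.post(
            'https://api.minimax.chat/v1/text/chatcompletion',
            params={'GroupId': self.minimax_group_id},
            headers={'Authorization': f'Bearer {self.minimax_api_key}'},
            json=payload,
            timeout=60,
        )
        resp.raise_for_status()
        return resp.json().get('reply', '')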