Commit eceb4209, authored Jul 15, 2023 by John Wang
feat: optimize error message
Parent: dd500e3b
Showing 3 changed files with 16 additions and 5 deletions (+16, -5)
api/controllers/console/app/error.py (+1, -1)
api/controllers/console/workspace/providers.py (+2, -1)
api/core/llm/provider/anthropic_provider.py (+13, -3)
api/controllers/console/app/error.py

@@ -16,7 +16,7 @@ class ProviderNotInitializeError(BaseHTTPException):
 class ProviderQuotaExceededError(BaseHTTPException):
     error_code = 'provider_quota_exceeded'
-    description = "Your quota for Dify Hosted OpenAI has been exhausted. " \
+    description = "Your quota for Dify Hosted Model Provider has been exhausted. " \
                   "Please go to Settings -> Model Provider to complete your own provider credentials."
     code = 400
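For context: ProviderQuotaExceededError is the HTTP 400 error the console API raises once the hosted quota is used up, so only its user-facing description changes here. A minimal, hypothetical sketch of how an endpoint might surface it (the QuotaExceededError import from core.llm.error and the wrapper function are assumptions, not part of this commit):

# Illustrative sketch only, not code from this commit.
from controllers.console.app.error import ProviderQuotaExceededError
from core.llm.error import QuotaExceededError  # assumed internal error type


def run_with_quota_handling(invoke):
    """Call an LLM invocation function and translate quota errors for the API layer."""
    try:
        return invoke()
    except QuotaExceededError:
        # The client receives HTTP 400 with error_code 'provider_quota_exceeded'
        # and the updated "Dify Hosted Model Provider" description.
        raise ProviderQuotaExceededError()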
api/controllers/console/workspace/providers.py

@@ -123,9 +123,10 @@ class ProviderTokenApi(Resource):
                 is_valid=token_is_valid
             )
             db.session.add(provider_model)

-        if provider_model.is_valid:
+        if provider in [ProviderName.OPENAI.value, ProviderName.AZURE_OPENAI.value] \
+                and provider_model.is_valid:
             other_providers = db.session.query(Provider).filter(
                 Provider.tenant_id == tenant.id,
                 Provider.provider_name.in_([ProviderName.OPENAI.value, ProviderName.AZURE_OPENAI.value]),
                 Provider.provider_name != provider,
                 Provider.provider_type == ProviderType.CUSTOM.value
             ).all()
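The behavioural change in this hunk is easiest to see in isolation: previously the block ran whenever the saved token was valid, for any provider; with the new guard it only runs when the saved provider is itself OpenAI or Azure OpenAI. A rough sketch of the condition, assuming the ProviderName enum values are the strings 'openai' and 'azure_openai':

# Sketch only; mirrors the new guard rather than quoting the commit.
HOSTED_QUOTA_PROVIDERS = {'openai', 'azure_openai'}  # assumed ProviderName values


def should_touch_other_custom_providers(provider_name: str, token_is_valid: bool) -> bool:
    # Old behaviour: return token_is_valid
    # New behaviour: also require the saved provider to be OpenAI or Azure OpenAI
    return token_is_valid and provider_name in HOSTED_QUOTA_PROVIDERS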
api/core/llm/provider/anthropic_provider.py

@@ -10,7 +10,7 @@ from core import hosted_llm_credentials
 from core.llm.error import ProviderTokenNotInitError
 from core.llm.provider.base import BaseProvider
 from core.llm.provider.errors import ValidateFailedError
-from models.provider import ProviderName
+from models.provider import ProviderName, ProviderType


 class AnthropicProvider(BaseProvider):
@@ -90,6 +90,13 @@ class AnthropicProvider(BaseProvider):
         if not provider:
             raise ValidateFailedError(f"OpenAI or Azure OpenAI provider must be configured first.")

+        if provider.provider_type == ProviderType.SYSTEM.value:
+            quota_used = provider.quota_used if provider.quota_used is not None else 0
+            quota_limit = provider.quota_limit if provider.quota_limit is not None else 0
+
+            if quota_used >= quota_limit:
+                raise ValidateFailedError(f"Your quota for Dify Hosted OpenAI has been exhausted, "
+                                          f"please configure OpenAI or Azure OpenAI provider first.")

         try:
             if not isinstance(config, dict):
                 raise ValueError('Config must be a object.')
@@ -112,8 +119,11 @@ class AnthropicProvider(BaseProvider):
             ]
             chat_llm(messages)
-        except (anthropic.APIStatusError, anthropic.APIConnectionError, anthropic.RateLimitError) as ex:
-            raise ValidateFailedError(f"Anthropic: {ex.message}")
+        except anthropic.APIConnectionError:
+            raise ValidateFailedError(f"Anthropic: Connection error.")
+        except (anthropic.APIStatusError, anthropic.RateLimitError) as ex:
+            raise ValidateFailedError(f"Anthropic: Error code: {ex.status_code} - "
+                                      f"{ex.body['error']['type']}: {ex.body['error']['message']}")
         except Exception as ex:
             logging.exception('Anthropic config validation failed')
             raise ex
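Two separate fixes land in this file: a hosted-quota check now runs before the Anthropic config validation call, and connection failures are reported separately from API status errors (anthropic.APIConnectionError carries no HTTP status, while APIStatusError and RateLimitError expose the status_code and body fields that the new message interpolates). A condensed sketch of the added quota logic, pulled out of the provider class for clarity (provider stands for the system OpenAI/Azure OpenAI record; this is not code from the commit):

# Sketch only: the quota check added above, isolated as a helper.
def hosted_quota_exhausted(provider) -> bool:
    quota_used = provider.quota_used if provider.quota_used is not None else 0
    quota_limit = provider.quota_limit if provider.quota_limit is not None else 0
    # With both values defaulting to 0, a system provider that has no quota
    # configured is treated as exhausted (0 >= 0).
    return quota_used >= quota_limit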