Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
D
dify
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ai-tech
dify
Commits
de93272b
Commit
de93272b
authored
Nov 20, 2023
by
jyong
Committed by
takatost
Nov 24, 2023
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Add evaluate service API
parent
14c17f6c
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
104 additions
and
4 deletions
+104
-4
completion.py
api/controllers/service_api/app/completion.py
+88
-3
requirements.txt
api/requirements.txt
+16
-1
No files found.
api/controllers/service_api/app/completion.py
View file @
de93272b
...
...
@@ -2,8 +2,11 @@ import json
import
logging
from
typing
import
Union
,
Generator
from
datasets
import
Dataset
from
flask
import
stream_with_context
,
Response
from
flask_restful
import
reqparse
from
ragas
import
evaluate
from
ragas.metrics
import
answer_relevancy
,
context_precision
,
faithfulness
from
werkzeug.exceptions
import
NotFound
,
InternalServerError
import
services
...
...
@@ -14,7 +17,8 @@ from controllers.service_api.app.error import AppUnavailableError, ProviderNotIn
ProviderModelCurrentlyNotSupportError
from
controllers.service_api.wraps
import
AppApiResource
from
core.conversation_message_task
import
PubHandler
from
core.model_providers.error
import
LLMBadRequestError
,
LLMAuthorizationError
,
LLMAPIUnavailableError
,
LLMAPIConnectionError
,
\
from
core.model_providers.error
import
LLMBadRequestError
,
LLMAuthorizationError
,
LLMAPIUnavailableError
,
\
LLMAPIConnectionError
,
\
LLMRateLimitError
,
ProviderTokenNotInitError
,
QuotaExceededError
,
ModelCurrentlyNotSupportError
from
libs.helper
import
uuid_value
from
services.completion_service
import
CompletionService
...
...
@@ -75,6 +79,84 @@ class CompletionApi(AppApiResource):
raise
InternalServerError
()
class CompletionEvaluateApi(AppApiResource):
    """Service-API endpoint that batch-runs chat queries and scores the
    answers with ragas (answer relevancy, context precision, faithfulness).

    Registered at POST /chat-evaluate. Only valid for apps in 'chat' mode.
    """

    def post(self, app_model, end_user):
        """Run every query in the request through the completion pipeline,
        collect answers and retriever contexts, and return the ragas
        evaluation result.

        :param app_model: the App record resolved by AppApiResource; must be
            in 'chat' mode or AppUnavailableError is raised.
        :param end_user: resolved end user, or None (then created/updated
            from the 'user' field in the JSON body).
        :raises NotFound: when the referenced conversation does not exist.
        :raises AppUnavailableError: wrong app mode or broken model config.
        """
        if app_model.mode != 'chat':
            raise AppUnavailableError()

        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, required=True, location='json')
        # NOTE: default must not be a mutable list (shared across requests);
        # None is normalized to an empty list below.
        parser.add_argument('queries', type=list, location='json', default=None)
        parser.add_argument('files', type=list, required=False, location='json')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'],
                            location='json')
        parser.add_argument('user', type=str, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='dev',
                            location='json')
        args = parser.parse_args()

        if end_user is None and args['user'] is not None:
            end_user = create_or_update_end_user_for_user_id(app_model, args['user'])

        args['auto_generate_name'] = False
        queries = args['queries'] or []

        try:
            questions = []
            answers = []
            contexts = []
            for query in queries:
                args['query'] = query
                # Evaluation aggregates each full answer, so the completion
                # must run in blocking mode: a streaming response is a
                # generator and response['answer'] would fail on it.
                # 'response_mode' is still accepted (and validated) for
                # backward compatibility but is ignored here.
                response = CompletionService.completion(
                    app_model=app_model,
                    user=end_user,
                    args=args,
                    from_source='api',
                    streaming=False,
                )
                questions.append(query)
                answers.append(response['answer'])

                metadata = response['metadata']
                if 'retriever_resources' in metadata and metadata['retriever_resources']:
                    context = [resource['content']
                               for resource in metadata['retriever_resources']]
                else:
                    # ragas requires a non-empty context list per sample.
                    context = ['']
                contexts.append(context)

            ds = Dataset.from_dict({
                "question": questions,
                "answer": answers,
                "contexts": contexts,
            })
            # Returns a ragas Result (dict-like mapping of metric -> score);
            # presumably serialized by flask_restful — TODO confirm.
            result = evaluate(ds, [answer_relevancy, context_precision, faithfulness])
            return result
        except services.errors.conversation.ConversationNotExistsError:
            raise NotFound("Conversation Not Exists.")
        except services.errors.conversation.ConversationCompletedError:
            raise ConversationCompletedError()
        except services.errors.app_model_config.AppModelConfigBrokenError:
            logging.exception("App model config broken.")
            raise AppUnavailableError()
        except ProviderTokenNotInitError as ex:
            raise ProviderNotInitializeError(ex.description)
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ModelCurrentlyNotSupportError:
            raise ProviderModelCurrentlyNotSupportError()
        except (LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError,
                LLMRateLimitError, LLMAuthorizationError) as e:
            raise CompletionRequestError(str(e))
        except ValueError as e:
            # Deliberately re-raised so ValueError propagates to the client
            # instead of being swallowed by the generic handler below.
            raise e
        except Exception as e:
            logging.exception("internal server error.")
            raise InternalServerError()
class
CompletionStopApi
(
AppApiResource
):
def
post
(
self
,
app_model
,
end_user
,
task_id
):
if
app_model
.
mode
!=
'completion'
:
...
...
@@ -166,11 +248,13 @@ def compact_response(response: Union[dict | Generator]) -> Response:
logging
.
exception
(
"App model config broken."
)
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
AppUnavailableError
())
.
get_json
())
+
"
\n\n
"
except
ProviderTokenNotInitError
as
ex
:
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
ProviderNotInitializeError
(
ex
.
description
))
.
get_json
())
+
"
\n\n
"
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
ProviderNotInitializeError
(
ex
.
description
))
.
get_json
())
+
"
\n\n
"
except
QuotaExceededError
:
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
ProviderQuotaExceededError
())
.
get_json
())
+
"
\n\n
"
except
ModelCurrentlyNotSupportError
:
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
ProviderModelCurrentlyNotSupportError
())
.
get_json
())
+
"
\n\n
"
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
ProviderModelCurrentlyNotSupportError
())
.
get_json
())
+
"
\n\n
"
except
(
LLMBadRequestError
,
LLMAPIConnectionError
,
LLMAPIUnavailableError
,
LLMRateLimitError
,
LLMAuthorizationError
)
as
e
:
yield
"data: "
+
json
.
dumps
(
api
.
handle_error
(
CompletionRequestError
(
str
(
e
)))
.
get_json
())
+
"
\n\n
"
...
...
@@ -185,6 +269,7 @@ def compact_response(response: Union[dict | Generator]) -> Response:
# Route registrations for the service API app blueprint.
api.add_resource(CompletionApi, '/completion-messages')
# Added by this commit: batch-evaluate chat answers with ragas metrics.
api.add_resource(CompletionEvaluateApi, '/chat-evaluate')
api.add_resource(CompletionStopApi, '/completion-messages/<string:task_id>/stop')
api.add_resource(ChatApi, '/chat-messages')
api.add_resource(ChatStopApi, '/chat-messages/<string:task_id>/stop')
api/requirements.txt
View file @
de93272b
...
...
@@ -54,4 +54,19 @@ zhipuai==1.0.7
werkzeug==2.3.7
pymilvus==2.3.0
qdrant-client==1.6.4
cohere~=4.32
\ No newline at end of file
cohere~=4.32
pydantic~=1.10.12
requests~=2.31.0
regex~=2023.8.8
jieba3k~=0.35.1
numpy~=1.25.2
httpx~=0.24.1
dataclasses~=0.6
click~=8.1.6
blinker~=1.6.2
botocore~=1.31.17
alembic~=1.11.2
tqdm~=4.66.1
pytz~=2022.7.1
datasets~=2.15.0
ragas~=0.0.20
\ No newline at end of file
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment