Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
D
dify
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
ai-tech
dify
Commits
5bc7a783
Commit
5bc7a783
authored
Aug 01, 2023
by
John Wang
Browse files
Options
Browse Files
Download
Plain Diff
Merge branch 'fix/azure-completion-choices-empty' into deploy/dev
parents
f591699d
af4ba81e
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
60 additions
and
3 deletions
+60
-3
streamable_azure_open_ai.py
api/core/llm/streamable_azure_open_ai.py
+58
-1
basic.tsx
web/app/components/app-sidebar/basic.tsx
+2
-2
No files found.
api/core/llm/streamable_azure_open_ai.py
View file @
5bc7a783
from
langchain.callbacks.manager
import
Callbacks
from
langchain.callbacks.manager
import
Callbacks
,
CallbackManagerForLLMRun
from
langchain.llms
import
AzureOpenAI
from
langchain.llms.openai
import
_streaming_response_template
,
completion_with_retry
,
_update_response
,
\
update_token_usage
from
langchain.schema
import
LLMResult
from
typing
import
Optional
,
List
,
Dict
,
Mapping
,
Any
,
Union
,
Tuple
...
...
@@ -67,3 +69,58 @@ class StreamableAzureOpenAI(AzureOpenAI):
@classmethod
def get_kwargs_from_model_params(cls, params: dict):
    """Translate model parameters into LLM invocation kwargs.

    For this provider the mapping is the identity: the caller's
    ``params`` dict is handed back untouched.
    """
    kwargs = params
    return kwargs
def _generate(
    self,
    prompts: List[str],
    stop: Optional[List[str]] = None,
    run_manager: Optional[CallbackManagerForLLMRun] = None,
    **kwargs: Any,
) -> LLMResult:
    """Call out to OpenAI's endpoint with k unique prompts.

    Args:
        prompts: The prompts to pass into the model.
        stop: Optional list of stop words to use when generating.
        run_manager: Optional callback manager notified of each new
            streamed token.
        **kwargs: Extra invocation parameters, merged over the model's
            default invocation params (kwargs win on conflict).

    Returns:
        The full LLM output.

    Raises:
        ValueError: If streaming is enabled with more than one prompt.

    Example:
        .. code-block:: python

            response = openai.generate(["Tell me a joke."])
    """
    params = self._invocation_params
    params = {**params, **kwargs}
    sub_prompts = self.get_sub_prompts(params, prompts, stop)
    choices = []
    token_usage: Dict[str, int] = {}
    # Get the token usage from the response.
    # Includes prompt, completion, and total tokens used.
    _keys = {"completion_tokens", "prompt_tokens", "total_tokens"}
    for _prompts in sub_prompts:
        if self.streaming:
            if len(_prompts) > 1:
                raise ValueError("Cannot stream results with multiple prompts.")
            params["stream"] = True
            response = _streaming_response_template()
            for stream_resp in completion_with_retry(
                self, prompt=_prompts, **params
            ):
                # Azure can emit stream chunks whose "choices" list is
                # empty; indexing ["choices"][0] on those raises
                # IndexError, so such chunks are skipped entirely. The
                # guard covers both the token callback and the response
                # merge, since both index choices[0].
                if stream_resp["choices"]:
                    if run_manager:
                        run_manager.on_llm_new_token(
                            stream_resp["choices"][0]["text"],
                            verbose=self.verbose,
                            logprobs=stream_resp["choices"][0]["logprobs"],
                        )
                    _update_response(response, stream_resp)
            choices.extend(response["choices"])
        else:
            response = completion_with_retry(self, prompt=_prompts, **params)
            choices.extend(response["choices"])
        if not self.streaming:
            # Can't update token usage if streaming
            update_token_usage(_keys, response, token_usage)
    return self.create_llm_result(choices, prompts, token_usage)
\ No newline at end of file
web/app/components/app-sidebar/basic.tsx
View file @
5bc7a783
...
...
@@ -4,7 +4,6 @@ import {
}
from
'@heroicons/react/24/outline'
import
Tooltip
from
'../base/tooltip'
import
AppIcon
from
'../base/app-icon'
const
chars
=
'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_'
export
function
randomString
(
length
:
number
)
{
...
...
@@ -21,6 +20,7 @@ export type IAppBasicProps = {
type
:
string
|
React
.
ReactNode
hoverTip
?:
string
textStyle
?:
{
main
?:
string
;
extra
?:
string
}
isExtraInLine
?:
boolean
}
const
ApiSvg
=
<
svg
width=
"18"
height=
"18"
viewBox=
"0 0 18 18"
fill=
"none"
xmlns=
"http://www.w3.org/2000/svg"
>
...
...
@@ -61,7 +61,7 @@ const ICON_MAP = {
notion
:
<
AppIcon
innerIcon=
{
NotionSvg
}
className=
'!border-[0.5px] !border-indigo-100 !bg-white'
/>,
}
export
default
function
AppBasic
({
icon
,
icon_background
,
name
,
type
,
hoverTip
,
textStyle
,
iconType
=
'app'
}:
IAppBasicProps
)
{
export
default
function
AppBasic
({
icon
,
icon_background
,
name
,
type
,
hoverTip
,
textStyle
,
iconType
=
'app'
,
isExtraInLine
}:
IAppBasicProps
)
{
return
(
<
div
className=
"flex items-start"
>
{
icon
&&
icon_background
&&
iconType
===
'app'
&&
(
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment