Commit 7497b47e authored by John Wang's avatar John Wang

fix: llm does not return llm_output

parent 95eaf9a9
......@@ -53,4 +53,9 @@ class FakeLLM(SimpleChatModel):
message = AIMessage(content=output_str)
generation = ChatGeneration(message=message)
return ChatResult(generations=[generation])
llm_output = {"token_usage": {
'prompt_tokens': 0,
'completion_tokens': 0,
'total_tokens': 0,
}}
return ChatResult(generations=[generation], llm_output=llm_output)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment