Commit ba0496e9 authored by takatost

add workflow models

parent 89ec653d
-from flask_restful import Resource
+from flask_restful import Resource, reqparse

 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -12,9 +12,20 @@ class DefaultBlockConfigApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
-    @get_app_model(mode=[AppMode.CHAT, AppMode.WORKFLOW])
-    def post(self, app_model):
-        return 'success', 200
+    def get(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument('app_mode', type=str, required=True, nullable=False,
+                            choices=[AppMode.CHAT.value, AppMode.WORKFLOW.value], location='args')
+        args = parser.parse_args()
+
+        app_mode = args.get('app_mode')
+        app_mode = AppMode.value_of(app_mode)
+
+        # TODO: implement this
+        return {
+            "blocks": []
+        }

-api.add_resource(DefaultBlockConfigApi, '/apps/<uuid:app_id>/default-workflow-block-configs')
+api.add_resource(DefaultBlockConfigApi, '/default-workflow-block-configs')
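
A quick way to exercise the reworked endpoint once implemented (a sketch, not part of this commit: the host, port, and `/console/api` prefix are assumptions, and the console API additionally requires an authenticated session):

import requests

# `app_mode` must be one of the declared choices ('chat' or 'workflow');
# anything else is rejected by reqparse with a 400 before the handler runs.
resp = requests.get(
    'http://localhost:5001/console/api/default-workflow-block-configs',
    params={'app_mode': 'workflow'},
)
print(resp.status_code, resp.json())  # expected for now: 200 {'blocks': []}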
"""add workflow
Revision ID: b289e2408ee2
Revises: 16830a790f0f
Create Date: 2024-02-19 12:47:24.646954
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'b289e2408ee2'
down_revision = '16830a790f0f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('workflow_app_logs',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('workflow_id', postgresql.UUID(), nullable=False),
sa.Column('workflow_run_id', postgresql.UUID(), nullable=False),
sa.Column('created_from', sa.String(length=255), nullable=False),
sa.Column('created_by_role', sa.String(length=255), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='workflow_app_log_pkey')
)
with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op:
batch_op.create_index('workflow_app_log_app_idx', ['tenant_id', 'app_id'], unique=False)
op.create_table('workflow_node_executions',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('workflow_id', postgresql.UUID(), nullable=False),
sa.Column('triggered_from', sa.String(length=255), nullable=False),
sa.Column('workflow_run_id', postgresql.UUID(), nullable=True),
sa.Column('index', sa.Integer(), nullable=False),
sa.Column('predecessor_node_id', sa.String(length=255), nullable=True),
sa.Column('node_id', sa.String(length=255), nullable=False),
sa.Column('node_type', sa.String(length=255), nullable=False),
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('inputs', sa.Text(), nullable=False),
sa.Column('process_data', sa.Text(), nullable=False),
sa.Column('outputs', sa.Text(), nullable=True),
sa.Column('status', sa.String(length=255), nullable=False),
sa.Column('error', sa.Text(), nullable=True),
sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('execution_metadata', sa.Text(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey')
)
with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
batch_op.create_index('workflow_node_execution_node_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'node_id'], unique=False)
batch_op.create_index('workflow_node_execution_workflow_run_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from', 'workflow_run_id'], unique=False)
op.create_table('workflow_runs',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('sequence_number', sa.Integer(), nullable=False),
sa.Column('workflow_id', postgresql.UUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('triggered_from', sa.String(length=255), nullable=False),
sa.Column('version', sa.String(length=255), nullable=False),
sa.Column('graph', sa.Text(), nullable=True),
sa.Column('inputs', sa.Text(), nullable=True),
sa.Column('status', sa.String(length=255), nullable=False),
sa.Column('outputs', sa.Text(), nullable=True),
sa.Column('error', sa.Text(), nullable=True),
sa.Column('elapsed_time', sa.Float(), server_default=sa.text('0'), nullable=False),
sa.Column('total_tokens', sa.Integer(), server_default=sa.text('0'), nullable=False),
sa.Column('total_price', sa.Numeric(precision=10, scale=7), nullable=True),
sa.Column('currency', sa.String(length=255), nullable=True),
sa.Column('total_steps', sa.Integer(), server_default=sa.text('0'), nullable=True),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('finished_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_run_pkey')
)
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.create_index('workflow_run_triggerd_from_idx', ['tenant_id', 'app_id', 'workflow_id', 'triggered_from'], unique=False)
op.create_table('workflows',
sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', postgresql.UUID(), nullable=False),
sa.Column('app_id', postgresql.UUID(), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.Column('version', sa.String(length=255), nullable=False),
sa.Column('graph', sa.Text(), nullable=True),
sa.Column('created_by', postgresql.UUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_by', postgresql.UUID(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_pkey')
)
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.create_index('workflow_version_idx', ['tenant_id', 'app_id', 'type', 'version'], unique=False)
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.add_column(sa.Column('chatbot_app_engine', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False))
batch_op.add_column(sa.Column('workflow_id', postgresql.UUID(), nullable=True))
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.add_column(sa.Column('workflow_run_id', postgresql.UUID(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_column('workflow_run_id')
with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
batch_op.drop_column('workflow_id')
batch_op.drop_column('chatbot_app_engine')
with op.batch_alter_table('workflows', schema=None) as batch_op:
batch_op.drop_index('workflow_version_idx')
op.drop_table('workflows')
with op.batch_alter_table('workflow_runs', schema=None) as batch_op:
batch_op.drop_index('workflow_run_triggerd_from_idx')
op.drop_table('workflow_runs')
with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
batch_op.drop_index('workflow_node_execution_workflow_run_idx')
batch_op.drop_index('workflow_node_execution_node_run_idx')
op.drop_table('workflow_node_executions')
with op.batch_alter_table('workflow_app_logs', schema=None) as batch_op:
batch_op.drop_index('workflow_app_log_app_idx')
op.drop_table('workflow_app_logs')
# ### end Alembic commands ###
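
Note (not part of this commit): the `uuid_generate_v4()` server defaults above come from PostgreSQL's `uuid-ossp` extension, which Dify's earlier migrations are assumed to have installed already. When replaying this migration against a fresh database, one option is a guard at the top of `upgrade()`:

op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')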
@@ -12,6 +12,7 @@ from extensions.ext_database import db
 from libs.helper import generate_string

 from .account import Account, Tenant
+from .workflow import WorkflowRun, Workflow


 class DifySetup(db.Model):
@@ -156,12 +157,14 @@ class AppModelConfig(db.Model):
     agent_mode = db.Column(db.Text)
     sensitive_word_avoidance = db.Column(db.Text)
     retriever_resource = db.Column(db.Text)
-    prompt_type = db.Column(db.String(255), nullable=False, default='simple')
+    prompt_type = db.Column(db.String(255), nullable=False, server_default=db.text("'simple'::character varying"))
     chat_prompt_config = db.Column(db.Text)
     completion_prompt_config = db.Column(db.Text)
     dataset_configs = db.Column(db.Text)
     external_data_tools = db.Column(db.Text)
     file_upload = db.Column(db.Text)
+    chatbot_app_engine = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
+    workflow_id = db.Column(UUID)

     @property
     def app(self):
@@ -261,6 +264,13 @@ class AppModelConfig(db.Model):
             "image": {"enabled": False, "number_limits": 3, "detail": "high",
                       "transfer_methods": ["remote_url", "local_file"]}}

+    @property
+    def workflow(self):
+        if self.workflow_id:
+            return db.session.query(Workflow).filter(Workflow.id == self.workflow_id).first()
+
+        return None
+
     def to_dict(self) -> dict:
         return {
             "provider": "",
@@ -581,6 +591,7 @@ class Message(db.Model):
     created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
     updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
     agent_based = db.Column(db.Boolean, nullable=False, server_default=db.text('false'))
+    workflow_run_id = db.Column(UUID)

     @property
     def user_feedback(self):
@@ -679,6 +690,13 @@ class Message(db.Model):
         return files

+    @property
+    def workflow_run(self):
+        if self.workflow_run_id:
+            return db.session.query(WorkflowRun).filter(WorkflowRun.id == self.workflow_run_id).first()
+
+        return None
+
 class MessageFeedback(db.Model):
     __tablename__ = 'message_feedbacks'
from sqlalchemy.dialects.postgresql import UUID
from extensions.ext_database import db
class Workflow(db.Model):
"""
Workflow, for `Workflow App` and `Chat App workflow mode`.
Attributes:
- id (uuid) Workflow ID, pk
- tenant_id (uuid) Workspace ID
- app_id (uuid) App ID
- type (string) Workflow type
    `workflow` for `Workflow App`
    `chat` for `Chat App workflow mode`
- version (string) Version
    `draft` for the draft version (only one per app); any other value is a version number (redundant)
- graph (text) Workflow canvas configuration (JSON)
    The entire canvas configuration JSON, including Node, Edge, and other configurations
    - nodes (array[object]) Node list, see Node Schema
    - edges (array[object]) Edge list, see Edge Schema
- created_by (uuid) Creator ID
- created_at (timestamp) Creation time
- updated_by (uuid) `optional` Last updater ID
- updated_at (timestamp) `optional` Last update time
"""
__tablename__ = 'workflows'
__table_args__ = (
db.PrimaryKeyConstraint('id', name='workflow_pkey'),
db.Index('workflow_version_idx', 'tenant_id', 'app_id', 'type', 'version'),
)
id = db.Column(UUID, server_default=db.text('uuid_generate_v4()'))
tenant_id = db.Column(UUID, nullable=False)
app_id = db.Column(UUID, nullable=False)
type = db.Column(db.String(255), nullable=False)
version = db.Column(db.String(255), nullable=False)
graph = db.Column(db.Text)
created_by = db.Column(UUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
updated_by = db.Column(UUID)
updated_at = db.Column(db.DateTime)
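
# Sketch (not part of this commit): creating the single draft version the
# docstring above describes. Assumes a Flask app context and existing
# tenant/app/account UUIDs; every variable name here is hypothetical.
#
#     import json
#
#     draft = Workflow(
#         tenant_id=tenant_id,
#         app_id=app_id,
#         type='workflow',                               # or 'chat'
#         version='draft',                               # only one draft per app
#         graph=json.dumps({'nodes': [], 'edges': []}),  # empty canvas
#         created_by=account_id,
#     )
#     db.session.add(draft)
#     db.session.commit()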
class WorkflowRun(db.Model):
"""
Workflow Run
Attributes:
- id (uuid) Run ID
- tenant_id (uuid) Workspace ID
- app_id (uuid) App ID
- sequence_number (int) Auto-increment sequence number, incremented within the App, starting from 1
- workflow_id (uuid) Workflow ID
- type (string) Workflow type
- triggered_from (string) Trigger source
    `debugging` for canvas debugging
    `app-run` for (published) app execution
- version (string) Version
- graph (text) Workflow canvas configuration (JSON)
- inputs (text) Input parameters
- status (string) Execution status, `running` / `succeeded` / `failed`
- outputs (text) `optional` Output content
- error (string) `optional` Error reason
- elapsed_time (float) `optional` Time consumption (s)
- total_tokens (int) `optional` Total tokens used
- total_price (decimal) `optional` Total cost
- currency (string) `optional` Currency, such as USD / RMB
- total_steps (int) Total steps (redundant), default 0
- created_by (uuid) Runner ID
- created_at (timestamp) Run time
- finished_at (timestamp) End time
"""
__tablename__ = 'workflow_runs'
__table_args__ = (
db.PrimaryKeyConstraint('id', name='workflow_run_pkey'),
db.Index('workflow_run_triggerd_from_idx', 'tenant_id', 'app_id', 'workflow_id', 'triggered_from'),
)
id = db.Column(UUID, server_default=db.text('uuid_generate_v4()'))
tenant_id = db.Column(UUID, nullable=False)
app_id = db.Column(UUID, nullable=False)
sequence_number = db.Column(db.Integer, nullable=False)
workflow_id = db.Column(UUID, nullable=False)
type = db.Column(db.String(255), nullable=False)
triggered_from = db.Column(db.String(255), nullable=False)
version = db.Column(db.String(255), nullable=False)
graph = db.Column(db.Text)
inputs = db.Column(db.Text)
status = db.Column(db.String(255), nullable=False)
outputs = db.Column(db.Text)
error = db.Column(db.Text)
elapsed_time = db.Column(db.Float, nullable=False, server_default=db.text('0'))
total_tokens = db.Column(db.Integer, nullable=False, server_default=db.text('0'))
total_price = db.Column(db.Numeric(10, 7))
currency = db.Column(db.String(255))
total_steps = db.Column(db.Integer, server_default=db.text('0'))
created_by = db.Column(UUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
finished_at = db.Column(db.DateTime)
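
# Sketch (not part of this commit): one way to derive the per-app
# sequence_number described above (max + 1 within the app, starting from 1).
# Concurrent runs would need row locking or a dedicated sequence; this is
# illustrative only, and tenant_id / app_id are hypothetical bindings.
#
#     from sqlalchemy import func
#
#     next_seq = (db.session.query(func.max(WorkflowRun.sequence_number))
#                 .filter(WorkflowRun.tenant_id == tenant_id,
#                         WorkflowRun.app_id == app_id)
#                 .scalar() or 0) + 1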
class WorkflowNodeExecution(db.Model):
"""
Workflow Node Execution

Attributes:
- id (uuid) Execution ID
- tenant_id (uuid) Workspace ID
- app_id (uuid) App ID
- workflow_id (uuid) Workflow ID
- triggered_from (string) Trigger source
    `single-step` for single-step debugging
    `workflow-run` for workflow execution (debugging / user execution)
- workflow_run_id (uuid) `optional` Workflow run ID
    Null for single-step debugging.
- index (int) Execution sequence number, used for displaying Tracing Node order
- predecessor_node_id (string) `optional` Predecessor node ID, used for displaying execution path
- node_id (string) Node ID
- node_type (string) Node type, such as `start`
- title (string) Node title
- inputs (json) All predecessor node variable content used in the node
- process_data (json) Node process data
- outputs (json) `optional` Node output variables
- status (string) Execution status, `running` / `succeeded` / `failed`
- error (string) `optional` Error reason
- elapsed_time (float) `optional` Time consumption (s)
- execution_metadata (text) Metadata
    - total_tokens (int) `optional` Total tokens used
    - total_price (decimal) `optional` Total cost
    - currency (string) `optional` Currency, such as USD / RMB
- created_at (timestamp) Run time
- created_by (uuid) Runner ID
- finished_at (timestamp) End time
"""
__tablename__ = 'workflow_node_executions'
__table_args__ = (
db.PrimaryKeyConstraint('id', name='workflow_node_execution_pkey'),
db.Index('workflow_node_execution_workflow_run_idx', 'tenant_id', 'app_id', 'workflow_id',
'triggered_from', 'workflow_run_id'),
db.Index('workflow_node_execution_node_run_idx', 'tenant_id', 'app_id', 'workflow_id',
'triggered_from', 'node_id'),
)
id = db.Column(UUID, server_default=db.text('uuid_generate_v4()'))
tenant_id = db.Column(UUID, nullable=False)
app_id = db.Column(UUID, nullable=False)
workflow_id = db.Column(UUID, nullable=False)
triggered_from = db.Column(db.String(255), nullable=False)
workflow_run_id = db.Column(UUID)
index = db.Column(db.Integer, nullable=False)
predecessor_node_id = db.Column(db.String(255))
node_id = db.Column(db.String(255), nullable=False)
node_type = db.Column(db.String(255), nullable=False)
title = db.Column(db.String(255), nullable=False)
inputs = db.Column(db.Text, nullable=False)
process_data = db.Column(db.Text, nullable=False)
outputs = db.Column(db.Text)
status = db.Column(db.String(255), nullable=False)
error = db.Column(db.Text)
elapsed_time = db.Column(db.Float, nullable=False, server_default=db.text('0'))
execution_metadata = db.Column(db.Text)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
created_by = db.Column(UUID, nullable=False)
finished_at = db.Column(db.DateTime)
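
# Sketch (not part of this commit): fetching a run's node executions in the
# tracing order that `index` provides, filtering along the columns of
# workflow_node_execution_workflow_run_idx. All bound variables are
# hypothetical.
#
#     executions = (db.session.query(WorkflowNodeExecution)
#                   .filter(WorkflowNodeExecution.tenant_id == tenant_id,
#                           WorkflowNodeExecution.app_id == app_id,
#                           WorkflowNodeExecution.workflow_id == workflow_id,
#                           WorkflowNodeExecution.triggered_from == 'workflow-run',
#                           WorkflowNodeExecution.workflow_run_id == run_id)
#                   .order_by(WorkflowNodeExecution.index.asc())
#                   .all())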
class WorkflowAppLog(db.Model):
"""
Workflow App execution log, excluding workflow debugging records.
Attributes:
- id (uuid) Log ID
- tenant_id (uuid) Workspace ID
- app_id (uuid) App ID
- workflow_id (uuid) Associated Workflow ID
- workflow_run_id (uuid) Associated Workflow Run ID
- created_from (string) Creation source
    `service-api` App Execution OpenAPI
    `web-app` WebApp
    `installed-app` Installed App
- created_by_role (string) Creator role
    `account` Console account
    `end_user` End user
- created_by (uuid) Creator ID; which user table it references depends on created_by_role
- created_at (timestamp) Creation time
"""
__tablename__ = 'workflow_app_logs'
__table_args__ = (
db.PrimaryKeyConstraint('id', name='workflow_app_log_pkey'),
db.Index('workflow_app_log_app_idx', 'tenant_id', 'app_id'),
)
id = db.Column(UUID, server_default=db.text('uuid_generate_v4()'))
tenant_id = db.Column(UUID, nullable=False)
app_id = db.Column(UUID, nullable=False)
workflow_id = db.Column(UUID, nullable=False)
workflow_run_id = db.Column(UUID, nullable=False)
created_from = db.Column(db.String(255), nullable=False)
created_by_role = db.Column(db.String(255), nullable=False)
created_by = db.Column(UUID, nullable=False)
created_at = db.Column(db.DateTime, nullable=False, server_default=db.text('CURRENT_TIMESTAMP(0)'))
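
A hedged usage sketch (not part of this commit) of resolving the polymorphic `created_by` reference that the WorkflowAppLog docstring describes. The `Account` and `EndUser` import paths are assumptions about Dify's models package; `db` is the module's own session from extensions.ext_database.

from models.account import Account
from models.model import EndUser


def workflow_app_log_creator(log: WorkflowAppLog):
    # created_by points at accounts or end_users depending on created_by_role
    if log.created_by_role == 'account':
        return db.session.query(Account).filter(Account.id == log.created_by).first()
    return db.session.query(EndUser).filter(EndUser.id == log.created_by).first()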