Compare commits

...

5 Commits

Author SHA1 Message Date
FFXN
7dfe615613 feat: Add "type" field to PipelineRecommendedPlugin model; Add query param "type" to recommended-plugins api. (#29684) 2025-12-15 18:19:54 +08:00
FFXN
a1a3fa0283 Add "type" field to PipelineRecommendedPlugin model; Add query param "type" to recommended-plugins api. 2025-12-15 16:44:32 +08:00
FFXN
ff7344f3d3 Add "type" field to PipelineRecommendedPlugin model; Add query param "type" to recommended-plugins api. 2025-12-15 16:38:44 +08:00
FFXN
bcd33be22a Add "type" field to PipelineRecommendedPlugin model; Add query param "type" to recommended-plugins api. 2025-12-15 16:33:06 +08:00
hjlarry
991f31f195 log missing trigger icon 2025-12-10 09:40:16 +08:00
5 changed files with 92 additions and 10 deletions

View File

@@ -1004,6 +1004,11 @@ class RagPipelineRecommendedPluginApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument('type', type=str, location='args', required=False, default='all')
+        args = parser.parse_args()
+        type = args["type"]
+
         rag_pipeline_service = RagPipelineService()
-        recommended_plugins = rag_pipeline_service.get_recommended_plugins()
+        recommended_plugins = rag_pipeline_service.get_recommended_plugins(type)
         return recommended_plugins
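
For orientation, here is a minimal self-contained sketch of how the new "type" query parameter is parsed, reusing the reqparse lines from the hunk above. The Flask app scaffolding and the "/recommended-plugins" route are hypothetical; only the parser setup mirrors the diff.

# Hypothetical scaffolding around the parser lines from the hunk above.
from flask import Flask
from flask_restful import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

class RecommendedPluginsSketch(Resource):
    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument('type', type=str, location='args', required=False, default='all')
        args = parser.parse_args()
        # "all" (the default) disables filtering; any other value is
        # forwarded to the service-layer query.
        return {"type": args["type"]}

# The real mount point is not shown in this diff; the path below is a placeholder.
api.add_resource(RecommendedPluginsSketch, '/recommended-plugins')

if __name__ == '__main__':
    app.run()

Under this sketch, GET /recommended-plugins?type=datasource returns {"type": "datasource"}, while omitting the parameter falls back to "all"; the valid type values themselves are not enumerated anywhere in this diff.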

View File

@@ -0,0 +1,64 @@
+"""Alter table pipeline_recommended_plugins add column type
+
+Revision ID: 6bb0832495f0
+Revises: 7bb281b7a422
+Create Date: 2025-12-15 16:14:38.482072
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '6bb0832495f0'
+down_revision = '7bb281b7a422'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('app_triggers', schema=None) as batch_op:
+        batch_op.alter_column('provider_name',
+               existing_type=sa.VARCHAR(length=255),
+               nullable=False,
+               existing_server_default=sa.text("''::character varying"))
+
+    with op.batch_alter_table('operation_logs', schema=None) as batch_op:
+        batch_op.alter_column('content',
+               existing_type=postgresql.JSON(astext_type=sa.Text()),
+               nullable=False)
+
+    with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('type', sa.String(length=50), nullable=True))
+
+    with op.batch_alter_table('providers', schema=None) as batch_op:
+        batch_op.alter_column('quota_used',
+               existing_type=sa.BIGINT(),
+               nullable=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('providers', schema=None) as batch_op:
+        batch_op.alter_column('quota_used',
+               existing_type=sa.BIGINT(),
+               nullable=True)
+
+    with op.batch_alter_table('pipeline_recommended_plugins', schema=None) as batch_op:
+        batch_op.drop_column('type')
+
+    with op.batch_alter_table('operation_logs', schema=None) as batch_op:
+        batch_op.alter_column('content',
+               existing_type=postgresql.JSON(astext_type=sa.Text()),
+               nullable=True)
+
+    with op.batch_alter_table('app_triggers', schema=None) as batch_op:
+        batch_op.alter_column('provider_name',
+               existing_type=sa.VARCHAR(length=255),
+               nullable=True,
+               existing_server_default=sa.text("''::character varying"))
+    # ### end Alembic commands ###
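
The only change relevant to this feature is the new nullable column on pipeline_recommended_plugins; the alterations to app_triggers, operation_logs, and providers appear to be unrelated auto-generated drift picked up by Alembic. A self-contained sketch of the column's effect, run against in-memory SQLite rather than the PostgreSQL target; the seed row and its 'datasource' value are illustrative, not taken from this diff.

# Sketch only: the real migration targets PostgreSQL; SQLite is used
# here so the snippet runs standalone.
import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")
with engine.begin() as conn:
    conn.execute(sa.text(
        "CREATE TABLE pipeline_recommended_plugins "
        "(id INTEGER PRIMARY KEY, plugin_id TEXT NOT NULL)"
    ))
    # upgrade(): mirrors batch_op.add_column(sa.Column('type',
    # sa.String(length=50), nullable=True)) -- existing rows keep NULL.
    conn.execute(sa.text(
        "ALTER TABLE pipeline_recommended_plugins ADD COLUMN type VARCHAR(50)"
    ))
    conn.execute(sa.text(
        "INSERT INTO pipeline_recommended_plugins (id, plugin_id, type) "
        "VALUES (1, 'plugin-a', 'datasource')"
    ))
    print(conn.execute(sa.text(
        "SELECT plugin_id, type FROM pipeline_recommended_plugins"
    )).all())  # [('plugin-a', 'datasource')]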

View File

@@ -1458,6 +1458,7 @@ class PipelineRecommendedPlugin(TypeBase):
     )
     plugin_id: Mapped[str] = mapped_column(LongText, nullable=False)
     provider_name: Mapped[str] = mapped_column(LongText, nullable=False)
+    type: Mapped[str] = mapped_column(sa.String(50), nullable=True)
     position: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
     active: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True)
     created_at: Mapped[datetime] = mapped_column(
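
A minimal SQLAlchemy 2.0-style sketch of the model change, with just enough scaffolding to run; the real model's LongText columns, defaults, and base class are omitted, and the table name is suffixed so it clashes with nothing real.

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class PipelineRecommendedPluginSketch(Base):
    __tablename__ = "pipeline_recommended_plugins_sketch"
    id: Mapped[int] = mapped_column(sa.Integer, primary_key=True)
    plugin_id: Mapped[str] = mapped_column(sa.Text, nullable=False)
    # The new filter key, exactly as in the hunk above: a nullable
    # VARCHAR(50), so pre-migration rows need no backfill.
    type: Mapped[str] = mapped_column(sa.String(50), nullable=True)

engine = sa.create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)  # emits CREATE TABLE with type VARCHAR(50)

Declaring the column nullable keeps rows created before the migration valid; annotating it as Mapped[str | None] instead would additionally make the nullability visible to type checkers.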

View File

@@ -907,19 +907,29 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
     @property
     def extras(self) -> dict[str, Any]:
         from core.tools.tool_manager import ToolManager
+        from core.trigger.trigger_manager import TriggerManager
 
         extras: dict[str, Any] = {}
-        if self.execution_metadata_dict:
-            if self.node_type == NodeType.TOOL and "tool_info" in self.execution_metadata_dict:
-                tool_info: dict[str, Any] = self.execution_metadata_dict["tool_info"]
+        execution_metadata = self.execution_metadata_dict
+        if execution_metadata:
+            if self.node_type == NodeType.TOOL and "tool_info" in execution_metadata:
+                tool_info: dict[str, Any] = execution_metadata["tool_info"]
                 extras["icon"] = ToolManager.get_tool_icon(
                     tenant_id=self.tenant_id,
                     provider_type=tool_info["provider_type"],
                     provider_id=tool_info["provider_id"],
                 )
-            elif self.node_type == NodeType.DATASOURCE and "datasource_info" in self.execution_metadata_dict:
-                datasource_info = self.execution_metadata_dict["datasource_info"]
+            elif self.node_type == NodeType.DATASOURCE and "datasource_info" in execution_metadata:
+                datasource_info = execution_metadata["datasource_info"]
                 extras["icon"] = datasource_info.get("icon")
+            elif self.node_type == NodeType.TRIGGER_PLUGIN and "trigger_info" in execution_metadata:
+                trigger_info = execution_metadata["trigger_info"] or {}
+                provider_id = trigger_info.get("provider_id")
+                if provider_id:
+                    extras["icon"] = TriggerManager.get_trigger_plugin_icon(
+                        tenant_id=self.tenant_id,
+                        provider_id=provider_id,
+                    )
         return extras
 
     def _get_offload_by_type(self, type_: ExecutionOffLoadType) -> Optional["WorkflowNodeExecutionOffload"]:
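
The shape of the new TRIGGER_PLUGIN branch, isolated as a pure-Python sketch so the guard logic is easy to exercise; TriggerManager is stubbed here, whereas the real one is imported from core.trigger.trigger_manager as in the hunk.

from typing import Any

class TriggerManagerStub:
    # Stand-in for core.trigger.trigger_manager.TriggerManager; the
    # real method resolves the icon through the plugin system.
    @staticmethod
    def get_trigger_plugin_icon(tenant_id: str, provider_id: str) -> str:
        return f"icon-for-{provider_id}"

def resolve_trigger_icon(tenant_id: str, execution_metadata: dict[str, Any]) -> dict[str, Any]:
    extras: dict[str, Any] = {}
    # "trigger_info" may be present but None, hence the `or {}` guard
    # mirrored from the diff; a missing provider_id means no icon.
    trigger_info = execution_metadata.get("trigger_info") or {}
    provider_id = trigger_info.get("provider_id")
    if provider_id:
        extras["icon"] = TriggerManagerStub.get_trigger_plugin_icon(
            tenant_id=tenant_id,
            provider_id=provider_id,
        )
    return extras

print(resolve_trigger_icon("t1", {"trigger_info": {"provider_id": "p1"}}))  # {'icon': 'icon-for-p1'}
print(resolve_trigger_icon("t1", {"trigger_info": None}))                   # {}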

View File

@@ -1248,12 +1248,14 @@ class RagPipelineService:
         session.commit()
         return workflow_node_execution_db_model
 
-    def get_recommended_plugins(self) -> dict:
+    def get_recommended_plugins(self, type: str) -> dict:
         # Query active recommended plugins
+        query = db.session.query(PipelineRecommendedPlugin).where(PipelineRecommendedPlugin.active == True)
+        if type and type != "all":
+            query = query.where(PipelineRecommendedPlugin.type == type)
         pipeline_recommended_plugins = (
-            db.session.query(PipelineRecommendedPlugin)
-            .where(PipelineRecommendedPlugin.active == True)
-            .order_by(PipelineRecommendedPlugin.position.asc())
+            query.order_by(PipelineRecommendedPlugin.position.asc())
             .all()
        )
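
The filtering contract of the reworked get_recommended_plugins, restated as a pure-Python sketch; the plugin rows and type values below are illustrative, while the real method expresses the same conditions as a SQLAlchemy query.

def filter_recommended(plugins: list[dict], type: str) -> list[dict]:
    # Active rows only, mirroring PipelineRecommendedPlugin.active == True.
    rows = [p for p in plugins if p["active"]]
    # "all" (or an empty value) skips the type filter, as in the diff.
    if type and type != "all":
        rows = [p for p in rows if p["type"] == type]
    # Stable ordering by position, mirroring .order_by(...position.asc()).
    return sorted(rows, key=lambda p: p["position"])

plugins = [
    {"plugin_id": "a", "type": "datasource", "active": True, "position": 1},
    {"plugin_id": "b", "type": "tool", "active": True, "position": 0},
    {"plugin_id": "c", "type": "tool", "active": False, "position": 2},
]
print([p["plugin_id"] for p in filter_recommended(plugins, "all")])   # ['b', 'a']
print([p["plugin_id"] for p in filter_recommended(plugins, "tool")])  # ['b']

Because "all" (the API-layer default) and empty values leave the active-only query untouched, existing callers that never pass the parameter keep their old behavior.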