fix: resolve lint issues (#32168)

Co-authored-by: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
qiuqiua
2026-02-12 13:22:37 +08:00
committed by GitHub
parent 4f04e70494
commit 3a0c5df408
5 changed files with 63 additions and 25 deletions

View File

@@ -1 +1,3 @@
from .runner import WorkflowGenerator
__all__ = ["WorkflowGenerator"]

View File

@@ -2,6 +2,7 @@ import json
import logging
import re
from collections.abc import Sequence
from typing import Any, cast
import json_repair
@@ -30,6 +31,7 @@ from core.workflow.generator.prompts.vibe_prompts import (
format_available_tools,
parse_vibe_response,
)
from core.workflow.generator.types import AvailableModelDict, AvailableToolDict, WorkflowNodeDict
from core.workflow.generator.utils.graph_builder import CyclicDependencyError, GraphBuilder
from core.workflow.generator.utils.mermaid_generator import generate_mermaid
from core.workflow.generator.utils.workflow_validator import ValidationHint, WorkflowValidator
@@ -47,7 +49,7 @@ class WorkflowGenerator:
def generate_workflow_flowchart(
cls,
model_instance,
model_parameters: dict,
model_parameters: dict[str, Any],
instruction: str,
available_nodes: Sequence[dict[str, object]] | None = None,
existing_nodes: Sequence[dict[str, object]] | None = None,
@@ -180,8 +182,8 @@ class WorkflowGenerator:
# --- STEP 3: BUILDER (with retry loop) ---
MAX_GLOBAL_RETRIES = 2 # Total attempts: 1 initial + 1 retry
workflow_data = None
mermaid_code = None
workflow_data: dict[str, Any] | None = None
mermaid_code: str | None = None
all_warnings = []
all_fixes = []
retry_count = 0
@@ -192,12 +194,17 @@ class WorkflowGenerator:
logger.info("Generation attempt %s/%s", attempt + 1, MAX_GLOBAL_RETRIES)
# Prepare context
tool_schemas = format_available_tools(filtered_tools)
node_specs = format_available_nodes(list(available_nodes) if available_nodes else [])
existing_nodes_context = format_existing_nodes(list(existing_nodes) if existing_nodes else None)
tool_schemas = format_available_tools(cast(list[AvailableToolDict], filtered_tools))
node_specs = format_available_nodes(
cast(list[WorkflowNodeDict], list(available_nodes)) if available_nodes else []
)
existing_nodes_context = format_existing_nodes(
cast(list[dict[str, Any]], list(existing_nodes)) if existing_nodes else None
)
existing_edges_context = format_existing_edges(list(existing_edges) if existing_edges else None)
selected_nodes_context = format_selected_nodes(
list(selected_node_ids) if selected_node_ids else None, list(existing_nodes) if existing_nodes else None
list(selected_node_ids) if selected_node_ids else None,
cast(list[dict[str, Any]], list(existing_nodes)) if existing_nodes else None,
)
# Build retry context
@@ -223,7 +230,9 @@ class WorkflowGenerator:
plan_context=json.dumps(plan_data.get("steps", []), indent=2),
tool_schemas=tool_schemas,
builtin_node_specs=node_specs,
available_models=format_available_models(list(available_models or [])),
available_models=format_available_models(
cast(list[AvailableModelDict], list(available_models or []))
),
preferred_language=preferred_language or "English",
existing_nodes_context=existing_nodes_context,
selected_nodes_context=selected_nodes_context,
@@ -234,7 +243,9 @@ class WorkflowGenerator:
plan_context=json.dumps(plan_data.get("steps", []), indent=2),
tool_schemas=tool_schemas,
builtin_node_specs=node_specs,
available_models=format_available_models(list(available_models or [])),
available_models=format_available_models(
cast(list[AvailableModelDict], list(available_models or []))
),
preferred_language=preferred_language or "English",
existing_nodes_context=existing_nodes_context,
existing_edges_context=existing_edges_context,
@@ -278,14 +289,14 @@ class WorkflowGenerator:
"(3) Plugin communication error. Try a different model or check model settings."
)
workflow_data = json_repair.loads(build_content)
workflow_data = cast(dict[str, Any] | None, json_repair.loads(build_content))
# Handle double-encoded JSON (when json_repair.loads returns a string)
# Keep decoding until we get a dict
max_decode_attempts = 3
decode_attempts = 0
while isinstance(workflow_data, str) and decode_attempts < max_decode_attempts:
workflow_data = json_repair.loads(workflow_data)
workflow_data = cast(dict[str, Any] | None, json_repair.loads(workflow_data))
decode_attempts += 1
# If still a string, it's not valid JSON structure
@@ -298,6 +309,9 @@ class WorkflowGenerator:
)
raise ValueError(f"Expected dict, got {type(workflow_data).__name__}")
# Type narrowing: workflow_data is now dict[str, Any]
assert isinstance(workflow_data, dict), "workflow_data must be a dict at this point"
if "nodes" not in workflow_data:
workflow_data["nodes"] = []
@@ -358,18 +372,25 @@ class WorkflowGenerator:
# Validation will detect structural issues, and LLM will fix them on retry.
# This is more accurate because LLM understands the workflow context.
# Cast workflow_data for type safety after validation
from core.workflow.generator.types import WorkflowDataDict
workflow_data_typed = cast(WorkflowDataDict, workflow_data)
# --- STEP 4: RENDERER (Generate Mermaid early for validation) ---
mermaid_code = generate_mermaid(workflow_data)
mermaid_code = generate_mermaid(workflow_data_typed)
# --- STEP 5: VALIDATOR ---
is_valid, validation_hints = WorkflowValidator.validate(workflow_data, available_tools_list)
_, validation_hints = WorkflowValidator.validate(
workflow_data_typed, cast(list[AvailableToolDict], available_tools_list)
)
# --- STEP 6: GRAPH VALIDATION (structural checks using graph algorithms) ---
if attempt < MAX_GLOBAL_RETRIES - 1:
try:
from core.workflow.generator.utils.graph_validator import GraphValidator
graph_result = GraphValidator.validate(workflow_data)
graph_result = GraphValidator.validate(cast(dict[str, Any], workflow_data_typed))
if not graph_result.success:
# Convert graph errors to validation hints
@@ -408,17 +429,22 @@ class WorkflowGenerator:
# Collect all validation warnings
all_warnings = [h.message for h in validation_hints]
# Add stability warning (as requested by user)
# Add stability warning
stability_warning = "The generated workflow may require debugging."
if preferred_language and preferred_language.startswith("zh"):
stability_warning = "生成的 Workflow 可能需要调试。"
all_warnings.append(stability_warning)
# Ensure workflow_data is not None before returning
if workflow_data is None:
return {
"intent": "error",
"error": "Failed to generate workflow",
}
return {
"intent": "generate",
"flowchart": mermaid_code,
"nodes": workflow_data["nodes"],
"edges": workflow_data["edges"],
"nodes": workflow_data.get("nodes", []) if workflow_data else [],
"edges": workflow_data.get("edges", []) if workflow_data else [],
"message": plan_data.get("plan_thought", "Generated workflow based on your request."),
"warnings": all_warnings,
"tool_recommendations": [], # Legacy field

View File

@@ -95,7 +95,9 @@ class EdgeRepair:
warnings.extend(branch_warnings)
# Update outgoing index
for edge in new_edges:
outgoing_edges.setdefault(edge.get("source"), []).append(edge)
src = edge.get("source")
if src:
outgoing_edges.setdefault(src, []).append(edge)
# 4. Repair if-else branches
for node in nodes:
@@ -108,7 +110,9 @@ class EdgeRepair:
warnings.extend(branch_warnings)
# Update outgoing index
for edge in new_edges:
outgoing_edges.setdefault(edge.get("source"), []).append(edge)
src = edge.get("source")
if src:
outgoing_edges.setdefault(src, []).append(edge)
# 5. Connect orphaned nodes (nodes with no incoming edge, except start)
new_edges, orphan_repairs = cls._connect_orphaned_nodes(nodes, edges, outgoing_edges, incoming_edges)
@@ -365,6 +369,10 @@ class EdgeRepair:
node_id = node.get("id")
node_type = node.get("type")
# Skip nodes without ID
if not node_id:
continue
# Skip end nodes
if node_type == "end":
continue

View File

@@ -89,6 +89,10 @@ class NodeRepair:
for node in nodes:
node_type = node.get("type")
# Skip nodes without type
if not node_type:
continue
# 1. Rule-based repairs
handler_name = cls._REPAIR_HANDLERS.get(node_type)
if handler_name:
@@ -248,7 +252,6 @@ class NodeRepair:
# 1. Handle Dict format (Standard) - Check for invalid types
if isinstance(outputs, dict):
changed = False
for var_name, var_config in outputs.items():
if isinstance(var_config, dict):
original_type = var_config.get("type")
@@ -256,7 +259,6 @@ class NodeRepair:
new_type = normalize_type(original_type)
if new_type != original_type:
var_config["type"] = new_type
changed = True
repairs.append(
f"Normalized type '{original_type}' to '{new_type}' "
f"for var '{var_name}' in node '{node_id}'"

View File

@@ -14,8 +14,8 @@ class ValidationHint:
field: str
message: str
severity: str # 'error', 'warning'
suggestion: str = None
node_type: str = None # Added for test compatibility
suggestion: str | None = None
node_type: str | None = None # Added for test compatibility
# Alias for potential old code using 'type' instead of 'severity'
@property