Mirror of https://github.com/langgenius/dify.git (synced 2025-12-23 15:57:29 +00:00)

Compare commits: 15 commits, `mysql-adap` ... `test/build`
| Author | SHA1 | Date |
|---|---|---|
| | bc691464a2 | |
| | d444fa1c70 | |
| | b3a4721815 | |
| | 4637435e42 | |
| | 7a2e951474 | |
| | 1e127df4ab | |
| | ca7794305b | |
| | fd255e81e1 | |
| | 09d31d1263 | |
| | 47dc26f011 | |
| | 123bb3ec08 | |
| | 90f77282e3 | |
| | 5208867ccc | |
| | edc7ccc795 | |
| | c9798f6425 | |
@@ -1422,7 +1422,10 @@ def setup_datasource_oauth_client(provider, client_params):
 @click.command("transform-datasource-credentials", help="Transform datasource credentials.")
-def transform_datasource_credentials():
+@click.option(
+    "--environment", prompt=True, help="the environment to transform datasource credentials", default="online"
+)
+def transform_datasource_credentials(environment: str):
     """
     Transform datasource credentials
     """
@@ -1433,9 +1436,14 @@ def transform_datasource_credentials():
     notion_plugin_id = "langgenius/notion_datasource"
     firecrawl_plugin_id = "langgenius/firecrawl_datasource"
     jina_plugin_id = "langgenius/jina_datasource"
-    notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)  # pyright: ignore[reportPrivateUsage]
-    firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)  # pyright: ignore[reportPrivateUsage]
-    jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)  # pyright: ignore[reportPrivateUsage]
+    if environment == "online":
+        notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)  # pyright: ignore[reportPrivateUsage]
+        firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)  # pyright: ignore[reportPrivateUsage]
+        jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)  # pyright: ignore[reportPrivateUsage]
+    else:
+        notion_plugin_unique_identifier = None
+        firecrawl_plugin_unique_identifier = None
+        jina_plugin_unique_identifier = None
     oauth_credential_type = CredentialType.OAUTH2
     api_key_credential_type = CredentialType.API_KEY
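The command now takes an `--environment` option and only resolves plugin identifiers when it targets `online`. Below is a minimal, self-contained sketch of the same click pattern (a prompting option with a default that gates behavior); the names are illustrative, not Dify's actual command:

```python
import click


@click.command("transform-example")
@click.option("--environment", prompt=True, default="online", help="the environment to transform credentials in")
def transform_example(environment: str):
    # Only resolve identifiers in the online environment; otherwise leave
    # them as None, mirroring the gated fetch in the diff above.
    identifier = "resolved-id" if environment == "online" else None
    click.echo(f"environment={environment}, identifier={identifier}")


if __name__ == "__main__":
    transform_example()
```

Running it without the flag prompts interactively (showing the `online` default); passing any other value leaves the identifiers unset.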
@@ -104,6 +104,11 @@ class AppGenerateEntity(BaseModel):

     inputs: Mapping[str, Any]
     files: Sequence[File]
+
+    # Unique identifier of the user initiating the execution.
+    # This corresponds to `Account.id` for platform users or `EndUser.id` for end users.
+    #
+    # Note: The `user_id` field does not indicate whether the user is a platform user or an end user.
     user_id: str

     # extras
@@ -1,15 +1,64 @@
 from typing import Annotated, Literal, Self, TypeAlias

 from pydantic import BaseModel, Field
 from sqlalchemy import Engine
 from sqlalchemy.orm import sessionmaker

+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
 from core.workflow.graph_engine.layers.base import GraphEngineLayer
 from core.workflow.graph_events.base import GraphEngineEvent
 from core.workflow.graph_events.graph import GraphRunPausedEvent
+from models.model import AppMode
 from repositories.api_workflow_run_repository import APIWorkflowRunRepository
 from repositories.factory import DifyAPIRepositoryFactory


+# Wrapper types for `WorkflowAppGenerateEntity` and
+# `AdvancedChatAppGenerateEntity`. These wrappers enable type discrimination
+# and correct reconstruction of the entity field during (de)serialization.
+class _WorkflowGenerateEntityWrapper(BaseModel):
+    type: Literal[AppMode.WORKFLOW] = AppMode.WORKFLOW
+    entity: WorkflowAppGenerateEntity
+
+
+class _AdvancedChatAppGenerateEntityWrapper(BaseModel):
+    type: Literal[AppMode.ADVANCED_CHAT] = AppMode.ADVANCED_CHAT
+    entity: AdvancedChatAppGenerateEntity
+
+
+_GenerateEntityUnion: TypeAlias = Annotated[
+    _WorkflowGenerateEntityWrapper | _AdvancedChatAppGenerateEntityWrapper,
+    Field(discriminator="type"),
+]
+
+
+class WorkflowResumptionContext(BaseModel):
+    """WorkflowResumptionContext captures all state necessary for resumption."""
+
+    version: Literal["1"] = "1"
+
+    # Only workflow / chatflow could be paused.
+    generate_entity: _GenerateEntityUnion
+    serialized_graph_runtime_state: str
+
+    def dumps(self) -> str:
+        return self.model_dump_json()
+
+    @classmethod
+    def loads(cls, value: str) -> Self:
+        return cls.model_validate_json(value)
+
+    def get_generate_entity(self) -> WorkflowAppGenerateEntity | AdvancedChatAppGenerateEntity:
+        return self.generate_entity.entity
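The `Field(discriminator="type")` annotation is what lets pydantic pick the right wrapper class back out of stored JSON when a paused run is reloaded. A minimal, runnable sketch of the same discriminated-union pattern with hypothetical stand-in models (pydantic v2):

```python
from typing import Annotated, Literal, TypeAlias

from pydantic import BaseModel, Field


class _ACtx(BaseModel):
    type: Literal["a"] = "a"
    payload: str


class _BCtx(BaseModel):
    type: Literal["b"] = "b"
    payload: int


# The `type` field is the discriminator: validation dispatches on it.
_Ctx: TypeAlias = Annotated[_ACtx | _BCtx, Field(discriminator="type")]


class Envelope(BaseModel):
    inner: _Ctx


restored = Envelope.model_validate_json('{"inner": {"type": "b", "payload": 7}}')
assert isinstance(restored.inner, _BCtx)  # discriminator picked the right class
```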
 class PauseStatePersistenceLayer(GraphEngineLayer):
-    def __init__(self, session_factory: Engine | sessionmaker, state_owner_user_id: str):
+    def __init__(
+        self,
+        session_factory: Engine | sessionmaker,
+        generate_entity: WorkflowAppGenerateEntity | AdvancedChatAppGenerateEntity,
+        state_owner_user_id: str,
+    ):
         """Create a PauseStatePersistenceLayer.

         The `state_owner_user_id` is used when creating state file for pause.
@@ -19,6 +68,7 @@ class PauseStatePersistenceLayer(GraphEngineLayer):
             session_factory = sessionmaker(session_factory)
         self._session_maker = session_factory
         self._state_owner_user_id = state_owner_user_id
+        self._generate_entity = generate_entity

     def _get_repo(self) -> APIWorkflowRunRepository:
         return DifyAPIRepositoryFactory.create_api_workflow_run_repository(self._session_maker)
@@ -49,13 +99,27 @@ class PauseStatePersistenceLayer(GraphEngineLayer):
             return

         assert self.graph_runtime_state is not None

+        entity_wrapper: _GenerateEntityUnion
+        if isinstance(self._generate_entity, WorkflowAppGenerateEntity):
+            entity_wrapper = _WorkflowGenerateEntityWrapper(entity=self._generate_entity)
+        elif isinstance(self._generate_entity, AdvancedChatAppGenerateEntity):
+            entity_wrapper = _AdvancedChatAppGenerateEntityWrapper(entity=self._generate_entity)
+        else:
+            raise AssertionError(f"unknown entity type: type={type(self._generate_entity)}")
+
+        state = WorkflowResumptionContext(
+            serialized_graph_runtime_state=self.graph_runtime_state.dumps(),
+            generate_entity=entity_wrapper,
+        )
+
         workflow_run_id: str | None = self.graph_runtime_state.system_variable.workflow_execution_id
         assert workflow_run_id is not None
         repo = self._get_repo()
         repo.create_workflow_pause(
             workflow_run_id=workflow_run_id,
             state_owner_user_id=self._state_owner_user_id,
-            state=self.graph_runtime_state.dumps(),
+            state=state.dumps(),
         )

     def on_graph_end(self, error: Exception | None) -> None:
@@ -1,21 +1,22 @@
 import hashlib
 import json
 import logging
 import os
+import traceback
 from datetime import datetime, timedelta
 from typing import Any, Union, cast
 from urllib.parse import urlparse

-from openinference.semconv.trace import OpenInferenceSpanKindValues, SpanAttributes
-from opentelemetry import trace
+from openinference.semconv.trace import OpenInferenceMimeTypeValues, OpenInferenceSpanKindValues, SpanAttributes
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GrpcOTLPSpanExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HttpOTLPSpanExporter
 from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.resources import Resource
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
-from opentelemetry.sdk.trace.id_generator import RandomIdGenerator
-from opentelemetry.trace import SpanContext, TraceFlags, TraceState
-from sqlalchemy import select
+from opentelemetry.semconv.trace import SpanAttributes as OTELSpanAttributes
+from opentelemetry.trace import Span, Status, StatusCode, set_span_in_context, use_span
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+from opentelemetry.util.types import AttributeValue
 from sqlalchemy.orm import sessionmaker

 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import ArizeConfig, PhoenixConfig
@@ -30,9 +31,10 @@ from core.ops.entities.trace_entity import (
     TraceTaskName,
     WorkflowTraceInfo,
 )
+from core.repositories import DifyCoreRepositoryFactory
 from extensions.ext_database import db
 from models.model import EndUser, MessageFile
-from models.workflow import WorkflowNodeExecutionModel
+from models.workflow import WorkflowNodeExecutionTriggeredFrom

 logger = logging.getLogger(__name__)
@@ -99,22 +101,45 @@ def datetime_to_nanos(dt: datetime | None) -> int:
     return int(dt.timestamp() * 1_000_000_000)


-def string_to_trace_id128(string: str | None) -> int:
-    """
-    Convert any input string into a stable 128-bit integer trace ID.
-
-    This uses SHA-256 hashing and takes the first 16 bytes (128 bits) of the digest.
-    It's suitable for generating consistent, unique identifiers from strings.
-    """
-    if string is None:
-        string = ""
-    hash_object = hashlib.sha256(string.encode())
-
-    # Take the first 16 bytes (128 bits) of the hash digest
-    digest = hash_object.digest()[:16]
-
-    # Convert to a 128-bit integer
-    return int.from_bytes(digest, byteorder="big")
+def error_to_string(error: Exception | str | None) -> str:
+    """Convert an error to a string with traceback information."""
+    error_message = "Empty Stack Trace"
+    if error:
+        if isinstance(error, Exception):
+            string_stacktrace = "".join(traceback.format_exception(error))
+            error_message = f"{error.__class__.__name__}: {error}\n\n{string_stacktrace}"
+        else:
+            error_message = str(error)
+    return error_message
+
+
+def set_span_status(current_span: Span, error: Exception | str | None = None):
+    """Set the status of the current span based on the presence of an error."""
+    if error:
+        error_string = error_to_string(error)
+        current_span.set_status(Status(StatusCode.ERROR, error_string))
+
+        if isinstance(error, Exception):
+            current_span.record_exception(error)
+        else:
+            exception_type = error.__class__.__name__
+            exception_message = str(error)
+            if not exception_message:
+                exception_message = repr(error)
+            attributes: dict[str, AttributeValue] = {
+                OTELSpanAttributes.EXCEPTION_TYPE: exception_type,
+                OTELSpanAttributes.EXCEPTION_MESSAGE: exception_message,
+                OTELSpanAttributes.EXCEPTION_ESCAPED: False,
+                OTELSpanAttributes.EXCEPTION_STACKTRACE: error_string,
+            }
+            current_span.add_event(name="exception", attributes=attributes)
+    else:
+        current_span.set_status(Status(StatusCode.OK))
+
+
+def safe_json_dumps(obj: Any) -> str:
+    """A convenience wrapper around `json.dumps` that ensures that any object can be safely encoded."""
+    return json.dumps(obj, default=str, ensure_ascii=False)
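These helpers centralize error formatting and JSON encoding for the tracer. A standard-library-only check of the two pure ones, restated here so they can be exercised outside Dify (`set_span_status` additionally needs an OpenTelemetry span; assumes Python 3.10+ for single-argument `traceback.format_exception`):

```python
import json
import traceback
from datetime import datetime


def error_to_string(error):
    # Same logic as the diff: exceptions get a fully formatted traceback,
    # plain strings pass through, and None yields a placeholder.
    error_message = "Empty Stack Trace"
    if error:
        if isinstance(error, Exception):
            stack = "".join(traceback.format_exception(error))
            error_message = f"{error.__class__.__name__}: {error}\n\n{stack}"
        else:
            error_message = str(error)
    return error_message


def safe_json_dumps(obj):
    # default=str makes non-JSON types (datetime, UUID, ...) fall back to
    # their string form instead of raising TypeError.
    return json.dumps(obj, default=str, ensure_ascii=False)


try:
    1 / 0
except ZeroDivisionError as exc:
    assert error_to_string(exc).startswith("ZeroDivisionError: division by zero")

print(safe_json_dumps({"at": datetime(2025, 1, 1), "outputs": {"text": "你好"}}))
```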
@@ -131,9 +156,12 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         self.tracer, self.processor = setup_tracer(arize_phoenix_config)
         self.project = arize_phoenix_config.project
         self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")
+        self.propagator = TraceContextTextMapPropagator()
+        self.dify_trace_ids: set[str] = set()

     def trace(self, trace_info: BaseTraceInfo):
-        logger.info("[Arize/Phoenix] Trace: %s", trace_info)
+        logger.info("[Arize/Phoenix] Trace Entity Info: %s", trace_info)
+        logger.info("[Arize/Phoenix] Trace Entity Type: %s", type(trace_info))
         try:
             if isinstance(trace_info, WorkflowTraceInfo):
                 self.workflow_trace(trace_info)
@@ -151,7 +179,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 self.generate_name_trace(trace_info)

         except Exception as e:
-            logger.error("[Arize/Phoenix] Error in the trace: %s", str(e), exc_info=True)
+            logger.error("[Arize/Phoenix] Trace Entity Error: %s", str(e), exc_info=True)
             raise
     def workflow_trace(self, trace_info: WorkflowTraceInfo):
@@ -166,15 +194,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         workflow_metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.workflow_run_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id or trace_info.workflow_run_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         workflow_span = self.tracer.start_span(
             name=TraceTaskName.WORKFLOW_TRACE.value,
@@ -186,31 +208,58 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

+        # Through workflow_run_id, get all_nodes_execution using repository
+        session_factory = sessionmaker(bind=db.engine)
+
+        # Find the app's creator account
+        app_id = trace_info.metadata.get("app_id")
+        if not app_id:
+            raise ValueError("No app_id found in trace_info metadata")
+
+        service_account = self.get_service_account_with_tenant(app_id)
+
+        workflow_node_execution_repository = DifyCoreRepositoryFactory.create_workflow_node_execution_repository(
+            session_factory=session_factory,
+            user=service_account,
+            app_id=app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
+        )
+
+        # Get all executions for this workflow run
+        workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run(
+            workflow_run_id=trace_info.workflow_run_id
+        )

         try:
             # Process workflow nodes
-            for node_execution in self._get_workflow_nodes(trace_info.workflow_run_id):
+            for node_execution in workflow_node_executions:
+                tenant_id = trace_info.tenant_id  # Use from trace_info instead
+                app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
+                inputs_value = node_execution.inputs or {}
+                outputs_value = node_execution.outputs or {}

                 created_at = node_execution.created_at or datetime.now()
                 elapsed_time = node_execution.elapsed_time
                 finished_at = created_at + timedelta(seconds=elapsed_time)

-                process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+                process_data = node_execution.process_data or {}
+                execution_metadata = node_execution.metadata or {}
+                node_metadata = {str(k): v for k, v in execution_metadata.items()}

-                node_metadata = {
-                    "node_id": node_execution.id,
-                    "node_type": node_execution.node_type,
-                    "node_status": node_execution.status,
-                    "tenant_id": node_execution.tenant_id,
-                    "app_id": node_execution.app_id,
-                    "app_name": node_execution.title,
-                    "status": node_execution.status,
-                    "level": "ERROR" if node_execution.status != "succeeded" else "DEFAULT",
-                }
-
-                if node_execution.execution_metadata:
-                    node_metadata.update(json.loads(node_execution.execution_metadata))
+                node_metadata.update(
+                    {
+                        "node_id": node_execution.id,
+                        "node_type": node_execution.node_type,
+                        "node_status": node_execution.status,
+                        "tenant_id": tenant_id,
+                        "app_id": app_id,
+                        "app_name": node_execution.title,
+                        "status": node_execution.status,
+                        "level": "ERROR" if node_execution.status == "failed" else "DEFAULT",
+                    }
+                )

                 # Determine the correct span kind based on node type
                 span_kind = OpenInferenceSpanKindValues.CHAIN
@@ -223,8 +272,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                     if model:
                         node_metadata["ls_model_name"] = model

-                    outputs = json.loads(node_execution.outputs).get("usage", {}) if "outputs" in node_execution else {}
-                    usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
+                    usage_data = (
+                        process_data.get("usage", {}) if "usage" in process_data else outputs_value.get("usage", {})
+                    )
                     if usage_data:
                         node_metadata["total_tokens"] = usage_data.get("total_tokens", 0)
                         node_metadata["prompt_tokens"] = usage_data.get("prompt_tokens", 0)
@@ -236,17 +286,20 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 else:
                     span_kind = OpenInferenceSpanKindValues.CHAIN

+                workflow_span_context = set_span_in_context(workflow_span)
                 node_span = self.tracer.start_span(
                     name=node_execution.node_type,
                     attributes={
-                        SpanAttributes.INPUT_VALUE: node_execution.inputs or "{}",
-                        SpanAttributes.OUTPUT_VALUE: node_execution.outputs or "{}",
+                        SpanAttributes.INPUT_VALUE: safe_json_dumps(inputs_value),
+                        SpanAttributes.INPUT_MIME_TYPE: OpenInferenceMimeTypeValues.JSON.value,
+                        SpanAttributes.OUTPUT_VALUE: safe_json_dumps(outputs_value),
+                        SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.JSON.value,
                         SpanAttributes.OPENINFERENCE_SPAN_KIND: span_kind.value,
-                        SpanAttributes.METADATA: json.dumps(node_metadata, ensure_ascii=False),
+                        SpanAttributes.METADATA: safe_json_dumps(node_metadata),
                         SpanAttributes.SESSION_ID: trace_info.conversation_id or "",
                     },
                     start_time=datetime_to_nanos(created_at),
-                    context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+                    context=workflow_span_context,
                 )

                 try:
@@ -260,11 +313,8 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                         llm_attributes[SpanAttributes.LLM_PROVIDER] = provider
                     if model:
                         llm_attributes[SpanAttributes.LLM_MODEL_NAME] = model
-                    outputs = (
-                        json.loads(node_execution.outputs).get("usage", {}) if "outputs" in node_execution else {}
-                    )
                     usage_data = (
-                        process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {})
+                        process_data.get("usage", {}) if "usage" in process_data else outputs_value.get("usage", {})
                     )
                     if usage_data:
                         llm_attributes[SpanAttributes.LLM_TOKEN_COUNT_TOTAL] = usage_data.get("total_tokens", 0)
@@ -275,8 +325,16 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                         llm_attributes.update(self._construct_llm_attributes(process_data.get("prompts", [])))
                     node_span.set_attributes(llm_attributes)
                 finally:
+                    if node_execution.status == "failed":
+                        set_span_status(node_span, node_execution.error)
+                    else:
+                        set_span_status(node_span)
                     node_span.end(end_time=datetime_to_nanos(finished_at))
         finally:
+            if trace_info.error:
+                set_span_status(workflow_span, trace_info.error)
+            else:
+                set_span_status(workflow_span)
             workflow_span.end(end_time=datetime_to_nanos(trace_info.end_time))
     def message_trace(self, trace_info: MessageTraceInfo):
@@ -322,34 +380,18 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id,
         }

-        trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.message_id)
-        message_span_id = RandomIdGenerator().generate_span_id()
-        span_context = SpanContext(
-            trace_id=trace_id,
-            span_id=message_span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         message_span = self.tracer.start_span(
             name=TraceTaskName.MESSAGE_TRACE.value,
             attributes=attributes,
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+            context=root_span_context,
         )

         try:
-            if trace_info.error:
-                message_span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
-
             # Convert outputs to string based on type
             if isinstance(trace_info.outputs, dict | list):
                 outputs_str = json.dumps(trace_info.outputs, ensure_ascii=False)
@@ -383,26 +425,26 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             if model_params := metadata_dict.get("model_parameters"):
                 llm_attributes[SpanAttributes.LLM_INVOCATION_PARAMETERS] = json.dumps(model_params)

+            message_span_context = set_span_in_context(message_span)
             llm_span = self.tracer.start_span(
                 name="llm",
                 attributes=llm_attributes,
                 start_time=datetime_to_nanos(trace_info.start_time),
-                context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+                context=message_span_context,
             )

             try:
-                if trace_info.error:
-                    llm_span.add_event(
-                        "exception",
-                        attributes={
-                            "exception.message": trace_info.error,
-                            "exception.type": "Error",
-                            "exception.stacktrace": trace_info.error,
-                        },
-                    )
+                if trace_info.message_data.error:
+                    set_span_status(llm_span, trace_info.message_data.error)
+                else:
+                    set_span_status(llm_span)
             finally:
                 llm_span.end(end_time=datetime_to_nanos(trace_info.end_time))
         finally:
+            if trace_info.error:
+                set_span_status(message_span, trace_info.error)
+            else:
+                set_span_status(message_span)
             message_span.end(end_time=datetime_to_nanos(trace_info.end_time))
     def moderation_trace(self, trace_info: ModerationTraceInfo):
@@ -418,15 +460,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.MODERATION_TRACE.value,
@@ -445,19 +481,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))
@@ -480,15 +511,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
@@ -499,19 +524,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.METADATA: json.dumps(metadata, ensure_ascii=False),
             },
             start_time=datetime_to_nanos(start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
+                set_span_status(span, trace_info.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(end_time))
@@ -533,15 +553,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
@@ -554,19 +568,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 "end_time": end_time.isoformat() if end_time else "",
             },
             start_time=datetime_to_nanos(start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
        )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(end_time))
@@ -580,20 +589,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             "tool_config": json.dumps(trace_info.tool_config, ensure_ascii=False),
         }

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        tool_span_id = RandomIdGenerator().generate_span_id()
-        logger.info("[Arize/Phoenix] Creating tool trace with trace_id: %s, span_id: %s", trace_id, tool_span_id)
-
-        # Create span context with the same trace_id as the parent
-        # todo: Create with the appropriate parent span context, so that the tool span is
-        # a child of the appropriate span (e.g. message span)
-        span_context = SpanContext(
-            trace_id=trace_id,
-            span_id=tool_span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         tool_params_str = (
             json.dumps(trace_info.tool_parameters, ensure_ascii=False)
@@ -612,19 +610,14 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 SpanAttributes.TOOL_PARAMETERS: tool_params_str,
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(span_context)),
+            context=root_span_context,
         )

         try:
             if trace_info.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.error,
-                    },
-                )
+                set_span_status(span, trace_info.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))
@@ -641,15 +634,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
         }
         metadata.update(trace_info.metadata)

-        trace_id = string_to_trace_id128(trace_info.message_id)
-        span_id = RandomIdGenerator().generate_span_id()
-        context = SpanContext(
-            trace_id=trace_id,
-            span_id=span_id,
-            is_remote=False,
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            trace_state=TraceState(),
-        )
+        dify_trace_id = trace_info.trace_id or trace_info.message_id or trace_info.conversation_id
+        self.ensure_root_span(dify_trace_id)
+        root_span_context = self.propagator.extract(carrier=self.carrier)

         span = self.tracer.start_span(
             name=TraceTaskName.GENERATE_NAME_TRACE.value,
@@ -663,22 +650,34 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
                 "end_time": trace_info.end_time.isoformat() if trace_info.end_time else "",
             },
             start_time=datetime_to_nanos(trace_info.start_time),
-            context=trace.set_span_in_context(trace.NonRecordingSpan(context)),
+            context=root_span_context,
         )

         try:
             if trace_info.message_data.error:
-                span.add_event(
-                    "exception",
-                    attributes={
-                        "exception.message": trace_info.message_data.error,
-                        "exception.type": "Error",
-                        "exception.stacktrace": trace_info.message_data.error,
-                    },
-                )
+                set_span_status(span, trace_info.message_data.error)
+            else:
+                set_span_status(span)
         finally:
             span.end(end_time=datetime_to_nanos(trace_info.end_time))

+    def ensure_root_span(self, dify_trace_id: str | None):
+        """Ensure a unique root span exists for the given Dify trace ID."""
+        if str(dify_trace_id) not in self.dify_trace_ids:
+            self.carrier: dict[str, str] = {}
+
+            root_span = self.tracer.start_span(name="Dify")
+            root_span.set_attribute(SpanAttributes.OPENINFERENCE_SPAN_KIND, OpenInferenceSpanKindValues.CHAIN.value)
+            root_span.set_attribute("dify_project_name", str(self.project))
+            root_span.set_attribute("dify_trace_id", str(dify_trace_id))
+
+            with use_span(root_span, end_on_exit=False):
+                self.propagator.inject(carrier=self.carrier)
+
+            set_span_status(root_span)
+            root_span.end()
+            self.dify_trace_ids.add(str(dify_trace_id))
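`ensure_root_span` replaces the old hand-built `SpanContext` objects: it starts one real root span per Dify trace ID, injects its context into a W3C `traceparent` carrier, and later spans extract that carrier to join the same trace. A minimal sketch of the pattern, assuming `opentelemetry-sdk` is installed:

```python
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.trace import use_span
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

tracer = TracerProvider().get_tracer("sketch")
propagator = TraceContextTextMapPropagator()

# Start the root once and capture its context in a plain dict carrier.
carrier: dict[str, str] = {}
root_span = tracer.start_span(name="Dify")
with use_span(root_span, end_on_exit=False):
    propagator.inject(carrier=carrier)  # writes the `traceparent` entry
root_span.end()

# Later (even in another worker): attach a child to the same trace.
child_ctx = propagator.extract(carrier=carrier)
child = tracer.start_span(name="workflow", context=child_ctx)
assert child.get_span_context().trace_id == root_span.get_span_context().trace_id
child.end()
```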
     def api_check(self):
         try:
             with self.tracer.start_span("api_check") as span:
@@ -698,26 +697,6 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
             logger.info("[Arize/Phoenix] Get run url failed: %s", str(e), exc_info=True)
             raise ValueError(f"[Arize/Phoenix] Get run url failed: {str(e)}")

-    def _get_workflow_nodes(self, workflow_run_id: str):
-        """Helper method to get workflow nodes"""
-        workflow_nodes = db.session.scalars(
-            select(
-                WorkflowNodeExecutionModel.id,
-                WorkflowNodeExecutionModel.tenant_id,
-                WorkflowNodeExecutionModel.app_id,
-                WorkflowNodeExecutionModel.title,
-                WorkflowNodeExecutionModel.node_type,
-                WorkflowNodeExecutionModel.status,
-                WorkflowNodeExecutionModel.inputs,
-                WorkflowNodeExecutionModel.outputs,
-                WorkflowNodeExecutionModel.created_at,
-                WorkflowNodeExecutionModel.elapsed_time,
-                WorkflowNodeExecutionModel.process_data,
-                WorkflowNodeExecutionModel.execution_metadata,
-            ).where(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
-        ).all()
-        return workflow_nodes
-
     def _construct_llm_attributes(self, prompts: dict | list | str | None) -> dict[str, str]:
         """Helper method to construct LLM attributes with passed prompts."""
         attributes = {}
@@ -16,7 +16,6 @@ from uuid import uuid4
 from flask import Flask
 from typing_extensions import override

-from core.workflow.enums import NodeType
 from core.workflow.graph import Graph
 from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent
 from core.workflow.nodes.base.node import Node
@@ -108,8 +107,8 @@ class Worker(threading.Thread):
             except Exception as e:
                 error_event = NodeRunFailedEvent(
                     id=str(uuid4()),
-                    node_id="unknown",
-                    node_type=NodeType.CODE,
+                    node_id=node.id,
+                    node_type=node.node_type,
                     in_iteration_id=None,
                     error=str(e),
                     start_at=datetime.now(),
@@ -48,7 +48,6 @@ def add_document_to_index_task(dataset_document_id: str):
         db.session.query(DocumentSegment)
         .where(
             DocumentSegment.document_id == dataset_document.id,
-            DocumentSegment.enabled == False,
             DocumentSegment.status == "completed",
         )
         .order_by(DocumentSegment.position.asc())
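Dropping the `enabled == False` predicate widens the query to every completed segment for the document, which is what the reworked tests below assert. A self-contained sketch of the behavior change against a toy table (SQLAlchemy 2.0; the model here is illustrative, not Dify's schema):

```python
from sqlalchemy import Boolean, String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Segment(Base):
    __tablename__ = "segments"
    id: Mapped[int] = mapped_column(primary_key=True)
    enabled: Mapped[bool] = mapped_column(Boolean)
    status: Mapped[str] = mapped_column(String)
    position: Mapped[int]


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([
        Segment(id=1, enabled=False, status="completed", position=0),
        Segment(id=2, enabled=True, status="completed", position=1),
        Segment(id=3, enabled=False, status="processing", position=2),
    ])
    # The old filter (enabled == False AND status == "completed") would
    # return only id=1; the new filter returns ids 1 and 2, by position.
    rows = session.scalars(
        select(Segment)
        .where(Segment.status == "completed")
        .order_by(Segment.position.asc())
    ).all()
    assert [s.id for s in rows] == [1, 2]
```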
@@ -25,7 +25,12 @@ import pytest
 from sqlalchemy import Engine, delete, select
 from sqlalchemy.orm import Session

-from core.app.layers.pause_state_persist_layer import PauseStatePersistenceLayer
+from core.app.app_config.entities import WorkflowUIBasedAppConfig
+from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
+from core.app.layers.pause_state_persist_layer import (
+    PauseStatePersistenceLayer,
+    WorkflowResumptionContext,
+)
 from core.model_runtime.entities.llm_entities import LLMUsage
 from core.workflow.entities.pause_reason import SchedulingPause
 from core.workflow.enums import WorkflowExecutionStatus
@@ -39,7 +44,7 @@ from extensions.ext_storage import storage
 from libs.datetime_utils import naive_utc_now
 from models import Account
 from models import WorkflowPause as WorkflowPauseModel
-from models.model import UploadFile
+from models.model import AppMode, UploadFile
 from models.workflow import Workflow, WorkflowRun
 from services.file_service import FileService
 from services.workflow_run_service import WorkflowRunService
@@ -226,11 +231,39 @@ class TestPauseStatePersistenceLayerTestContainers:

         return ReadOnlyGraphRuntimeStateWrapper(graph_runtime_state)

+    def _create_generate_entity(
+        self,
+        workflow_execution_id: str | None = None,
+        user_id: str | None = None,
+        workflow_id: str | None = None,
+    ) -> WorkflowAppGenerateEntity:
+        execution_id = workflow_execution_id or getattr(self, "test_workflow_run_id", str(uuid.uuid4()))
+        wf_id = workflow_id or getattr(self, "test_workflow_id", str(uuid.uuid4()))
+        tenant_id = getattr(self, "test_tenant_id", "tenant-123")
+        app_id = getattr(self, "test_app_id", "app-123")
+        app_config = WorkflowUIBasedAppConfig(
+            tenant_id=str(tenant_id),
+            app_id=str(app_id),
+            app_mode=AppMode.WORKFLOW,
+            workflow_id=str(wf_id),
+        )
+        return WorkflowAppGenerateEntity(
+            task_id=str(uuid.uuid4()),
+            app_config=app_config,
+            inputs={},
+            files=[],
+            user_id=user_id or getattr(self, "test_user_id", str(uuid.uuid4())),
+            stream=False,
+            invoke_from=InvokeFrom.DEBUGGER,
+            workflow_execution_id=execution_id,
+        )
+
     def _create_pause_state_persistence_layer(
         self,
         workflow_run: WorkflowRun | None = None,
         workflow: Workflow | None = None,
         state_owner_user_id: str | None = None,
+        generate_entity: WorkflowAppGenerateEntity | None = None,
     ) -> PauseStatePersistenceLayer:
         """Create PauseStatePersistenceLayer with real dependencies."""
         owner_id = state_owner_user_id
@@ -244,10 +277,23 @@ class TestPauseStatePersistenceLayerTestContainers:

         assert owner_id is not None
         owner_id = str(owner_id)
+        workflow_execution_id = (
+            workflow_run.id if workflow_run is not None else getattr(self, "test_workflow_run_id", None)
+        )
+        assert workflow_execution_id is not None
+        workflow_id = workflow.id if workflow is not None else getattr(self, "test_workflow_id", None)
+        assert workflow_id is not None
+        entity_user_id = getattr(self, "test_user_id", owner_id)
+        entity = generate_entity or self._create_generate_entity(
+            workflow_execution_id=str(workflow_execution_id),
+            user_id=entity_user_id,
+            workflow_id=str(workflow_id),
+        )

         return PauseStatePersistenceLayer(
             session_factory=self.session.get_bind(),
             state_owner_user_id=owner_id,
+            generate_entity=entity,
         )

     def test_complete_pause_flow_with_real_dependencies(self, db_session_with_containers):
@@ -297,10 +343,15 @@ class TestPauseStatePersistenceLayerTestContainers:
         assert pause_model.resumed_at is None

         storage_content = storage.load(pause_model.state_object_key).decode()
+        resumption_context = WorkflowResumptionContext.loads(storage_content)
+        assert resumption_context.version == "1"
+        assert resumption_context.serialized_graph_runtime_state == graph_runtime_state.dumps()
         expected_state = json.loads(graph_runtime_state.dumps())
-        actual_state = json.loads(storage_content)
-
+        actual_state = json.loads(resumption_context.serialized_graph_runtime_state)
         assert actual_state == expected_state
+        persisted_entity = resumption_context.get_generate_entity()
+        assert isinstance(persisted_entity, WorkflowAppGenerateEntity)
+        assert persisted_entity.workflow_execution_id == self.test_workflow_run_id

     def test_state_persistence_and_retrieval(self, db_session_with_containers):
         """Test that pause state can be persisted and retrieved correctly."""
@@ -341,13 +392,15 @@ class TestPauseStatePersistenceLayerTestContainers:
         assert pause_entity.workflow_execution_id == self.test_workflow_run_id

         state_bytes = pause_entity.get_state()
-        retrieved_state = json.loads(state_bytes.decode())
+        resumption_context = WorkflowResumptionContext.loads(state_bytes.decode())
+        retrieved_state = json.loads(resumption_context.serialized_graph_runtime_state)
         expected_state = json.loads(graph_runtime_state.dumps())

         assert retrieved_state == expected_state
         assert retrieved_state["outputs"] == complex_outputs
         assert retrieved_state["total_tokens"] == 250
         assert retrieved_state["node_run_steps"] == 10
+        assert resumption_context.get_generate_entity().workflow_execution_id == self.test_workflow_run_id

     def test_database_transaction_handling(self, db_session_with_containers):
         """Test that database transactions are handled correctly."""
@@ -410,7 +463,9 @@ class TestPauseStatePersistenceLayerTestContainers:

         # Verify content in storage
         storage_content = storage.load(pause_model.state_object_key).decode()
-        assert storage_content == graph_runtime_state.dumps()
+        resumption_context = WorkflowResumptionContext.loads(storage_content)
+        assert resumption_context.serialized_graph_runtime_state == graph_runtime_state.dumps()
+        assert resumption_context.get_generate_entity().workflow_execution_id == self.test_workflow_run_id

     def test_workflow_with_different_creators(self, db_session_with_containers):
         """Test pause state with workflows created by different users."""
@@ -474,6 +529,8 @@ class TestPauseStatePersistenceLayerTestContainers:
         # Verify the state owner is the workflow creator
         pause_entity = self.workflow_run_service._workflow_run_repo.get_workflow_pause(different_workflow_run.id)
         assert pause_entity is not None
+        resumption_context = WorkflowResumptionContext.loads(pause_entity.get_state().decode())
+        assert resumption_context.get_generate_entity().workflow_execution_id == different_workflow_run.id

     def test_layer_ignores_non_pause_events(self, db_session_with_containers):
         """Test that layer ignores non-pause events."""
@@ -256,7 +256,7 @@ class TestAddDocumentToIndexTask:
         """
         # Arrange: Use non-existent document ID
         fake = Faker()
-        non_existent_id = fake.uuid4()
+        non_existent_id = str(fake.uuid4())

         # Act: Execute the task with non-existent document
         add_document_to_index_task(non_existent_id)
@@ -282,7 +282,7 @@ class TestAddDocumentToIndexTask:
         - Redis cache key not affected
         """
         # Arrange: Create test data with invalid indexing status
-        dataset, document = self._create_test_dataset_and_document(
+        _, document = self._create_test_dataset_and_document(
             db_session_with_containers, mock_external_service_dependencies
         )

@@ -417,15 +417,15 @@ class TestAddDocumentToIndexTask:
         # Verify redis cache was cleared
         assert redis_client.exists(indexing_cache_key) == 0

-    def test_add_document_to_index_with_no_segments_to_process(
+    def test_add_document_to_index_with_already_enabled_segments(
         self, db_session_with_containers, mock_external_service_dependencies
     ):
         """
-        Test document indexing when no segments need processing.
+        Test document indexing when segments are already enabled.

         This test verifies:
         - Proper handling when all segments are already enabled
-        - Index processing still occurs but with empty documents list
+        - Segments with status="completed" are processed regardless of enabled status
+        - Index processing occurs with all completed segments
         - Auto disable log deletion still occurs
         - Redis cache is cleared
         """
@@ -465,15 +465,16 @@ class TestAddDocumentToIndexTask:
         # Act: Execute the task
         add_document_to_index_task(document.id)

-        # Assert: Verify index processing occurred but with empty documents list
+        # Assert: Verify index processing occurred with all completed segments
         mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
         mock_external_service_dependencies["index_processor"].load.assert_called_once()

-        # Verify the load method was called with empty documents list
+        # Verify the load method was called with all completed segments
+        # (implementation doesn't filter by enabled status, only by status="completed")
         call_args = mock_external_service_dependencies["index_processor"].load.call_args
         assert call_args is not None
         documents = call_args[0][1]  # Second argument should be documents list
-        assert len(documents) == 0  # No segments to process
+        assert len(documents) == 3  # All completed segments are processed

         # Verify redis cache was cleared
         assert redis_client.exists(indexing_cache_key) == 0
@@ -499,7 +500,7 @@ class TestAddDocumentToIndexTask:
         # Create some auto disable log entries
         fake = Faker()
         auto_disable_logs = []
-        for i in range(2):
+        for _ in range(2):
             log_entry = DatasetAutoDisableLog(
                 id=fake.uuid4(),
                 tenant_id=document.tenant_id,
@@ -595,9 +596,11 @@ class TestAddDocumentToIndexTask:
         Test segment filtering with various edge cases.

         This test verifies:
-        - Only segments with enabled=False and status="completed" are processed
+        - Only segments with status="completed" are processed (regardless of enabled status)
+        - Segments with status!="completed" are NOT processed
         - Segments are ordered by position correctly
         - Mixed segment states are handled properly
+        - All segments are updated to enabled=True after processing
         - Redis cache key deletion
         """
         # Arrange: Create test data
@@ -628,7 +631,8 @@ class TestAddDocumentToIndexTask:
         db.session.add(segment1)
         segments.append(segment1)

-        # Segment 2: Should NOT be processed (enabled=True, status="completed")
+        # Segment 2: Should be processed (enabled=True, status="completed")
+        # Note: Implementation doesn't filter by enabled status, only by status="completed"
         segment2 = DocumentSegment(
             id=fake.uuid4(),
             tenant_id=document.tenant_id,
@@ -640,7 +644,7 @@ class TestAddDocumentToIndexTask:
             tokens=len(fake.text(max_nb_chars=200).split()) * 2,
             index_node_id="node_1",
             index_node_hash="hash_1",
-            enabled=True,  # Already enabled
+            enabled=True,  # Already enabled, but will still be processed
             status="completed",
             created_by=document.created_by,
         )
@@ -702,11 +706,14 @@ class TestAddDocumentToIndexTask:
         call_args = mock_external_service_dependencies["index_processor"].load.call_args
         assert call_args is not None
         documents = call_args[0][1]  # Second argument should be documents list
-        assert len(documents) == 2  # Only 2 segments should be processed
+        assert len(documents) == 3  # 3 segments with status="completed" should be processed

         # Verify correct segments were processed (by position order)
-        assert documents[0].metadata["doc_id"] == "node_0"  # position 0
-        assert documents[1].metadata["doc_id"] == "node_3"  # position 3
+        # Segments 1, 2, 4 should be processed (positions 0, 1, 3)
+        # Segment 3 is skipped (position 2, status="processing")
+        assert documents[0].metadata["doc_id"] == "node_0"  # segment1, position 0
+        assert documents[1].metadata["doc_id"] == "node_1"  # segment2, position 1
+        assert documents[2].metadata["doc_id"] == "node_3"  # segment4, position 3

         # Verify database state changes
         db.session.refresh(document)
@@ -717,7 +724,7 @@ class TestAddDocumentToIndexTask:

         # All segments should be enabled because the task updates ALL segments for the document
         assert segment1.enabled is True
-        assert segment2.enabled is True  # Was already enabled, now updated to True
+        assert segment2.enabled is True  # Was already enabled, stays True
         assert segment3.enabled is True  # Was not processed but still updated to True
         assert segment4.enabled is True
@@ -4,7 +4,14 @@ from unittest.mock import Mock

 import pytest

-from core.app.layers.pause_state_persist_layer import PauseStatePersistenceLayer
+from core.app.app_config.entities import WorkflowUIBasedAppConfig
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
+from core.app.layers.pause_state_persist_layer import (
+    PauseStatePersistenceLayer,
+    WorkflowResumptionContext,
+    _AdvancedChatAppGenerateEntityWrapper,
+    _WorkflowGenerateEntityWrapper,
+)
 from core.variables.segments import Segment
 from core.workflow.entities.pause_reason import SchedulingPause
 from core.workflow.graph_engine.entities.commands import GraphEngineCommand
@@ -15,6 +22,7 @@ from core.workflow.graph_events.graph import (
     GraphRunSucceededEvent,
 )
 from core.workflow.runtime.graph_runtime_state_protocol import ReadOnlyVariablePool
+from models.model import AppMode
 from repositories.factory import DifyAPIRepositoryFactory


@@ -170,6 +178,25 @@ class MockCommandChannel:
 class TestPauseStatePersistenceLayer:
     """Unit tests for PauseStatePersistenceLayer."""

+    @staticmethod
+    def _create_generate_entity(workflow_execution_id: str = "run-123") -> WorkflowAppGenerateEntity:
+        app_config = WorkflowUIBasedAppConfig(
+            tenant_id="tenant-123",
+            app_id="app-123",
+            app_mode=AppMode.WORKFLOW,
+            workflow_id="workflow-123",
+        )
+        return WorkflowAppGenerateEntity(
+            task_id="task-123",
+            app_config=app_config,
+            inputs={},
+            files=[],
+            user_id="user-123",
+            stream=False,
+            invoke_from=InvokeFrom.DEBUGGER,
+            workflow_execution_id=workflow_execution_id,
+        )
+
     def test_init_with_dependency_injection(self):
         session_factory = Mock(name="session_factory")
         state_owner_user_id = "user-123"
@@ -177,6 +204,7 @@ class TestPauseStatePersistenceLayer:
         layer = PauseStatePersistenceLayer(
             session_factory=session_factory,
             state_owner_user_id=state_owner_user_id,
+            generate_entity=self._create_generate_entity(),
         )

         assert layer._session_maker is session_factory
@@ -186,7 +214,11 @@ class TestPauseStatePersistenceLayer:

     def test_initialize_sets_dependencies(self):
         session_factory = Mock(name="session_factory")
-        layer = PauseStatePersistenceLayer(session_factory=session_factory, state_owner_user_id="owner")
+        layer = PauseStatePersistenceLayer(
+            session_factory=session_factory,
+            state_owner_user_id="owner",
+            generate_entity=self._create_generate_entity(),
+        )

         graph_runtime_state = MockReadOnlyGraphRuntimeState()
         command_channel = MockCommandChannel()
@@ -198,7 +230,12 @@ class TestPauseStatePersistenceLayer:

     def test_on_event_with_graph_run_paused_event(self, monkeypatch: pytest.MonkeyPatch):
         session_factory = Mock(name="session_factory")
-        layer = PauseStatePersistenceLayer(session_factory=session_factory, state_owner_user_id="owner-123")
+        generate_entity = self._create_generate_entity(workflow_execution_id="run-123")
+        layer = PauseStatePersistenceLayer(
+            session_factory=session_factory,
+            state_owner_user_id="owner-123",
+            generate_entity=generate_entity,
+        )

         mock_repo = Mock()
         mock_factory = Mock(return_value=mock_repo)
@@ -221,12 +258,20 @@ class TestPauseStatePersistenceLayer:
         mock_repo.create_workflow_pause.assert_called_once_with(
             workflow_run_id="run-123",
             state_owner_user_id="owner-123",
-            state=expected_state,
+            state=mock_repo.create_workflow_pause.call_args.kwargs["state"],
         )
+        serialized_state = mock_repo.create_workflow_pause.call_args.kwargs["state"]
+        resumption_context = WorkflowResumptionContext.loads(serialized_state)
+        assert resumption_context.serialized_graph_runtime_state == expected_state
+        assert resumption_context.get_generate_entity().model_dump() == generate_entity.model_dump()

     def test_on_event_ignores_non_paused_events(self, monkeypatch: pytest.MonkeyPatch):
         session_factory = Mock(name="session_factory")
-        layer = PauseStatePersistenceLayer(session_factory=session_factory, state_owner_user_id="owner-123")
+        layer = PauseStatePersistenceLayer(
+            session_factory=session_factory,
+            state_owner_user_id="owner-123",
+            generate_entity=self._create_generate_entity(),
+        )

         mock_repo = Mock()
         mock_factory = Mock(return_value=mock_repo)
@@ -250,7 +295,11 @@ class TestPauseStatePersistenceLayer:

     def test_on_event_raises_attribute_error_when_graph_runtime_state_is_none(self):
         session_factory = Mock(name="session_factory")
-        layer = PauseStatePersistenceLayer(session_factory=session_factory, state_owner_user_id="owner-123")
+        layer = PauseStatePersistenceLayer(
+            session_factory=session_factory,
+            state_owner_user_id="owner-123",
+            generate_entity=self._create_generate_entity(),
+        )

         event = TestDataFactory.create_graph_run_paused_event()

@@ -259,7 +308,11 @@ class TestPauseStatePersistenceLayer:

     def test_on_event_asserts_when_workflow_execution_id_missing(self, monkeypatch: pytest.MonkeyPatch):
         session_factory = Mock(name="session_factory")
-        layer = PauseStatePersistenceLayer(session_factory=session_factory, state_owner_user_id="owner-123")
+        layer = PauseStatePersistenceLayer(
+            session_factory=session_factory,
+            state_owner_user_id="owner-123",
+            generate_entity=self._create_generate_entity(),
+        )

         mock_repo = Mock()
         mock_factory = Mock(return_value=mock_repo)
@@ -276,3 +329,82 @@ class TestPauseStatePersistenceLayer:

         mock_factory.assert_not_called()
         mock_repo.create_workflow_pause.assert_not_called()
+
+
+def _build_workflow_generate_entity_for_roundtrip() -> WorkflowResumptionContext:
+    """Create a WorkflowAppGenerateEntity with realistic data for WorkflowResumptionContext tests."""
+    app_config = WorkflowUIBasedAppConfig(
+        tenant_id="tenant-roundtrip",
+        app_id="app-roundtrip",
+        app_mode=AppMode.WORKFLOW,
+        workflow_id="workflow-roundtrip",
+    )
+    serialized_state = json.dumps({"state": "workflow"})
+
+    return WorkflowResumptionContext(
+        serialized_graph_runtime_state=serialized_state,
+        generate_entity=_WorkflowGenerateEntityWrapper(
+            entity=WorkflowAppGenerateEntity(
+                task_id="workflow-task",
+                app_config=app_config,
+                inputs={"input_key": "input_value"},
+                files=[],
+                user_id="user-roundtrip",
+                stream=False,
+                invoke_from=InvokeFrom.DEBUGGER,
+                workflow_execution_id="workflow-exec-roundtrip",
+            )
+        ),
+    )
+
+
+def _build_advanced_chat_generate_entity_for_roundtrip() -> WorkflowResumptionContext:
+    """Create an AdvancedChatAppGenerateEntity with realistic data for WorkflowResumptionContext tests."""
+    app_config = WorkflowUIBasedAppConfig(
+        tenant_id="tenant-advanced",
+        app_id="app-advanced",
+        app_mode=AppMode.ADVANCED_CHAT,
+        workflow_id="workflow-advanced",
+    )
+    serialized_state = json.dumps({"state": "workflow"})
+
+    return WorkflowResumptionContext(
+        serialized_graph_runtime_state=serialized_state,
+        generate_entity=_AdvancedChatAppGenerateEntityWrapper(
+            entity=AdvancedChatAppGenerateEntity(
+                task_id="advanced-task",
+                app_config=app_config,
+                inputs={"topic": "roundtrip"},
+                files=[],
+                user_id="advanced-user",
+                stream=False,
+                invoke_from=InvokeFrom.DEBUGGER,
+                workflow_run_id="advanced-run-id",
+                query="Explain serialization behavior",
+            )
+        ),
+    )
+
+
+@pytest.mark.parametrize(
+    "state",
+    [
+        pytest.param(
+            _build_advanced_chat_generate_entity_for_roundtrip(),
+            id="advanced_chat",
+        ),
+        pytest.param(
+            _build_workflow_generate_entity_for_roundtrip(),
+            id="workflow",
+        ),
+    ],
+)
+def test_workflow_resumption_context_dumps_loads_roundtrip(state: WorkflowResumptionContext):
+    """WorkflowResumptionContext roundtrip preserves workflow generate entity metadata."""
+    dumped = state.dumps()
+    loaded = WorkflowResumptionContext.loads(dumped)
+
+    assert loaded == state
+    assert loaded.serialized_graph_runtime_state == state.serialized_graph_runtime_state
+    restored_entity = loaded.get_generate_entity()
+    assert isinstance(restored_entity, type(state.generate_entity.entity))
@@ -44,9 +44,32 @@ fi

if $web_modified; then
  echo "Running ESLint on web module"

  if git diff --cached --quiet -- 'web/**/*.ts' 'web/**/*.tsx'; then
    web_ts_modified=false
  else
    ts_diff_status=$?
    if [ $ts_diff_status -eq 1 ]; then
      web_ts_modified=true
    else
      echo "Unable to determine staged TypeScript changes (git exit code: $ts_diff_status)."
      exit $ts_diff_status
    fi
  fi

  cd ./web || exit 1
  lint-staged

  if $web_ts_modified; then
    echo "Running TypeScript type-check"
    if ! pnpm run type-check; then
      echo "Type check failed. Please run 'pnpm run type-check' to fix the errors."
      exit 1
    fi
  else
    echo "No staged TypeScript changes detected, skipping type-check"
  fi

  echo "Running unit tests check"
  modified_files=$(git diff --cached --name-only -- utils | grep -v '\.spec\.ts$' || true)

@@ -7,6 +7,9 @@ import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, MessagesChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/app-chart'
import { useStore as useAppStore } from '@/app/components/app/store'
import TimeRangePicker from './time-range-picker'
import { TIME_PERIOD_MAPPING as LONG_TIME_PERIOD_MAPPING } from '@/app/components/app/log/filter'
import { IS_CLOUD_EDITION } from '@/config'
import LongTimeRangePicker from './long-time-range-picker'

dayjs.extend(quarterOfYear)

@@ -30,7 +33,10 @@ export default function ChartView({ appId, headerRight }: IChartViewProps) {
  const appDetail = useAppStore(state => state.appDetail)
  const isChatApp = appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow'
  const isWorkflow = appDetail?.mode === 'workflow'
  const [period, setPeriod] = useState<PeriodParams>({ name: t('appLog.filter.period.today'), query: { start: today.startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } })
  const [period, setPeriod] = useState<PeriodParams>(IS_CLOUD_EDITION
    ? { name: t('appLog.filter.period.today'), query: { start: today.startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } }
    : { name: t('appLog.filter.period.last7days'), query: { start: today.subtract(7, 'day').startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } },
  )

  if (!appDetail)
    return null
@@ -40,11 +46,20 @@ export default function ChartView({ appId, headerRight }: IChartViewProps) {
    <div className='mb-4'>
      <div className='system-xl-semibold mb-2 text-text-primary'>{t('common.appMenus.overview')}</div>
      <div className='flex items-center justify-between'>
        <TimeRangePicker
          ranges={TIME_PERIOD_MAPPING}
          onSelect={setPeriod}
          queryDateFormat={queryDateFormat}
        />
        {IS_CLOUD_EDITION ? (
          <TimeRangePicker
            ranges={TIME_PERIOD_MAPPING}
            onSelect={setPeriod}
            queryDateFormat={queryDateFormat}
          />
        ) : (
          <LongTimeRangePicker
            periodMapping={LONG_TIME_PERIOD_MAPPING}
            onSelect={setPeriod}
            queryDateFormat={queryDateFormat}
          />
        )}

        {headerRight}
      </div>
    </div>

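The edition split above is the whole of the change: cloud builds keep today as the default chart window, while self-hosted builds start from the last 7 days. Read as a pure function, the initializer looks like this; a minimal sketch assuming the PeriodParams shape used by app-chart, with defaultPeriod as a hypothetical name that is not part of the diff:

import dayjs from 'dayjs'

// Shape assumed from the useState call above; the real type lives in app-chart.
type PeriodParams = {
  name: string
  query?: { start: string; end: string }
}

// Hypothetical helper mirroring ChartView's useState initializer.
function defaultPeriod(isCloudEdition: boolean, queryDateFormat: string): PeriodParams {
  const today = dayjs()
  const end = today.endOf('day').format(queryDateFormat)
  return isCloudEdition
    ? { name: 'today', query: { start: today.startOf('day').format(queryDateFormat), end } }
    : { name: 'last7days', query: { start: today.subtract(7, 'day').startOf('day').format(queryDateFormat), end } }
}
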
@@ -0,0 +1,63 @@
'use client'
import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import type { FC } from 'react'
import React from 'react'
import type { Item } from '@/app/components/base/select'
import { SimpleSelect } from '@/app/components/base/select'
import { useTranslation } from 'react-i18next'
import dayjs from 'dayjs'
type Props = {
  periodMapping: { [key: string]: { value: number; name: string } }
  onSelect: (payload: PeriodParams) => void
  queryDateFormat: string
}

const today = dayjs()

const LongTimeRangePicker: FC<Props> = ({
  periodMapping,
  onSelect,
  queryDateFormat,
}) => {
  const { t } = useTranslation()

  const handleSelect = React.useCallback((item: Item) => {
    const id = item.value
    const value = periodMapping[id]?.value ?? -1
    const name = item.name || t('appLog.filter.period.allTime')
    if (value === -1) {
      onSelect({ name: t('appLog.filter.period.allTime'), query: undefined })
    }
    else if (value === 0) {
      const startOfToday = today.startOf('day').format(queryDateFormat)
      const endOfToday = today.endOf('day').format(queryDateFormat)
      onSelect({
        name,
        query: {
          start: startOfToday,
          end: endOfToday,
        },
      })
    }
    else {
      onSelect({
        name,
        query: {
          start: today.subtract(value, 'day').startOf('day').format(queryDateFormat),
          end: today.endOf('day').format(queryDateFormat),
        },
      })
    }
  }, [onSelect, periodMapping, queryDateFormat, t])

  return (
    <SimpleSelect
      items={Object.entries(periodMapping).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
      className='mt-0 !w-40'
      notClearable={true}
      onSelect={handleSelect}
      defaultValue={'2'}
    />
  )
}
export default React.memo(LongTimeRangePicker)

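handleSelect encodes a small contract between periodMapping values and the emitted query: -1 selects all time (query: undefined), 0 selects today, and any positive N selects the last N days. A standalone sketch of that mapping, with resolveRange as a hypothetical name rather than an export of this file:

import dayjs from 'dayjs'

// -1 -> all time (no query); 0 -> today; N > 0 -> the last N days up to now.
function resolveRange(value: number, queryDateFormat: string): { start: string; end: string } | undefined {
  if (value === -1)
    return undefined
  const today = dayjs()
  const base = value === 0 ? today : today.subtract(value, 'day')
  return {
    start: base.startOf('day').format(queryDateFormat),
    end: today.endOf('day').format(queryDateFormat),
  }
}
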
@@ -10,6 +10,10 @@ import { ProviderContextProvider } from '@/context/provider-context'
import { ModalContextProvider } from '@/context/modal-context'
import GotoAnything from '@/app/components/goto-anything'
import Zendesk from '@/app/components/base/zendesk'
import Splash from '../components/splash'
import Test from '@edition/test'
import SubSubIndex from '@edition/sub/sub-sub/index'
import SubSub from '@edition/sub/sub-sub'

const Layout = ({ children }: { children: ReactNode }) => {
  return (
@@ -20,11 +24,15 @@ const Layout = ({ children }: { children: ReactNode }) => {
      <EventEmitterContextProvider>
        <ProviderContextProvider>
          <ModalContextProvider>
            <Test />
            <SubSubIndex />
            <SubSub />
            <HeaderWrapper>
              <Header />
            </HeaderWrapper>
            {children}
            <GotoAnything />
            <Splash />
          </ModalContextProvider>
        </ProviderContextProvider>
      </EventEmitterContextProvider>

web/app/components/splash.tsx
@@ -0,0 +1,21 @@
'use client'
import type { FC, PropsWithChildren } from 'react'
import React from 'react'
import { useIsLogin } from '@/service/use-common'
import Loading from './base/loading'

const Splash: FC<PropsWithChildren> = () => {
  // useIsLogin will auto-redirect to the sign-in page if the user is not logged in
  const { isLoading, data: loginData } = useIsLogin()
  const isLoggedIn = loginData?.logged_in

  if (isLoading || !isLoggedIn) {
    return (
      <div className='fixed inset-0 z-[9999999] flex h-full items-center justify-center bg-background-body'>
        <Loading />
      </div>
    )
  }
  return null
}
export default React.memo(Splash)

web/app/edition/community/copied.svg
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.6665 2.66683C11.2865 2.66683 11.5965 2.66683 11.8508 2.73498C12.541 2.91991 13.0801 3.45901 13.265 4.14919C13.3332 4.40352 13.3332 4.71352 13.3332 5.3335V11.4668C13.3332 12.5869 13.3332 13.147 13.1152 13.5748C12.9234 13.9511 12.6175 14.2571 12.2412 14.4488C11.8133 14.6668 11.2533 14.6668 10.1332 14.6668H5.8665C4.7464 14.6668 4.18635 14.6668 3.75852 14.4488C3.3822 14.2571 3.07624 13.9511 2.88449 13.5748C2.6665 13.147 2.6665 12.5869 2.6665 11.4668V5.3335C2.6665 4.71352 2.6665 4.40352 2.73465 4.14919C2.91959 3.45901 3.45868 2.91991 4.14887 2.73498C4.4032 2.66683 4.71319 2.66683 5.33317 2.66683M5.99984 10.0002L7.33317 11.3335L10.3332 8.3335M6.39984 4.00016H9.59984C9.9732 4.00016 10.1599 4.00016 10.3025 3.9275C10.4279 3.86359 10.5299 3.7616 10.5938 3.63616C10.6665 3.49355 10.6665 3.30686 10.6665 2.9335V2.40016C10.6665 2.02679 10.6665 1.84011 10.5938 1.6975C10.5299 1.57206 10.4279 1.47007 10.3025 1.40616C10.1599 1.3335 9.97321 1.3335 9.59984 1.3335H6.39984C6.02647 1.3335 5.83978 1.3335 5.69718 1.40616C5.57174 1.47007 5.46975 1.57206 5.40583 1.6975C5.33317 1.84011 5.33317 2.02679 5.33317 2.40016V2.9335C5.33317 3.30686 5.33317 3.49355 5.40583 3.63616C5.46975 3.7616 5.57174 3.86359 5.69718 3.9275C5.83978 4.00016 6.02647 4.00016 6.39984 4.00016Z" stroke="#1D2939" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

web/app/edition/community/sub/sub-c.module.css
@@ -0,0 +1,21 @@
.bg {
  background-color: red;
}

.bgImg {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(~@/app/components/develop/secret-key/assets/play.svg);
}

.bgCopied {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(../copied.svg);
}

.bgCopied2 {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(~@edition/copied.svg);
}

web/app/edition/community/sub/sub-sub/index.tsx
@@ -0,0 +1,13 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import s from '../sub-c.module.css'

const SubSub: FC = () => {
  return (
    <div className={s.bg}>
      Sub Sub
    </div>
  )
}
export default React.memo(SubSub)

web/app/edition/community/sub/sub-sub/sss.tsx (new empty file)
web/app/edition/community/sub/sub.ts (new empty file)

web/app/edition/community/test.tsx
@@ -0,0 +1,10 @@
'use client'
import type { FC } from 'react'
import React from 'react'

const Test: FC = () => {
  return (
    <div>Community</div>
  )
}
export default React.memo(Test)

web/app/edition/saas/copied.svg
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.6665 2.66683C11.2865 2.66683 11.5965 2.66683 11.8508 2.73498C12.541 2.91991 13.0801 3.45901 13.265 4.14919C13.3332 4.40352 13.3332 4.71352 13.3332 5.3335V11.4668C13.3332 12.5869 13.3332 13.147 13.1152 13.5748C12.9234 13.9511 12.6175 14.2571 12.2412 14.4488C11.8133 14.6668 11.2533 14.6668 10.1332 14.6668H5.8665C4.7464 14.6668 4.18635 14.6668 3.75852 14.4488C3.3822 14.2571 3.07624 13.9511 2.88449 13.5748C2.6665 13.147 2.6665 12.5869 2.6665 11.4668V5.3335C2.6665 4.71352 2.6665 4.40352 2.73465 4.14919C2.91959 3.45901 3.45868 2.91991 4.14887 2.73498C4.4032 2.66683 4.71319 2.66683 5.33317 2.66683M5.99984 10.0002L7.33317 11.3335L10.3332 8.3335M6.39984 4.00016H9.59984C9.9732 4.00016 10.1599 4.00016 10.3025 3.9275C10.4279 3.86359 10.5299 3.7616 10.5938 3.63616C10.6665 3.49355 10.6665 3.30686 10.6665 2.9335V2.40016C10.6665 2.02679 10.6665 1.84011 10.5938 1.6975C10.5299 1.57206 10.4279 1.47007 10.3025 1.40616C10.1599 1.3335 9.97321 1.3335 9.59984 1.3335H6.39984C6.02647 1.3335 5.83978 1.3335 5.69718 1.40616C5.57174 1.47007 5.46975 1.57206 5.40583 1.6975C5.33317 1.84011 5.33317 2.02679 5.33317 2.40016V2.9335C5.33317 3.30686 5.33317 3.49355 5.40583 3.63616C5.46975 3.7616 5.57174 3.86359 5.69718 3.9275C5.83978 4.00016 6.02647 4.00016 6.39984 4.00016Z" stroke="#1D2939" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

web/app/edition/saas/sub/sub-c.module.css
@@ -0,0 +1,21 @@
.bg {
  background-color: red;
}

.bgImg {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(~@/app/components/develop/secret-key/assets/play.svg);
}

.bgCopied {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(../copied.svg);
}

.bgCopied2 {
  background-repeat: no-repeat;
  background-position: left center;
  background-image: url(~@edition/copied.svg);
}

web/app/edition/saas/sub/sub-sub/index.tsx
@@ -0,0 +1,13 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import s from '../sub-c.module.css'

const SubSub: FC = () => {
  return (
    <div className={s.bg}>
      Sub Sub
    </div>
  )
}
export default React.memo(SubSub)

web/app/edition/saas/sub/sub-sub/sss.tsx (new empty file)
web/app/edition/saas/sub/sub.ts (new empty file)

web/app/edition/saas/test.tsx
@@ -0,0 +1,16 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import s from '@edition/sub/sub-c.module.css'
import cn from '@/utils/classnames'

const Test: FC = () => {
  return (
    <>
      <div className={cn(s.bgImg, 'pl-4')}>SaaS</div>
      <div className={cn(s.bgCopied, 'pl-4')}>Copied</div>
      <div className={cn(s.bgCopied2, 'pl-4')}>Import SVG using an alias</div>
    </>
  )
}
export default React.memo(Test)

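The community and saas copies of test.tsx differ on purpose: call sites import one path and the build resolves it to the active edition's file. A hypothetical consumer; only the import path comes from this diff:

// Resolves to web/app/edition/saas/test.tsx when NEXT_PUBLIC_EDITION === 'CLOUD',
// and to web/app/edition/community/test.tsx otherwise.
import Test from '@edition/test'

export default function Page() {
  return <Test />
}
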
@@ -1,4 +1,6 @@
const { codeInspectorPlugin } = require('code-inspector-plugin')
const fs = require('fs')
const path = require('path')
const withPWA = require('next-pwa')({
  dest: 'public',
  register: true,
@@ -88,6 +90,43 @@ const port = process.env.PORT || 3000
const locImageURLs = !hasSetWebPrefix ? [new URL(`http://localhost:${port}/**`), new URL(`http://127.0.0.1:${port}/**`)] : []
const remoteImageURLs = [hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_WEB_PREFIX}/**`) : '', ...locImageURLs].filter(item => !!item)

const isSaaS = process.env.NEXT_PUBLIC_EDITION === 'CLOUD'
console.log(isSaaS)
const supportPostfixReg = /\.(ts|tsx|css|svg|jpg|jpeg|png)$/
const editionPaths = (() => {
  const editionDir = `./app/edition/${isSaaS ? 'saas' : 'community'}`

  const result = {}

  function walk(dir) {
    const files = fs.readdirSync(dir)
    for (const file of files) {
      const fullPath = path.join(dir, file)
      const stat = fs.statSync(fullPath)
      if (stat.isDirectory()) {
        walk(fullPath)
      } else if (supportPostfixReg.test(file)) {
        const relPath = path.relative(editionDir, fullPath)
        const key = `@edition/${relPath.replace(/\\/g, '/')}`.replace(supportPostfixReg, '')
        const fullPathWithoutPrefix = `./${fullPath.replace(supportPostfixReg, '')}`
        result[key] = fullPathWithoutPrefix
        if (key.endsWith('/index')) {
          const dirKey = key.replace(/\/index$/, '')
          result[dirKey] = fullPathWithoutPrefix
        }
      }
    }
  }

  if (fs.existsSync(editionDir)) {
    walk(editionDir)
  }

  return result
})()

// console.log(JSON.stringify(editionPaths, null, 2))

/** @type {import('next').NextConfig} */
const nextConfig = {
  basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',

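The IIFE above walks the active edition directory once at config load, strips the supported extensions to form @edition/... keys, and adds a directory-level key for every index file so '@edition/sub/sub-sub' and '@edition/sub/sub-sub/index' resolve to the same module. For the community files added in this commit, the map would look roughly as follows (hand-written for illustration, not generated output):

const editionPathsExample = {
  '@edition/copied': './app/edition/community/copied',
  '@edition/sub/sub': './app/edition/community/sub/sub',
  '@edition/sub/sub-c.module': './app/edition/community/sub/sub-c.module',
  '@edition/sub/sub-sub/index': './app/edition/community/sub/sub-sub/index',
  '@edition/sub/sub-sub': './app/edition/community/sub/sub-sub', // directory key added for the index file
  '@edition/sub/sub-sub/sss': './app/edition/community/sub/sub-sub/sss',
  '@edition/test': './app/edition/community/test',
}

Note that in this branch the map only feeds the commented-out resolveAlias block below; actual resolution is carried by the tsconfig path arrays.
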
@@ -95,7 +134,10 @@ const nextConfig = {
  turbopack: {
    rules: codeInspectorPlugin({
      bundler: 'turbopack'
    })
    }),
    // resolveAlias: {
    //   ...editionPaths,
    // }
  },
  productionBrowserSourceMaps: false, // set to true to enable browser source map generation during the production build
  // Configure pageExtensions to include md and mdx
@@ -125,6 +167,7 @@ const nextConfig = {
  typescript: {
    // https://nextjs.org/docs/api-reference/next.config.js/ignoring-typescript-errors
    ignoreBuildErrors: true,
    tsconfigPath: isSaaS ? 'tsconfig.json' : 'tsconfig.ce.json',
  },
  reactStrictMode: true,
  async redirects() {

web/tsconfig.ce.json
@@ -0,0 +1,13 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "paths": {
      "@edition/*": [
        "./app/edition/community/*"
      ],
      "~@edition/*": [
        "./app/edition/community/*"
      ]
    }
  }
}

@@ -29,7 +29,15 @@
      ],
      "~@/*": [
        "./*"
      ]
      ],
      "@edition/*": [
        "./app/edition/saas/*",
        "./app/edition/community/*",
      ],
      "~@edition/*": [
        "./app/edition/saas/*",
        "./app/edition/community/*",
      ],
    }
  },
  "include": [

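The array order above gives the IDE the same fallback the build applies: for the SaaS configuration, @edition/* tries app/edition/saas/* first and falls back to app/edition/community/*, while tsconfig.ce.json replaces the mapping with community-only paths. One caveat worth knowing: compilerOptions.paths is replaced wholesale, not merged, when a config extends another, which is why tsconfig.ce.json repeats both @edition aliases and why the @/* aliases from the base file no longer apply under it unless they are repeated as well. A hypothetical import exercising the fallback:

// With the paths above, this resolves to app/edition/saas/test.tsx when it exists;
// a community-only module falls through to app/edition/community/.
import Test from '@edition/test'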