Mirror of https://github.com/langgenius/dify.git (synced 2025-12-24 00:07:43 +00:00)

Compare commits: feat/model...fix/conver (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 66d67185a0 | |
| | 5f0b52c017 | |
| | c2606f9062 | |
| | 70da81d0e5 | |
| | 75199442c1 | |
.github/workflows/autofix.yml (vendored, 3 lines changed)
@@ -23,6 +23,9 @@ jobs:
           uv run ruff check --fix-only .
           # Format code
           uv run ruff format .
+      - name: ast-grep
+        run: |
+          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
       - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

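Note: the ast-grep rule above only renames the method call; on SQLAlchemy 1.4+, `Query.where()` is a synonym for `Query.filter()`, so behavior is unchanged. A minimal runnable sketch of the before/after (the `Widget` model is hypothetical, for illustration only):

```python
from sqlalchemy import Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Widget(Base):  # hypothetical model, not part of the diff
    __tablename__ = "widgets"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    name: Mapped[str] = mapped_column(String(50))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Widget(id=1, name="a"))
    session.commit()
    # Old spelling, matched by the --pattern above:
    old = session.query(Widget).filter(Widget.id == 1).first()
    # New spelling, produced by --rewrite; Query.where() has been an
    # alias for Query.filter() since SQLAlchemy 1.4:
    new = session.query(Widget).where(Widget.id == 1).first()
    assert old is new  # same row, via the session identity map
```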
@@ -478,6 +478,13 @@ API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node

 # API workflow run repository implementation
 API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
+
+# Workflow log cleanup configuration
+# Enable automatic cleanup of workflow run logs to manage database size
+WORKFLOW_LOG_CLEANUP_ENABLED=true
+# Number of days to retain workflow run logs (default: 30 days)
+WORKFLOW_LOG_RETENTION_DAYS=30
+# Batch size for workflow log cleanup operations (default: 100)
+WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100

 # App configuration
 APP_MAX_EXECUTION_TIME=1200

@@ -968,6 +968,14 @@ class AccountConfig(BaseSettings):
     )


+class WorkflowLogConfig(BaseSettings):
+    WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True, description="Enable workflow run log cleanup")
+    WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30, description="Retention days for workflow run logs")
+    WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
+        default=100, description="Batch size for workflow run log cleanup operations"
+    )
+
+
 class FeatureConfig(
     # place the configs in alphabet order
     AppExecutionConfig,

@@ -1003,5 +1011,6 @@ class FeatureConfig(
     HostedServiceConfig,
     CeleryBeatConfig,
     CeleryScheduleTasksConfig,
+    WorkflowLogConfig,
 ):
     pass

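Note: because `WorkflowLogConfig` is mixed into `FeatureConfig` via pydantic-settings, each field is read from the environment variable of the same name, falling back to the declared default. A minimal standalone sketch of that behavior (using pydantic-settings directly, outside Dify's composed config):

```python
import os
from pydantic import Field
from pydantic_settings import BaseSettings

class WorkflowLogConfig(BaseSettings):
    WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True)
    WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30)
    WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(default=100)

os.environ["WORKFLOW_LOG_RETENTION_DAYS"] = "14"  # as if set in .env
cfg = WorkflowLogConfig()
assert cfg.WORKFLOW_LOG_RETENTION_DAYS == 14      # env overrides the default
assert cfg.WORKFLOW_LOG_CLEANUP_ENABLED is True   # untouched default applies
```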
@@ -12,7 +12,6 @@ from controllers.console.app.error import (
 )
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
+from core.helper.code_executor.code_node_provider import CodeNodeProvider
 from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider
 from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
 from core.llm_generator.llm_generator import LLMGenerator

@@ -126,18 +125,20 @@ class InstructionGenerateApi(Resource):
         parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
         parser.add_argument("ideal_output", type=str, required=False, default="", location="json")
         args = parser.parse_args()
-        code_template = (
-            Python3CodeProvider.get_default_code()
-            if args["language"] == "python"
-            else (JavascriptCodeProvider.get_default_code())
-            if args["language"] == "javascript"
-            else ""
-        )
+        providers: list[type[CodeNodeProvider]] = [Python3CodeProvider, JavascriptCodeProvider]
+        code_provider: type[CodeNodeProvider] | None = next(
+            (p for p in providers if p.is_accept_language(args["language"])), None
+        )
+        code_template = code_provider.get_default_code() if code_provider else ""
         try:
             # Generate from nothing for a workflow node
             if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "":
                 from models import App, db
                 from services.workflow_service import WorkflowService

-                app = db.session.query(App).filter(App.id == args["flow_id"]).first()
+                app = db.session.query(App).where(App.id == args["flow_id"]).first()
                 if not app:
                     return {"error": f"app {args['flow_id']} not found"}, 400
                 workflow = WorkflowService().get_draft_workflow(app_model=app)

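Note: the rewrite replaces the chained ternary with a provider lookup: `next()` over a generator yields the first provider whose `is_accept_language()` accepts the requested language, or `None`, and the template then falls back to an empty string. A minimal standalone sketch of the same pattern (toy provider classes, not Dify's):

```python
class PythonProvider:  # toy stand-in for Python3CodeProvider
    @staticmethod
    def is_accept_language(lang: str) -> bool:
        return lang == "python"

    @staticmethod
    def get_default_code() -> str:
        return "def main():\n    return {}"

class JSProvider:  # toy stand-in for JavascriptCodeProvider
    @staticmethod
    def is_accept_language(lang: str) -> bool:
        return lang == "javascript"

    @staticmethod
    def get_default_code() -> str:
        return "function main() { return {} }"

providers = [PythonProvider, JSProvider]

def default_code(language: str) -> str:
    # First provider accepting the language, else None -- mirrors the diff.
    provider = next((p for p in providers if p.is_accept_language(language)), None)
    return provider.get_default_code() if provider else ""

assert default_code("python").startswith("def main")
assert default_code("ruby") == ""  # unknown language falls back to ""
```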
@@ -39,7 +39,7 @@ class UploadFileApi(Resource):
         data_source_info = document.data_source_info_dict
         if data_source_info and "upload_file_id" in data_source_info:
             file_id = data_source_info["upload_file_id"]
-            upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first()
+            upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first()
             if not upload_file:
                 raise NotFound("UploadFile not found.")
         else:

@@ -181,7 +181,7 @@ class MessageCycleManager:
         :param message_id: message id
         :return:
         """
-        message_file = db.session.query(MessageFile).filter(MessageFile.id == message_id).first()
+        message_file = db.session.query(MessageFile).where(MessageFile.id == message_id).first()
         event_type = StreamEvent.MESSAGE_FILE if message_file else StreamEvent.MESSAGE

         return MessageStreamResponse(

@@ -399,9 +399,9 @@ class LLMGenerator:
     def instruction_modify_legacy(
         tenant_id: str, flow_id: str, current: str, instruction: str, model_config: dict, ideal_output: str | None
     ) -> dict:
-        app: App | None = db.session.query(App).filter(App.id == flow_id).first()
+        app: App | None = db.session.query(App).where(App.id == flow_id).first()
         last_run: Message | None = (
-            db.session.query(Message).filter(Message.app_id == flow_id).order_by(Message.created_at.desc()).first()
+            db.session.query(Message).where(Message.app_id == flow_id).order_by(Message.created_at.desc()).first()
         )
         if not last_run:
             return LLMGenerator.__instruction_modify_common(

@@ -442,7 +442,7 @@ class LLMGenerator:
     ) -> dict:
         from services.workflow_service import WorkflowService

-        app: App | None = db.session.query(App).filter(App.id == flow_id).first()
+        app: App | None = db.session.query(App).where(App.id == flow_id).first()
         if not app:
             raise ValueError("App not found.")
         workflow = WorkflowService().get_draft_workflow(app_model=app)

@@ -414,7 +414,7 @@ When you are modifying the code, you should remember:
 - Get inputs from the parameters of the function and have explicit type annotations.
 - Write proper imports at the top of the code.
 - Use return statement to return the result.
-- You should return a `dict`.
+- You should return a `dict`. If you need to return a `result: str`, you should `return {"result": result}`.
 Your output must strictly follow the schema format, do not output any content outside of the JSON body.
 """  # noqa: E501

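Note: the strengthened prompt rule pins down the return shape for generated code nodes. A minimal sketch of a function that satisfies it (the signature is illustrative, not a fixed contract):

```python
def main(name: str, count: int) -> dict:
    # Inputs arrive as typed parameters; the node must return a dict.
    result: str = f"{name} x {count}"
    # Per the added rule: wrap a bare string as {"result": result}.
    return {"result": result}

assert main("widget", 3) == {"result": "widget x 3"}
```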
@@ -151,7 +151,13 @@ def init_app(app: DifyApp) -> Celery:
             "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task",
             "schedule": crontab(minute="*/15"),
         }
+
+    if dify_config.WORKFLOW_LOG_CLEANUP_ENABLED:
+        # 2:00 AM every day
+        imports.append("schedule.clean_workflow_runlogs_precise")
+        beat_schedule["clean_workflow_runlogs_precise"] = {
+            "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise",
+            "schedule": crontab(minute="0", hour="2"),
+        }
     celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)

     return celery_app

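Note: the new beat entry uses Celery's `crontab` schedule; `crontab(minute="0", hour="2")` parses to exactly 02:00 daily, matching the comment in the diff. A minimal sketch inspecting the parsed fields (assuming celery is installed; crontab expands cron specs into sets of integers):

```python
from celery.schedules import crontab

daily_2am = crontab(minute="0", hour="2")  # the cleanup entry: 02:00 every day
assert daily_2am.hour == {2} and daily_2am.minute == {0}

every_15m = crontab(minute="*/15")         # the plugin-check entry above it
assert every_15m.minute == {0, 15, 30, 45}
```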
api/schedule/clean_workflow_runlogs_precise.py (new file, 155 lines)
@@ -0,0 +1,155 @@
import datetime
import logging
import time

import click

import app
from configs import dify_config
from extensions.ext_database import db
from models.model import (
    AppAnnotationHitHistory,
    Conversation,
    Message,
    MessageAgentThought,
    MessageAnnotation,
    MessageChain,
    MessageFeedback,
    MessageFile,
)
from models.workflow import ConversationVariable, WorkflowAppLog, WorkflowNodeExecutionModel, WorkflowRun

_logger = logging.getLogger(__name__)


MAX_RETRIES = 3
BATCH_SIZE = dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE


@app.celery.task(queue="dataset")
def clean_workflow_runlogs_precise():
    """Clean expired workflow run logs with retry mechanism and complete message cascade"""

    click.echo(click.style("Start clean workflow run logs (precise mode with complete cascade).", fg="green"))
    start_at = time.perf_counter()

    retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS
    cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days)

    try:
        total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count()
        if total_workflow_runs == 0:
            _logger.info("No expired workflow run logs found")
            return
        _logger.info("Found %s expired workflow run logs to clean", total_workflow_runs)

        total_deleted = 0
        failed_batches = 0
        batch_count = 0

        while True:
            workflow_runs = (
                db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all()
            )

            if not workflow_runs:
                break

            workflow_run_ids = [run.id for run in workflow_runs]
            batch_count += 1

            success = _delete_batch_with_retry(workflow_run_ids, failed_batches)

            if success:
                total_deleted += len(workflow_run_ids)
                failed_batches = 0
            else:
                failed_batches += 1
                if failed_batches >= MAX_RETRIES:
                    _logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
                    break
                else:
                    # Calculate incremental delay times: 5, 10, 15 minutes
                    retry_delay_minutes = failed_batches * 5
                    _logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
                    time.sleep(retry_delay_minutes * 60)
                    continue

        _logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted)

    except Exception as e:
        db.session.rollback()
        _logger.exception("Unexpected error in workflow log cleanup")
        raise

    end_at = time.perf_counter()
    execution_time = end_at - start_at
    click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green"))


def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool:
    """Delete a single batch with a retry mechanism and complete cascading deletion"""
    try:
        with db.session.begin_nested():
            message_data = (
                db.session.query(Message.id, Message.conversation_id)
                .filter(Message.workflow_run_id.in_(workflow_run_ids))
                .all()
            )
            message_id_list = [msg.id for msg in message_data]
            conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id})

            if message_id_list:
                db.session.query(AppAnnotationHitHistory).where(
                    AppAnnotationHitHistory.message_id.in_(message_id_list)
                ).delete(synchronize_session=False)

                db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

                db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete(
                    synchronize_session=False
                )

            db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete(
                synchronize_session=False
            )

            db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete(
                synchronize_session=False
            )

            db.session.query(WorkflowNodeExecutionModel).where(
                WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids)
            ).delete(synchronize_session=False)

            if conversation_id_list:
                db.session.query(ConversationVariable).where(
                    ConversationVariable.conversation_id.in_(conversation_id_list)
                ).delete(synchronize_session=False)

                db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete(
                    synchronize_session=False
                )

            db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False)

        db.session.commit()
        return True

    except Exception as e:
        db.session.rollback()
        _logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1)
        return False

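Note: besides the nightly beat schedule, this is a regular Celery task, so it can also be exercised manually; a minimal sketch (import path taken from the `imports.append(...)` string registered above):

```python
# Enqueue on the "dataset" queue declared in the task decorator:
from schedule.clean_workflow_runlogs_precise import clean_workflow_runlogs_precise

clean_workflow_runlogs_precise.delay()   # asynchronous, via the broker
clean_workflow_runlogs_precise.apply()   # synchronous, in-process (handy in tests)
```

Each loop iteration selects at most BATCH_SIZE expired run IDs and deletes the message-level children before the WorkflowRun parents, so an aborted run never leaves orphaned child rows for a deleted run.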
@@ -293,7 +293,7 @@ class AppAnnotationService:
         annotation_ids_to_delete = [annotation.id for annotation, _ in annotations_to_delete]

         # Step 2: Bulk delete hit histories in a single query
-        db.session.query(AppAnnotationHitHistory).filter(
+        db.session.query(AppAnnotationHitHistory).where(
             AppAnnotationHitHistory.annotation_id.in_(annotation_ids_to_delete)
         ).delete(synchronize_session=False)

@@ -307,7 +307,7 @@ class AppAnnotationService:
         # Step 4: Bulk delete annotations in a single query
         deleted_count = (
             db.session.query(MessageAnnotation)
-            .filter(MessageAnnotation.id.in_(annotation_ids_to_delete))
+            .where(MessageAnnotation.id.in_(annotation_ids_to_delete))
             .delete(synchronize_session=False)
         )

@@ -505,9 +505,9 @@ class AppAnnotationService:
             db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()
         )

-        annotations_query = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id)
+        annotations_query = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id)
         for annotation in annotations_query.yield_per(100):
-            annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).filter(
+            annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).where(
                 AppAnnotationHitHistory.annotation_id == annotation.id
             )
             for annotation_hit_history in annotation_hit_histories_query.yield_per(100):

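Note: `yield_per(100)` streams rows from the database in chunks of 100 instead of materializing the whole result set, which keeps memory flat during the per-row cleanup loop. A minimal standalone sketch of the pattern (hypothetical `Row` model, SQLite in-memory):

```python
from sqlalchemy import Integer, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Row(Base):  # hypothetical table, for illustration only
    __tablename__ = "rows"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(Row(id=i) for i in range(1, 251))
    session.commit()
    seen = 0
    # Rows are fetched 100 at a time rather than all at once:
    for row in session.query(Row).yield_per(100):
        seen += 1
    assert seen == 250
```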
@@ -471,7 +471,7 @@ class TestAnnotationService:
         # Verify annotation was deleted
         from extensions.ext_database import db

-        deleted_annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first()
+        deleted_annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
         assert deleted_annotation is None

         # Verify delete_annotation_index_task was called (when annotation setting exists)

@@ -1175,7 +1175,7 @@ class TestAnnotationService:
         AppAnnotationService.delete_app_annotation(app.id, annotation_id)

         # Verify annotation was deleted
-        deleted_annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first()
+        deleted_annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
         assert deleted_annotation is None

         # Verify delete_annotation_index_task was called

@@ -234,7 +234,7 @@ class TestAPIBasedExtensionService:
         # Verify extension was deleted
         from extensions.ext_database import db

-        deleted_extension = db.session.query(APIBasedExtension).filter(APIBasedExtension.id == extension_id).first()
+        deleted_extension = db.session.query(APIBasedExtension).where(APIBasedExtension.id == extension_id).first()
         assert deleted_extension is None

     def test_save_extension_duplicate_name(self, db_session_with_containers, mock_external_service_dependencies):

@@ -484,7 +484,7 @@ class TestMessageService:
         # Verify feedback was deleted
         from extensions.ext_database import db

-        deleted_feedback = db.session.query(MessageFeedback).filter(MessageFeedback.id == feedback.id).first()
+        deleted_feedback = db.session.query(MessageFeedback).where(MessageFeedback.id == feedback.id).first()
         assert deleted_feedback is None

     def test_create_feedback_no_rating_when_not_exists(

@@ -469,6 +469,6 @@ class TestModelLoadBalancingService:

         # Verify inherit config was created in database
         inherit_configs = (
-            db.session.query(LoadBalancingModelConfig).filter(LoadBalancingModelConfig.name == "__inherit__").all()
+            db.session.query(LoadBalancingModelConfig).where(LoadBalancingModelConfig.name == "__inherit__").all()
         )
         assert len(inherit_configs) == 1

@@ -887,6 +887,14 @@ API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.

 # API workflow node execution repository implementation
 API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
+
+# Workflow log cleanup configuration
+# Enable automatic cleanup of workflow run logs to manage database size
+WORKFLOW_LOG_CLEANUP_ENABLED=false
+# Number of days to retain workflow run logs (default: 30 days)
+WORKFLOW_LOG_RETENTION_DAYS=30
+# Batch size for workflow log cleanup operations (default: 100)
+WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100

 # HTTP request node in workflow configuration
 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576

@@ -396,6 +396,9 @@ x-shared-env: &shared-api-worker-env
   CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository}
   API_WORKFLOW_RUN_REPOSITORY: ${API_WORKFLOW_RUN_REPOSITORY:-repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository}
   API_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${API_WORKFLOW_NODE_EXECUTION_REPOSITORY:-repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository}
+  WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
+  WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
+  WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
   HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760}
   HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576}
   HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True}

@@ -142,7 +142,7 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
         ideal_output: ideaOutput,
         language: languageMap[codeLanguages] || 'javascript',
       })
-      if(!currentCode)
+      if((res as any).code) // not current or current is the same as the template would return a code field
         res.modified = (res as any).code

       if (error) {

@@ -259,7 +259,7 @@ const ReasoningConfigForm: React.FC<Props> = ({
               className='h-8 grow'
               type='number'
               value={varInput?.value || ''}
-              onChange={handleValueChange(variable, type)}
+              onChange={e => handleValueChange(variable, type)(e.target.value)}
               placeholder={placeholder?.[language] || placeholder?.en_US}
             />
           )}

@@ -127,7 +127,7 @@ const ChatVariableModal = ({
       case ChatVarType.ArrayString:
       case ChatVarType.ArrayNumber:
       case ChatVarType.ArrayObject:
-        return value?.filter(Boolean) || []
+        return value?.filter((item: any) => item !== null && item !== undefined && item !== '') || []
     }
   }

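Note: the last change matters because `Array.prototype.filter(Boolean)` drops every falsy element, so legitimate `0` and `false` entries vanished along with empty slots; the explicit null/undefined/'' predicate keeps them. The same pitfall exists with truthiness filtering in Python; a minimal sketch:

```python
values = [0, False, "", None, 7]

# Truthiness filter (analogue of value.filter(Boolean)): loses 0 and False too.
assert [v for v in values if v] == [7]

# Explicit emptiness check (analogue of the fixed predicate): keeps 0 and False.
kept = [v for v in values if v is not None and v != ""]
assert kept == [0, False, 7]
```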