Mirror of https://github.com/langgenius/dify.git, synced 2026-02-09 17:54:01 +00:00

Compare commits: dependabot... ... feat/add-u... (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 689b28eb73 | |
| | c52be354f6 | |
| | 1a3bd91b48 | |
| | a1b15978d5 | |
| | 2fadc19416 | |
@@ -578,6 +578,25 @@ class PluginUpgradeFromGithubApi(Resource):
            raise ValueError(e)


@console_ns.route("/workspaces/current/plugin/upgrade/batch")
class PluginBatchUpgradeApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @plugin_permission_required(install_required=True)
    def post(self):
        """
        Batch upgrade all marketplace plugins that have updates available
        """
        _, tenant_id = current_account_with_tenant()

        try:
            result = PluginService.batch_upgrade_plugins_from_marketplace(tenant_id)
            return jsonable_encoder(result)
        except PluginDaemonClientSideError as e:
            raise ValueError(e)


@console_ns.route("/workspaces/current/plugin/uninstall")
class PluginUninstallApi(Resource):
    @console_ns.expect(console_ns.models[ParserUninstall.__name__])
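For reference, a minimal sketch of how the new batch-upgrade route above could be exercised from a script. Only the "/workspaces/current/plugin/upgrade/batch" path comes from this diff; the host, the "/console/api" prefix, and the bearer token are assumptions.

```python
# Hypothetical client call for the batch upgrade endpoint shown above.
# Base URL, API prefix, and auth header are assumptions, not taken from the diff.
import requests

resp = requests.post(
    "http://localhost:5001/console/api/workspaces/current/plugin/upgrade/batch",
    headers={"Authorization": "Bearer <console-session-token>"},
    timeout=60,
)
resp.raise_for_status()
report = resp.json()  # expected keys per the service method: success / failed / skipped
print(len(report["success"]), "upgraded,", len(report["failed"]), "failed,", len(report["skipped"]), "skipped")
```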
@@ -23,7 +23,7 @@ dependencies = [
    "gevent~=25.9.1",
    "gmpy2~=2.2.1",
    "google-api-core==2.18.0",
    "google-api-python-client==2.189.0",
    "google-api-python-client==2.90.0",
    "google-auth==2.29.0",
    "google-auth-httplib2==0.2.0",
    "google-cloud-aiplatform==1.49.0",
@@ -155,11 +155,11 @@ class AsyncWorkflowService:

        task: AsyncResult[Any] | None = None
        if queue_name == QueuePriority.PROFESSIONAL:
            task = execute_workflow_professional.delay(task_data_dict)
            task = execute_workflow_professional.delay(task_data_dict) # type: ignore
        elif queue_name == QueuePriority.TEAM:
            task = execute_workflow_team.delay(task_data_dict)
            task = execute_workflow_team.delay(task_data_dict) # type: ignore
        else: # SANDBOX
            task = execute_workflow_sandbox.delay(task_data_dict)
            task = execute_workflow_sandbox.delay(task_data_dict) # type: ignore

        # 10. Update trigger log with task info
        trigger_log.status = WorkflowTriggerStatus.QUEUED
@@ -170,7 +170,7 @@ class AsyncWorkflowService:

        return AsyncTriggerResponse(
            workflow_trigger_log_id=trigger_log.id,
            task_id=task.id,
            task_id=task.id, # type: ignore
            status="queued",
            queue=queue_name,
        )
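The `# type: ignore` comments in the hunk above work around the loose typing of Celery task objects: `.delay()` on a decorated callable and the `AsyncResult[Any] | None` declaration are hard for a checker to narrow. A standalone sketch of the same situation (demo names only, not Dify code) is below.

```python
# Minimal sketch, assuming Celery is installed; broker/backend and task names are
# illustrative only. Shows why checkers need help with .delay() and AsyncResult.
from typing import Any

from celery import Celery
from celery.result import AsyncResult

app = Celery("demo", broker="memory://", backend="cache+memory://")

@app.task
def execute_workflow(payload: dict[str, Any]) -> str:
    return payload.get("id", "unknown")

# .delay() publishes the task and returns an AsyncResult immediately.
result: AsyncResult | None = execute_workflow.delay({"id": "wf-1"})
assert result is not None  # narrowing the optional avoids a type: ignore on .id
print(result.id)
```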
@@ -1,6 +1,10 @@
from __future__ import annotations

import builtins
import logging
from collections.abc import Mapping, Sequence
from mimetypes import guess_type
from typing import Any

from pydantic import BaseModel
from sqlalchemy import select
@@ -337,6 +341,91 @@ class PluginService:
            },
        )

    @staticmethod
    def batch_upgrade_plugins_from_marketplace(tenant_id: str) -> dict[str, builtins.list[dict[str, Any]]]:
        """
        Batch upgrade all marketplace plugins that have updates available

        Returns a dict with:
        - success: list of successfully upgraded plugins
        - failed: list of failed upgrades with error messages
        - skipped: list of plugins skipped (no updates or errors)
        """
        if not dify_config.MARKETPLACE_ENABLED:
            raise ValueError("marketplace is not enabled")

        manager = PluginInstaller()
        result: dict[str, builtins.list[dict[str, Any]]] = {
            "success": [],
            "failed": [],
            "skipped": [],
        }

        # Get all installed plugins
        plugins = manager.list_plugins(tenant_id)

        # Filter marketplace plugins only
        marketplace_plugins = [plugin for plugin in plugins if plugin.source == PluginInstallationSource.Marketplace]

        if not marketplace_plugins:
            return result

        # Get latest versions for all marketplace plugins
        plugin_ids = [plugin.plugin_id for plugin in marketplace_plugins]
        latest_versions = PluginService.fetch_latest_plugin_version(plugin_ids)

        # Upgrade each plugin if newer version is available
        for plugin in marketplace_plugins:
            try:
                latest_info = latest_versions.get(plugin.plugin_id)
                if not latest_info:
                    result["skipped"].append(
                        {
                            "plugin_id": plugin.plugin_id,
                            "reason": "no_update_info",
                            "current_version": plugin.version,
                        }
                    )
                    continue

                # Check if update is needed
                if latest_info.version == plugin.version:
                    result["skipped"].append(
                        {
                            "plugin_id": plugin.plugin_id,
                            "reason": "already_latest",
                            "current_version": plugin.version,
                        }
                    )
                    continue

                # Perform upgrade
                PluginService.upgrade_plugin_with_marketplace(
                    tenant_id, plugin.plugin_unique_identifier, latest_info.unique_identifier
                )

                result["success"].append(
                    {
                        "plugin_id": plugin.plugin_id,
                        "from_version": plugin.version,
                        "to_version": latest_info.version,
                        "from_identifier": plugin.plugin_unique_identifier,
                        "to_identifier": latest_info.unique_identifier,
                    }
                )

            except Exception as e:
                logger.exception("Failed to upgrade plugin %s", plugin.plugin_id)
                result["failed"].append(
                    {
                        "plugin_id": plugin.plugin_id,
                        "current_version": plugin.version,
                        "error": str(e),
                    }
                )

        return result

    @staticmethod
    def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginDecodeResponse:
        """
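As a consumer-side illustration (a sketch, not code from the repository), the dict returned by `batch_upgrade_plugins_from_marketplace` can be summarized like this; the helper and the example plugin entry are hypothetical, but the keys and entry fields mirror what the method above builds.

```python
# Hedged sketch: the "success" / "failed" / "skipped" keys and the per-entry fields
# ("plugin_id", "current_version", "reason", "error") come from the diff above;
# the helper name and example values are invented for illustration.
from typing import Any

def summarize_upgrade_report(report: dict[str, list[dict[str, Any]]]) -> str:
    lines = [
        f"upgraded {len(report['success'])}, failed {len(report['failed'])}, skipped {len(report['skipped'])}"
    ]
    for entry in report["failed"]:
        lines.append(f"  failed: {entry['plugin_id']} ({entry['current_version']}): {entry['error']}")
    for entry in report["skipped"]:
        lines.append(f"  skipped: {entry['plugin_id']}: {entry['reason']}")
    return "\n".join(lines)

example = {
    "success": [],
    "failed": [],
    "skipped": [{"plugin_id": "example/plugin", "reason": "already_latest", "current_version": "0.0.1"}],
}
print(summarize_upgrade_report(example))
```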
@@ -6,6 +6,7 @@ from celery import shared_task

from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService

@@ -57,3 +58,5 @@ def add_annotation_to_index_task(
        )
    except Exception:
        logger.exception("Build index for annotation failed")
    finally:
        db.session.close()
@@ -5,6 +5,7 @@ import click
from celery import shared_task

from core.rag.datasource.vdb.vector_factory import Vector
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService

@@ -39,3 +40,5 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
        logger.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
    except Exception:
        logger.exception("Annotation deleted index failed")
    finally:
        db.session.close()
@@ -6,6 +6,7 @@ from celery import shared_task

from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService

@@ -58,3 +59,5 @@ def update_annotation_to_index_task(
        )
    except Exception:
        logger.exception("Build index for annotation failed")
    finally:
        db.session.close()
@@ -48,11 +48,6 @@ def batch_create_segment_to_index_task(
|
||||
|
||||
indexing_cache_key = f"segment_batch_import_{job_id}"
|
||||
|
||||
# Initialize variables with default values
|
||||
upload_file_key: str | None = None
|
||||
dataset_config: dict | None = None
|
||||
document_config: dict | None = None
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
try:
|
||||
dataset = session.get(Dataset, dataset_id)
|
||||
@@ -74,115 +69,86 @@ def batch_create_segment_to_index_task(
|
||||
if not upload_file:
|
||||
raise ValueError("UploadFile not found.")
|
||||
|
||||
dataset_config = {
|
||||
"id": dataset.id,
|
||||
"indexing_technique": dataset.indexing_technique,
|
||||
"tenant_id": dataset.tenant_id,
|
||||
"embedding_model_provider": dataset.embedding_model_provider,
|
||||
"embedding_model": dataset.embedding_model,
|
||||
}
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
suffix = Path(upload_file.key).suffix
|
||||
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
|
||||
storage.download(upload_file.key, file_path)
|
||||
|
||||
document_config = {
|
||||
"id": dataset_document.id,
|
||||
"doc_form": dataset_document.doc_form,
|
||||
"word_count": dataset_document.word_count or 0,
|
||||
}
|
||||
df = pd.read_csv(file_path)
|
||||
content = []
|
||||
for _, row in df.iterrows():
|
||||
if dataset_document.doc_form == "qa_model":
|
||||
data = {"content": row.iloc[0], "answer": row.iloc[1]}
|
||||
else:
|
||||
data = {"content": row.iloc[0]}
|
||||
content.append(data)
|
||||
if len(content) == 0:
|
||||
raise ValueError("The CSV file is empty.")
|
||||
|
||||
upload_file_key = upload_file.key
|
||||
document_segments = []
|
||||
embedding_model = None
|
||||
if dataset.indexing_technique == "high_quality":
|
||||
model_manager = ModelManager()
|
||||
embedding_model = model_manager.get_model_instance(
|
||||
tenant_id=dataset.tenant_id,
|
||||
provider=dataset.embedding_model_provider,
|
||||
model_type=ModelType.TEXT_EMBEDDING,
|
||||
model=dataset.embedding_model,
|
||||
)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Segments batch created index failed")
|
||||
redis_client.setex(indexing_cache_key, 600, "error")
|
||||
return
|
||||
|
||||
# Ensure required variables are set before proceeding
|
||||
if upload_file_key is None or dataset_config is None or document_config is None:
|
||||
logger.error("Required configuration not set due to session error")
|
||||
redis_client.setex(indexing_cache_key, 600, "error")
|
||||
return
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
suffix = Path(upload_file_key).suffix
|
||||
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
|
||||
storage.download(upload_file_key, file_path)
|
||||
|
||||
df = pd.read_csv(file_path)
|
||||
content = []
|
||||
for _, row in df.iterrows():
|
||||
if document_config["doc_form"] == "qa_model":
|
||||
data = {"content": row.iloc[0], "answer": row.iloc[1]}
|
||||
word_count_change = 0
|
||||
if embedding_model:
|
||||
tokens_list = embedding_model.get_text_embedding_num_tokens(
|
||||
texts=[segment["content"] for segment in content]
|
||||
)
|
||||
else:
|
||||
data = {"content": row.iloc[0]}
|
||||
content.append(data)
|
||||
if len(content) == 0:
|
||||
raise ValueError("The CSV file is empty.")
|
||||
tokens_list = [0] * len(content)
|
||||
|
||||
document_segments = []
|
||||
embedding_model = None
|
||||
if dataset_config["indexing_technique"] == "high_quality":
|
||||
model_manager = ModelManager()
|
||||
embedding_model = model_manager.get_model_instance(
|
||||
tenant_id=dataset_config["tenant_id"],
|
||||
provider=dataset_config["embedding_model_provider"],
|
||||
model_type=ModelType.TEXT_EMBEDDING,
|
||||
model=dataset_config["embedding_model"],
|
||||
)
|
||||
for segment, tokens in zip(content, tokens_list):
|
||||
content = segment["content"]
|
||||
doc_id = str(uuid.uuid4())
|
||||
segment_hash = helper.generate_text_hash(content)
|
||||
max_position = (
|
||||
session.query(func.max(DocumentSegment.position))
|
||||
.where(DocumentSegment.document_id == dataset_document.id)
|
||||
.scalar()
|
||||
)
|
||||
segment_document = DocumentSegment(
|
||||
tenant_id=tenant_id,
|
||||
dataset_id=dataset_id,
|
||||
document_id=document_id,
|
||||
index_node_id=doc_id,
|
||||
index_node_hash=segment_hash,
|
||||
position=max_position + 1 if max_position else 1,
|
||||
content=content,
|
||||
word_count=len(content),
|
||||
tokens=tokens,
|
||||
created_by=user_id,
|
||||
indexing_at=naive_utc_now(),
|
||||
status="completed",
|
||||
completed_at=naive_utc_now(),
|
||||
)
|
||||
if dataset_document.doc_form == "qa_model":
|
||||
segment_document.answer = segment["answer"]
|
||||
segment_document.word_count += len(segment["answer"])
|
||||
word_count_change += segment_document.word_count
|
||||
session.add(segment_document)
|
||||
document_segments.append(segment_document)
|
||||
|
||||
word_count_change = 0
|
||||
if embedding_model:
|
||||
tokens_list = embedding_model.get_text_embedding_num_tokens(texts=[segment["content"] for segment in content])
|
||||
else:
|
||||
tokens_list = [0] * len(content)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
for segment, tokens in zip(content, tokens_list):
|
||||
content = segment["content"]
|
||||
doc_id = str(uuid.uuid4())
|
||||
segment_hash = helper.generate_text_hash(content)
|
||||
max_position = (
|
||||
session.query(func.max(DocumentSegment.position))
|
||||
.where(DocumentSegment.document_id == document_config["id"])
|
||||
.scalar()
|
||||
)
|
||||
segment_document = DocumentSegment(
|
||||
tenant_id=tenant_id,
|
||||
dataset_id=dataset_id,
|
||||
document_id=document_id,
|
||||
index_node_id=doc_id,
|
||||
index_node_hash=segment_hash,
|
||||
position=max_position + 1 if max_position else 1,
|
||||
content=content,
|
||||
word_count=len(content),
|
||||
tokens=tokens,
|
||||
created_by=user_id,
|
||||
indexing_at=naive_utc_now(),
|
||||
status="completed",
|
||||
completed_at=naive_utc_now(),
|
||||
)
|
||||
if document_config["doc_form"] == "qa_model":
|
||||
segment_document.answer = segment["answer"]
|
||||
segment_document.word_count += len(segment["answer"])
|
||||
word_count_change += segment_document.word_count
|
||||
session.add(segment_document)
|
||||
document_segments.append(segment_document)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
dataset_document = session.get(Document, document_id)
|
||||
if dataset_document:
|
||||
assert dataset_document.word_count is not None
|
||||
dataset_document.word_count += word_count_change
|
||||
session.add(dataset_document)
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
dataset = session.get(Dataset, dataset_id)
|
||||
if dataset:
|
||||
VectorService.create_segments_vector(None, document_segments, dataset, document_config["doc_form"])
|
||||
|
||||
redis_client.setex(indexing_cache_key, 600, "completed")
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Segment batch created job: {job_id} latency: {end_at - start_at}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form)
|
||||
session.commit()
|
||||
redis_client.setex(indexing_cache_key, 600, "completed")
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Segment batch created job: {job_id} latency: {end_at - start_at}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("Segments batch created index failed")
|
||||
redis_client.setex(indexing_cache_key, 600, "error")
|
||||
|
||||
@@ -28,7 +28,6 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
|
||||
"""
|
||||
logger.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
total_attachment_files = []
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
try:
|
||||
@@ -48,91 +47,78 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
|
||||
SegmentAttachmentBinding.document_id == document_id,
|
||||
)
|
||||
).all()
|
||||
|
||||
attachment_ids = [attachment_file.id for _, attachment_file in attachments_with_bindings]
|
||||
binding_ids = [binding.id for binding, _ in attachments_with_bindings]
|
||||
total_attachment_files.extend([attachment_file.key for _, attachment_file in attachments_with_bindings])
|
||||
|
||||
index_node_ids = [segment.index_node_id for segment in segments]
|
||||
segment_contents = [segment.content for segment in segments]
|
||||
except Exception:
|
||||
logger.exception("Cleaned document when document deleted failed")
|
||||
return
|
||||
|
||||
# check segment is exist
|
||||
if index_node_ids:
|
||||
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
|
||||
with session_factory.create_session() as session:
|
||||
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
|
||||
if dataset:
|
||||
# check segment is exist
|
||||
if segments:
|
||||
index_node_ids = [segment.index_node_id for segment in segments]
|
||||
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
|
||||
index_processor.clean(
|
||||
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
|
||||
)
|
||||
|
||||
total_image_files = []
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
for segment_content in segment_contents:
|
||||
image_upload_file_ids = get_image_upload_file_ids(segment_content)
|
||||
image_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))).all()
|
||||
total_image_files.extend([image_file.key for image_file in image_files])
|
||||
image_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
|
||||
session.execute(image_file_delete_stmt)
|
||||
for segment in segments:
|
||||
image_upload_file_ids = get_image_upload_file_ids(segment.content)
|
||||
image_files = session.scalars(
|
||||
select(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
|
||||
).all()
|
||||
for image_file in image_files:
|
||||
if image_file is None:
|
||||
continue
|
||||
try:
|
||||
storage.delete(image_file.key)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Delete image_files failed when storage deleted, \
|
||||
image_upload_file_is: %s",
|
||||
image_file.id,
|
||||
)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
|
||||
session.execute(segment_delete_stmt)
|
||||
image_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
|
||||
session.execute(image_file_delete_stmt)
|
||||
session.delete(segment)
|
||||
|
||||
for image_file_key in total_image_files:
|
||||
try:
|
||||
storage.delete(image_file_key)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Delete image_files failed when storage deleted, \
|
||||
image_upload_file_is: %s",
|
||||
image_file_key,
|
||||
session.commit()
|
||||
if file_id:
|
||||
file = session.query(UploadFile).where(UploadFile.id == file_id).first()
|
||||
if file:
|
||||
try:
|
||||
storage.delete(file.key)
|
||||
except Exception:
|
||||
logger.exception("Delete file failed when document deleted, file_id: %s", file_id)
|
||||
session.delete(file)
|
||||
# delete segment attachments
|
||||
if attachments_with_bindings:
|
||||
attachment_ids = [attachment_file.id for _, attachment_file in attachments_with_bindings]
|
||||
binding_ids = [binding.id for binding, _ in attachments_with_bindings]
|
||||
for binding, attachment_file in attachments_with_bindings:
|
||||
try:
|
||||
storage.delete(attachment_file.key)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Delete attachment_file failed when storage deleted, \
|
||||
attachment_file_id: %s",
|
||||
binding.attachment_id,
|
||||
)
|
||||
attachment_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(attachment_ids))
|
||||
session.execute(attachment_file_delete_stmt)
|
||||
|
||||
binding_delete_stmt = delete(SegmentAttachmentBinding).where(
|
||||
SegmentAttachmentBinding.id.in_(binding_ids)
|
||||
)
|
||||
session.execute(binding_delete_stmt)
|
||||
|
||||
# delete dataset metadata binding
|
||||
session.query(DatasetMetadataBinding).where(
|
||||
DatasetMetadataBinding.dataset_id == dataset_id,
|
||||
DatasetMetadataBinding.document_id == document_id,
|
||||
).delete()
|
||||
session.commit()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
if file_id:
|
||||
file = session.query(UploadFile).where(UploadFile.id == file_id).first()
|
||||
if file:
|
||||
try:
|
||||
storage.delete(file.key)
|
||||
except Exception:
|
||||
logger.exception("Delete file failed when document deleted, file_id: %s", file_id)
|
||||
session.delete(file)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
# delete segment attachments
|
||||
if attachment_ids:
|
||||
attachment_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(attachment_ids))
|
||||
session.execute(attachment_file_delete_stmt)
|
||||
|
||||
if binding_ids:
|
||||
binding_delete_stmt = delete(SegmentAttachmentBinding).where(SegmentAttachmentBinding.id.in_(binding_ids))
|
||||
session.execute(binding_delete_stmt)
|
||||
|
||||
for attachment_file_key in total_attachment_files:
|
||||
try:
|
||||
storage.delete(attachment_file_key)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Delete attachment_file failed when storage deleted, \
|
||||
attachment_file_id: %s",
|
||||
attachment_file_key,
|
||||
)
|
||||
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
# delete dataset metadata binding
|
||||
session.query(DatasetMetadataBinding).where(
|
||||
DatasetMetadataBinding.dataset_id == dataset_id,
|
||||
DatasetMetadataBinding.document_id == document_id,
|
||||
).delete()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
logger.exception("Cleaned document when document deleted failed")
|
||||
|
||||
@@ -81,35 +81,26 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
|
||||
session.commit()
|
||||
return
|
||||
|
||||
# Phase 1: Update status to parsing (short transaction)
|
||||
with session_factory.create_session() as session, session.begin():
|
||||
documents = (
|
||||
session.query(Document).where(Document.id.in_(document_ids), Document.dataset_id == dataset_id).all()
|
||||
)
|
||||
for document_id in document_ids:
|
||||
logger.info(click.style(f"Start process document: {document_id}", fg="green"))
|
||||
|
||||
document = (
|
||||
session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
|
||||
)
|
||||
|
||||
for document in documents:
|
||||
if document:
|
||||
document.indexing_status = "parsing"
|
||||
document.processing_started_at = naive_utc_now()
|
||||
documents.append(document)
|
||||
session.add(document)
|
||||
# Transaction committed and closed
|
||||
session.commit()
|
||||
|
||||
# Phase 2: Execute indexing (no transaction - IndexingRunner creates its own sessions)
|
||||
has_error = False
|
||||
try:
|
||||
indexing_runner = IndexingRunner()
|
||||
indexing_runner.run(documents)
|
||||
end_at = time.perf_counter()
|
||||
logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
|
||||
except DocumentIsPausedError as ex:
|
||||
logger.info(click.style(str(ex), fg="yellow"))
|
||||
has_error = True
|
||||
except Exception:
|
||||
logger.exception("Document indexing task failed, dataset_id: %s", dataset_id)
|
||||
has_error = True
|
||||
try:
|
||||
indexing_runner = IndexingRunner()
|
||||
indexing_runner.run(documents)
|
||||
end_at = time.perf_counter()
|
||||
logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
|
||||
|
||||
if not has_error:
|
||||
with session_factory.create_session() as session:
|
||||
# Trigger summary index generation for completed documents if enabled
|
||||
# Only generate for high_quality indexing technique and when summary_index_setting is enabled
|
||||
# Re-query dataset to get latest summary_index_setting (in case it was updated)
|
||||
@@ -124,18 +115,17 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
|
||||
# expire all session to get latest document's indexing status
|
||||
session.expire_all()
|
||||
# Check each document's indexing status and trigger summary generation if completed
|
||||
|
||||
documents = (
|
||||
session.query(Document)
|
||||
.where(Document.id.in_(document_ids), Document.dataset_id == dataset_id)
|
||||
.all()
|
||||
)
|
||||
|
||||
for document in documents:
|
||||
for document_id in document_ids:
|
||||
# Re-query document to get latest status (IndexingRunner may have updated it)
|
||||
document = (
|
||||
session.query(Document)
|
||||
.where(Document.id == document_id, Document.dataset_id == dataset_id)
|
||||
.first()
|
||||
)
|
||||
if document:
|
||||
logger.info(
|
||||
"Checking document %s for summary generation: status=%s, doc_form=%s, need_summary=%s",
|
||||
document.id,
|
||||
document_id,
|
||||
document.indexing_status,
|
||||
document.doc_form,
|
||||
document.need_summary,
|
||||
@@ -146,36 +136,46 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
|
||||
and document.need_summary is True
|
||||
):
|
||||
try:
|
||||
generate_summary_index_task.delay(dataset.id, document.id, None)
|
||||
generate_summary_index_task.delay(dataset.id, document_id, None)
|
||||
logger.info(
|
||||
"Queued summary index generation task for document %s in dataset %s "
|
||||
"after indexing completed",
|
||||
document.id,
|
||||
document_id,
|
||||
dataset.id,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to queue summary index generation task for document %s",
|
||||
document.id,
|
||||
document_id,
|
||||
)
|
||||
# Don't fail the entire indexing process if summary task queuing fails
|
||||
else:
|
||||
logger.info(
|
||||
"Skipping summary generation for document %s: "
|
||||
"status=%s, doc_form=%s, need_summary=%s",
|
||||
document.id,
|
||||
document_id,
|
||||
document.indexing_status,
|
||||
document.doc_form,
|
||||
document.need_summary,
|
||||
)
|
||||
else:
|
||||
logger.warning("Document %s not found after indexing", document.id)
|
||||
logger.warning("Document %s not found after indexing", document_id)
|
||||
else:
|
||||
logger.info(
|
||||
"Summary index generation skipped for dataset %s: summary_index_setting.enable=%s",
|
||||
dataset.id,
|
||||
summary_index_setting.get("enable") if summary_index_setting else None,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Summary index generation skipped for dataset %s: indexing_technique=%s (not 'high_quality')",
|
||||
dataset.id,
|
||||
dataset.indexing_technique,
|
||||
)
|
||||
except DocumentIsPausedError as ex:
|
||||
logger.info(click.style(str(ex), fg="yellow"))
|
||||
except Exception:
|
||||
logger.exception("Document indexing task failed, dataset_id: %s", dataset_id)
|
||||
|
||||
|
||||
def _document_indexing_with_tenant_queue(
|
||||
|
||||
@@ -6,8 +6,9 @@ improving performance by offloading storage operations to background workers.
"""

from celery import shared_task # type: ignore[import-untyped]
from sqlalchemy.orm import Session

from core.db.session_factory import session_factory
from extensions.ext_database import db
from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService


@@ -16,6 +17,6 @@ def save_workflow_execution_task(
    self,
    deletions: list[DraftVarFileDeletion],
):
    with session_factory.create_session() as session, session.begin():
    with Session(bind=db.engine) as session, session.begin():
        srv = WorkflowDraftVariableService(session=session)
        srv.delete_workflow_draft_variable_file(deletions=deletions)
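Both session styles in the hunk above wrap the deletions in an explicit transaction block. A standalone sketch of the `Session(bind=engine)` + `session.begin()` pattern follows; the engine, table, and model names here are assumptions for illustration, not Dify code.

```python
# Minimal sketch of SQLAlchemy 2.0 session-plus-transaction usage, assuming an
# in-memory SQLite engine and a hypothetical DraftVarFile model.
from sqlalchemy import Integer, String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class DraftVarFile(Base):
    __tablename__ = "draft_var_file"  # hypothetical table for illustration
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    key: Mapped[str] = mapped_column(String(255))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

# Same shape as the task body: open a session bound to the engine and do the
# work inside session.begin(), which commits on success and rolls back on error.
with Session(bind=engine) as session, session.begin():
    session.add(DraftVarFile(key="draft/var/1.bin"))

with Session(bind=engine) as session, session.begin():
    rows = session.execute(select(DraftVarFile)).scalars().all()
    print(len(rows), "draft variable files")
```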
@@ -605,20 +605,26 @@ class TestBatchCreateSegmentToIndexTask:
|
||||
|
||||
mock_storage.download.side_effect = mock_download
|
||||
|
||||
# Execute the task - should raise ValueError for empty CSV
|
||||
# Execute the task
|
||||
job_id = str(uuid.uuid4())
|
||||
with pytest.raises(ValueError, match="The CSV file is empty"):
|
||||
batch_create_segment_to_index_task(
|
||||
job_id=job_id,
|
||||
upload_file_id=upload_file.id,
|
||||
dataset_id=dataset.id,
|
||||
document_id=document.id,
|
||||
tenant_id=tenant.id,
|
||||
user_id=account.id,
|
||||
)
|
||||
batch_create_segment_to_index_task(
|
||||
job_id=job_id,
|
||||
upload_file_id=upload_file.id,
|
||||
dataset_id=dataset.id,
|
||||
document_id=document.id,
|
||||
tenant_id=tenant.id,
|
||||
user_id=account.id,
|
||||
)
|
||||
|
||||
# Verify error handling
|
||||
# Since exception was raised, no segments should be created
|
||||
# Check Redis cache was set to error status
|
||||
from extensions.ext_redis import redis_client
|
||||
|
||||
cache_key = f"segment_batch_import_{job_id}"
|
||||
cache_value = redis_client.get(cache_key)
|
||||
assert cache_value == b"error"
|
||||
|
||||
# Verify no segments were created
|
||||
from extensions.ext_database import db
|
||||
|
||||
segments = db.session.query(DocumentSegment).all()
|
||||
|
||||
@@ -83,127 +83,23 @@ def mock_documents(document_ids, dataset_id):
|
||||
def mock_db_session():
|
||||
"""Mock database session via session_factory.create_session()."""
|
||||
with patch("tasks.document_indexing_task.session_factory") as mock_sf:
|
||||
sessions = [] # Track all created sessions
|
||||
# Shared mock data that all sessions will access
|
||||
shared_mock_data = {"dataset": None, "documents": None, "doc_iter": None}
|
||||
session = MagicMock()
|
||||
# Ensure tests that expect session.close() to be called can observe it via the context manager
|
||||
session.close = MagicMock()
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
# Link __exit__ to session.close so "close" expectations reflect context manager teardown
|
||||
|
||||
def create_session_side_effect():
|
||||
session = MagicMock()
|
||||
session.close = MagicMock()
|
||||
def _exit_side_effect(*args, **kwargs):
|
||||
session.close()
|
||||
|
||||
# Track commit calls
|
||||
commit_mock = MagicMock()
|
||||
session.commit = commit_mock
|
||||
cm = MagicMock()
|
||||
cm.__enter__.return_value = session
|
||||
cm.__exit__.side_effect = _exit_side_effect
|
||||
mock_sf.create_session.return_value = cm
|
||||
|
||||
def _exit_side_effect(*args, **kwargs):
|
||||
session.close()
|
||||
|
||||
cm.__exit__.side_effect = _exit_side_effect
|
||||
|
||||
# Support session.begin() for transactions
|
||||
begin_cm = MagicMock()
|
||||
begin_cm.__enter__.return_value = session
|
||||
|
||||
def begin_exit_side_effect(*args, **kwargs):
|
||||
# Auto-commit on transaction exit (like SQLAlchemy)
|
||||
session.commit()
|
||||
# Also mark wrapper's commit as called
|
||||
if sessions:
|
||||
sessions[0].commit()
|
||||
|
||||
begin_cm.__exit__ = MagicMock(side_effect=begin_exit_side_effect)
|
||||
session.begin = MagicMock(return_value=begin_cm)
|
||||
|
||||
sessions.append(session)
|
||||
|
||||
# Setup query with side_effect to handle both Dataset and Document queries
|
||||
def query_side_effect(*args):
|
||||
query = MagicMock()
|
||||
if args and args[0] == Dataset and shared_mock_data["dataset"] is not None:
|
||||
where_result = MagicMock()
|
||||
where_result.first.return_value = shared_mock_data["dataset"]
|
||||
query.where = MagicMock(return_value=where_result)
|
||||
elif args and args[0] == Document and shared_mock_data["documents"] is not None:
|
||||
# Support both .first() and .all() calls with chaining
|
||||
where_result = MagicMock()
|
||||
where_result.where = MagicMock(return_value=where_result)
|
||||
|
||||
# Create an iterator for .first() calls if not exists
|
||||
if shared_mock_data["doc_iter"] is None:
|
||||
docs = shared_mock_data["documents"] or [None]
|
||||
shared_mock_data["doc_iter"] = iter(docs)
|
||||
|
||||
where_result.first = lambda: next(shared_mock_data["doc_iter"], None)
|
||||
docs_or_empty = shared_mock_data["documents"] or []
|
||||
where_result.all = MagicMock(return_value=docs_or_empty)
|
||||
query.where = MagicMock(return_value=where_result)
|
||||
else:
|
||||
query.where = MagicMock(return_value=query)
|
||||
return query
|
||||
|
||||
session.query = MagicMock(side_effect=query_side_effect)
|
||||
return cm
|
||||
|
||||
mock_sf.create_session.side_effect = create_session_side_effect
|
||||
|
||||
# Create a wrapper that behaves like the first session but has access to all sessions
|
||||
class SessionWrapper:
|
||||
def __init__(self):
|
||||
self._sessions = sessions
|
||||
self._shared_data = shared_mock_data
|
||||
# Create a default session for setup phase
|
||||
self._default_session = MagicMock()
|
||||
self._default_session.close = MagicMock()
|
||||
self._default_session.commit = MagicMock()
|
||||
|
||||
# Support session.begin() for default session too
|
||||
begin_cm = MagicMock()
|
||||
begin_cm.__enter__.return_value = self._default_session
|
||||
|
||||
def default_begin_exit_side_effect(*args, **kwargs):
|
||||
self._default_session.commit()
|
||||
|
||||
begin_cm.__exit__ = MagicMock(side_effect=default_begin_exit_side_effect)
|
||||
self._default_session.begin = MagicMock(return_value=begin_cm)
|
||||
|
||||
def default_query_side_effect(*args):
|
||||
query = MagicMock()
|
||||
if args and args[0] == Dataset and shared_mock_data["dataset"] is not None:
|
||||
where_result = MagicMock()
|
||||
where_result.first.return_value = shared_mock_data["dataset"]
|
||||
query.where = MagicMock(return_value=where_result)
|
||||
elif args and args[0] == Document and shared_mock_data["documents"] is not None:
|
||||
where_result = MagicMock()
|
||||
where_result.where = MagicMock(return_value=where_result)
|
||||
|
||||
if shared_mock_data["doc_iter"] is None:
|
||||
docs = shared_mock_data["documents"] or [None]
|
||||
shared_mock_data["doc_iter"] = iter(docs)
|
||||
|
||||
where_result.first = lambda: next(shared_mock_data["doc_iter"], None)
|
||||
docs_or_empty = shared_mock_data["documents"] or []
|
||||
where_result.all = MagicMock(return_value=docs_or_empty)
|
||||
query.where = MagicMock(return_value=where_result)
|
||||
else:
|
||||
query.where = MagicMock(return_value=query)
|
||||
return query
|
||||
|
||||
self._default_session.query = MagicMock(side_effect=default_query_side_effect)
|
||||
|
||||
def __getattr__(self, name):
|
||||
# Forward all attribute access to the first session, or default if none created yet
|
||||
target_session = self._sessions[0] if self._sessions else self._default_session
|
||||
return getattr(target_session, name)
|
||||
|
||||
@property
|
||||
def all_sessions(self):
|
||||
"""Access all created sessions for testing."""
|
||||
return self._sessions
|
||||
|
||||
wrapper = SessionWrapper()
|
||||
yield wrapper
|
||||
query = MagicMock()
|
||||
session.query.return_value = query
|
||||
query.where.return_value = query
|
||||
yield session
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -356,9 +252,18 @@ class TestTaskEnqueuing:
|
||||
use the deprecated function.
|
||||
"""
|
||||
# Arrange
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
# Return documents one by one for each call
|
||||
mock_query.where.return_value.first.side_effect = mock_documents
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -399,9 +304,21 @@ class TestBatchProcessing:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
# Create an iterator for documents
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
# Return documents one by one for each call
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -440,9 +357,19 @@ class TestBatchProcessing:
|
||||
doc.stopped_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
mock_feature_service.get_features.return_value.billing.subscription.plan = CloudPlan.PROFESSIONAL
|
||||
@@ -480,9 +407,19 @@ class TestBatchProcessing:
|
||||
doc.stopped_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
mock_feature_service.get_features.return_value.billing.subscription.plan = CloudPlan.SANDBOX
|
||||
@@ -507,10 +444,7 @@ class TestBatchProcessing:
|
||||
"""
|
||||
# Arrange
|
||||
document_ids = []
|
||||
|
||||
# Set shared mock data with empty documents list
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = []
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -548,9 +482,19 @@ class TestProgressTracking:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -584,9 +528,19 @@ class TestProgressTracking:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -681,9 +635,19 @@ class TestErrorHandling:
|
||||
doc.stopped_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Set up to trigger vector space limit error
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
@@ -710,9 +674,17 @@ class TestErrorHandling:
|
||||
Errors during indexing should be caught and logged, but not crash the task.
|
||||
"""
|
||||
# Arrange
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first.side_effect = mock_documents
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Make IndexingRunner raise an exception
|
||||
mock_indexing_runner.run.side_effect = Exception("Indexing failed")
|
||||
@@ -736,9 +708,17 @@ class TestErrorHandling:
|
||||
but not treated as a failure.
|
||||
"""
|
||||
# Arrange
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first.side_effect = mock_documents
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Make IndexingRunner raise DocumentIsPausedError
|
||||
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document is paused")
|
||||
@@ -873,9 +853,17 @@ class TestTaskCancellation:
|
||||
Session cleanup should happen in finally block.
|
||||
"""
|
||||
# Arrange
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first.side_effect = mock_documents
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -895,9 +883,17 @@ class TestTaskCancellation:
|
||||
Session cleanup should happen even when errors occur.
|
||||
"""
|
||||
# Arrange
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first.side_effect = mock_documents
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Make IndexingRunner raise an exception
|
||||
mock_indexing_runner.run.side_effect = Exception("Test error")
|
||||
@@ -966,7 +962,6 @@ class TestAdvancedScenarios:
|
||||
document_ids = [str(uuid.uuid4()) for _ in range(3)]
|
||||
|
||||
# Create only 2 documents (simulate one missing)
|
||||
# The new code uses .all() which will only return existing documents
|
||||
mock_documents = []
|
||||
for i, doc_id in enumerate([document_ids[0], document_ids[2]]): # Skip middle one
|
||||
doc = MagicMock(spec=Document)
|
||||
@@ -976,9 +971,21 @@ class TestAdvancedScenarios:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data - .all() will only return existing documents
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
# Create iterator that returns None for missing document
|
||||
doc_responses = [mock_documents[0], None, mock_documents[1]]
|
||||
doc_iter = iter(doc_responses)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -1068,9 +1075,19 @@ class TestAdvancedScenarios:
|
||||
doc.stopped_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Set vector space exactly at limit
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
@@ -1202,9 +1219,19 @@ class TestAdvancedScenarios:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Billing disabled - limits should not be checked
|
||||
mock_feature_service.get_features.return_value.billing.enabled = False
|
||||
@@ -1246,9 +1273,19 @@ class TestIntegration:
|
||||
|
||||
# Set up rpop to return None for concurrency check (no more tasks)
|
||||
mock_redis.rpop.side_effect = [None]
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -1284,9 +1321,19 @@ class TestIntegration:
|
||||
|
||||
# Set up rpop to return None for concurrency check (no more tasks)
|
||||
mock_redis.rpop.side_effect = [None]
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -1368,9 +1415,17 @@ class TestEdgeCases:
|
||||
mock_document.indexing_status = "waiting"
|
||||
mock_document.processing_started_at = None
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = [mock_document]
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: mock_document
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -1410,9 +1465,17 @@ class TestEdgeCases:
|
||||
mock_document.indexing_status = "waiting"
|
||||
mock_document.processing_started_at = None
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = [mock_document]
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: mock_document
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
|
||||
mock_features.return_value.billing.enabled = False
|
||||
@@ -1492,9 +1555,19 @@ class TestEdgeCases:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Set vector space limit to 0 (unlimited)
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
@@ -1539,9 +1612,19 @@ class TestEdgeCases:
|
||||
doc.processing_started_at = None
|
||||
mock_documents.append(doc)
|
||||
|
||||
# Set shared mock data so all sessions can access it
|
||||
mock_db_session._shared_data["dataset"] = mock_dataset
|
||||
mock_db_session._shared_data["documents"] = mock_documents
|
||||
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
|
||||
|
||||
doc_iter = iter(mock_documents)
|
||||
|
||||
def mock_query_side_effect(*args):
|
||||
mock_query = MagicMock()
|
||||
if args[0] == Dataset:
|
||||
mock_query.where.return_value.first.return_value = mock_dataset
|
||||
elif args[0] == Document:
|
||||
mock_query.where.return_value.first = lambda: next(doc_iter, None)
|
||||
return mock_query
|
||||
|
||||
mock_db_session.query.side_effect = mock_query_side_effect
|
||||
|
||||
# Set negative vector space limit
|
||||
mock_feature_service.get_features.return_value.billing.enabled = True
|
||||
@@ -1592,9 +1675,19 @@ class TestPerformanceScenarios:
            doc.processing_started_at = None
            mock_documents.append(doc)

        # Set shared mock data so all sessions can access it
        mock_db_session._shared_data["dataset"] = mock_dataset
        mock_db_session._shared_data["documents"] = mock_documents
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset

        doc_iter = iter(mock_documents)

        def mock_query_side_effect(*args):
            mock_query = MagicMock()
            if args[0] == Dataset:
                mock_query.where.return_value.first.return_value = mock_dataset
            elif args[0] == Document:
                mock_query.where.return_value.first = lambda: next(doc_iter, None)
            return mock_query

        mock_db_session.query.side_effect = mock_query_side_effect

        # Configure billing with sufficient limits
        mock_feature_service.get_features.return_value.billing.enabled = True
@@ -1733,9 +1826,19 @@ class TestRobustness:
            doc.processing_started_at = None
            mock_documents.append(doc)

        # Set shared mock data so all sessions can access it
        mock_db_session._shared_data["dataset"] = mock_dataset
        mock_db_session._shared_data["documents"] = mock_documents
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset

        doc_iter = iter(mock_documents)

        def mock_query_side_effect(*args):
            mock_query = MagicMock()
            if args[0] == Dataset:
                mock_query.where.return_value.first.return_value = mock_dataset
            elif args[0] == Document:
                mock_query.where.return_value.first = lambda: next(doc_iter, None)
            return mock_query

        mock_db_session.query.side_effect = mock_query_side_effect

        # Make IndexingRunner raise an exception
        mock_indexing_runner.run.side_effect = RuntimeError("Unexpected indexing error")
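
Side note (not part of the diff): assigning an exception instance to a mock's side_effect makes every call raise it, which is how this robustness test forces the indexing-failure path. A tiny self-contained illustration:

from unittest.mock import MagicMock

import pytest

mock_indexing_runner = MagicMock()
mock_indexing_runner.run.side_effect = RuntimeError("Unexpected indexing error")

with pytest.raises(RuntimeError):
    mock_indexing_runner.run([])
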
@@ -1763,7 +1866,7 @@ class TestRobustness:
        - No exceptions occur

        Expected behavior:
        - All database sessions are closed
        - Database session is closed
        - No connection leaks
        """
        # Arrange
@@ -1776,9 +1879,19 @@ class TestRobustness:
            doc.processing_started_at = None
            mock_documents.append(doc)

        # Set shared mock data so all sessions can access it
        mock_db_session._shared_data["dataset"] = mock_dataset
        mock_db_session._shared_data["documents"] = mock_documents
        mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset

        doc_iter = iter(mock_documents)

        def mock_query_side_effect(*args):
            mock_query = MagicMock()
            if args[0] == Dataset:
                mock_query.where.return_value.first.return_value = mock_dataset
            elif args[0] == Document:
                mock_query.where.return_value.first = lambda: next(doc_iter, None)
            return mock_query

        mock_db_session.query.side_effect = mock_query_side_effect

        with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
            mock_features.return_value.billing.enabled = False
@@ -1786,11 +1899,10 @@ class TestRobustness:
            # Act
            _document_indexing(dataset_id, document_ids)

            # Assert - All created sessions should be closed
            # The code creates multiple sessions: validation, Phase 1 (parsing), Phase 3 (summary)
            assert len(mock_db_session.all_sessions) >= 1
            for session in mock_db_session.all_sessions:
                assert session.close.called, "All sessions should be closed"
            # Assert
            assert mock_db_session.close.called
            # Verify close is called exactly once
            assert mock_db_session.close.call_count == 1

    def test_task_proxy_handles_feature_service_failure(self, tenant_id, dataset_id, document_ids, mock_redis):
        """
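
The assertions above read mock_db_session.all_sessions and mock_db_session._shared_data, which implies a session mock that remembers every session it hands out. That fixture is not shown in this diff; the sketch below is one hypothetical way such a mock could be built (names and wiring are assumptions, not the project's actual conftest).

from unittest.mock import MagicMock


def make_mock_db_session():
    # Hypothetical fixture: the root mock doubles as the default session, and
    # every extra session it creates shares _shared_data and is recorded in
    # all_sessions so a test can assert that each one was closed.
    root = MagicMock()
    root._shared_data = {}
    root.all_sessions = [root]

    def _create_session(*_args, **_kwargs):
        child = MagicMock()
        child._shared_data = root._shared_data
        root.all_sessions.append(child)
        return child

    root.create_session = _create_session  # assumed hook; the real wiring may differ
    return root


mock_db_session = make_mock_db_session()
extra = mock_db_session.create_session()
extra.close()
mock_db_session.close()
assert all(s.close.called for s in mock_db_session.all_sessions)
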

api/uv.lock (generated)
@@ -1594,7 +1594,7 @@ requires-dist = [
    { name = "gevent", specifier = "~=25.9.1" },
    { name = "gmpy2", specifier = "~=2.2.1" },
    { name = "google-api-core", specifier = "==2.18.0" },
    { name = "google-api-python-client", specifier = "==2.189.0" },
    { name = "google-api-python-client", specifier = "==2.90.0" },
    { name = "google-auth", specifier = "==2.29.0" },
    { name = "google-auth-httplib2", specifier = "==0.2.0" },
    { name = "google-cloud-aiplatform", specifier = "==1.49.0" },
@@ -2306,7 +2306,7 @@ grpc = [

[[package]]
name = "google-api-python-client"
version = "2.189.0"
version = "2.90.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "google-api-core" },
@@ -2315,9 +2315,9 @@ dependencies = [
    { name = "httplib2" },
    { name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
    { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
]

[[package]]
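
After a lock change like the one above, one quick sanity check is to ask the installed distribution for its version; a small sketch, assuming google-api-python-client is installed in the current interpreter:

from importlib.metadata import version

# Should match the pin introduced by this change.
assert version("google-api-python-client") == "2.90.0"
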

@@ -194,11 +194,11 @@ const ConfigContent: FC<Props> = ({
        </div>
        {type === RETRIEVE_TYPE.multiWay && (
          <>
            <div className="my-2 flex flex-col items-center py-1">
              <div className="system-xs-semibold-uppercase mb-2 mr-2 shrink-0 text-text-secondary">
            <div className="my-2 flex h-6 items-center py-1">
              <div className="system-xs-semibold-uppercase mr-2 shrink-0 text-text-secondary">
                {t('rerankSettings', { ns: 'dataset' })}
              </div>
              <Divider bgStyle="gradient" className="m-0 !h-px" />
              <Divider bgStyle="gradient" className="mx-0 !h-px" />
            </div>
            {
              selectedDatasetsMode.inconsistentEmbeddingModel

@@ -5,14 +5,16 @@ import {
  RiBookOpenLine,
  RiDragDropLine,
  RiEqualizer2Line,
  RiRefreshLine,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { noop } from 'es-toolkit/function'
import Link from 'next/link'
import { useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import TabSlider from '@/app/components/base/tab-slider'
import { useToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import ReferenceSettingModal from '@/app/components/plugins/reference-setting-modal'
import { MARKETPLACE_API_PREFIX, SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config'
@@ -20,7 +22,8 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import useDocumentTitle from '@/hooks/use-document-title'
import { usePluginInstallation } from '@/hooks/use-query-params'
import { fetchBundleInfoFromMarketPlace, fetchManifestFromMarketPlace } from '@/service/plugins'
import { batchUpgradePlugins, fetchBundleInfoFromMarketPlace, fetchManifestFromMarketPlace } from '@/service/plugins'
import { useInvalidateInstalledPluginList } from '@/service/use-plugins'
import { sleep } from '@/utils'
import { cn } from '@/utils/classnames'
import { PLUGIN_PAGE_TABS_MAP } from '../hooks'
@@ -48,6 +51,8 @@ const PluginPage = ({
  const { t } = useTranslation()
  const docLink = useDocLink()
  useDocumentTitle(t('metadata.title', { ns: 'plugin' }))
  const { notify } = useToastContext()
  const invalidateInstalledPluginList = useInvalidateInstalledPluginList()

  // Use nuqs hook for installation state
  const [{ packageId, bundleInfo }, setInstallState] = usePluginInstallation()
@@ -60,6 +65,9 @@
    setFalse: doHideInstallFromMarketplace,
  }] = useBoolean(false)

  const [isBatchUpgrading, setIsBatchUpgrading] = useState(false)
  const [showBatchUpgradeTooltip, setShowBatchUpgradeTooltip] = useState(true)

  const hideInstallFromMarketplace = () => {
    doHideInstallFromMarketplace()
    setInstallState(null)
@@ -134,6 +142,45 @@ const PluginPage = ({
    enabled: isPluginsTab && canManagement,
  })

  const handleBatchUpgrade = useCallback(async () => {
    // Hide tooltip immediately when clicked
    setShowBatchUpgradeTooltip(false)
    setIsBatchUpgrading(true)
    try {
      const result = await batchUpgradePlugins()
      const { success, failed, skipped } = result

      // If there are updates (success or failed), show submitted message
      if (success.length > 0 || failed.length > 0) {
        notify({
          type: 'success',
          message: t('batchUpgrade.submittedMessage', { ns: 'plugin' }),
        })
      }
      // If all plugins are already up to date (only skipped)
      else if (skipped.length > 0) {
        notify({
          type: 'info',
          message: t('batchUpgrade.noUpdatesMessage', { ns: 'plugin' }),
        })
      }

      invalidateInstalledPluginList()
    }
    catch (error) {
      console.error('Failed to batch upgrade plugins:', error)
      notify({
        type: 'error',
        message: t('batchUpgrade.errorMessage', { ns: 'plugin' }),
      })
    }
    finally {
      setIsBatchUpgrading(false)
      // Re-enable tooltip after a short delay
      setTimeout(() => setShowBatchUpgradeTooltip(true), 500)
    }
  }, [t, notify, invalidateInstalledPluginList])

  const { dragging, fileUploader, fileChangeHandle, removeFile } = uploaderProps
  return (
    <div
@@ -189,6 +236,27 @@ const PluginPage = ({
            </>
          )
        }
        {
          isPluginsTab && canManagement && (
            <>
              <Tooltip
                popupContent={t('batchUpgrade.tooltip', { ns: 'plugin' })}
                disabled={!showBatchUpgradeTooltip}
              >
                <Button
                  variant="secondary-accent"
                  className="px-3"
                  onClick={handleBatchUpgrade}
                  disabled={isBatchUpgrading}
                >
                  <RiRefreshLine className={cn('mr-1 h-4 w-4', isBatchUpgrading && 'animate-spin')} />
                  {t('batchUpgrade.button', { ns: 'plugin' })}
                </Button>
              </Tooltip>
              <div className="mx-1 h-3.5 w-[1px] shrink-0 bg-divider-regular"></div>
            </>
          )
        }
        <PluginTasks />
        {canManagement && (
          <InstallPluginDropdown

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "المحدد فقط",
  "autoUpdate.upgradeModePlaceholder.exclude": "لن يتم تحديث الإضافات المحددة تلقائيًا",
  "autoUpdate.upgradeModePlaceholder.partial": "سيتم تحديث الإضافات المحددة فقط تلقائيًا. لم يتم تحديد أي إضافات حاليًا، لذلك لن يتم تحديث أي إضافات تلقائيًا.",
  "batchUpgrade.button": "تحديث الكل",
  "batchUpgrade.errorMessage": "فشل إرسال مهمة تحديث البرنامج المساعد. يرجى المحاولة مرة أخرى",
  "batchUpgrade.noUpdatesMessage": "جميع البرامج المساعدة محدثة",
  "batchUpgrade.submittedMessage": "تم إرسال مهمة تحديث البرنامج المساعد",
  "batchUpgrade.tooltip": "تحديث جميع البرامج المساعدة المثبتة من Marketplace إلى أحدث إصدار",
  "category.agents": "استراتيجيات الوكيل",
  "category.all": "الكل",
  "category.bundles": "حزم",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Nur ausgewählt",
  "autoUpdate.upgradeModePlaceholder.exclude": "Ausgewählte Plugins werden nicht automatisch aktualisiert",
  "autoUpdate.upgradeModePlaceholder.partial": "Nur ausgewählte Plugins werden automatisch aktualisiert. Derzeit sind keine Plugins ausgewählt, daher werden keine Plugins automatisch aktualisiert.",
  "batchUpgrade.button": "Alle aktualisieren",
  "batchUpgrade.errorMessage": "Fehler beim Senden der Plugin-Aktualisierungsaufgabe. Bitte erneut versuchen",
  "batchUpgrade.noUpdatesMessage": "Alle Plugins sind auf dem neuesten Stand",
  "batchUpgrade.submittedMessage": "Plugin-Aktualisierungsaufgabe eingereicht",
  "batchUpgrade.tooltip": "Alle vom Marketplace installierten Plugins auf die neueste Version aktualisieren",
  "category.agents": "Agenten-Strategien",
  "category.all": "Alle",
  "category.bundles": "Bündel",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Only selected",
  "autoUpdate.upgradeModePlaceholder.exclude": "Selected plugins will not auto-update",
  "autoUpdate.upgradeModePlaceholder.partial": "Only selected plugins will auto-update. No plugins are currently selected, so no plugins will auto-update.",
  "batchUpgrade.button": "Update All",
  "batchUpgrade.errorMessage": "Failed to submit plugin update task. Please try again.",
  "batchUpgrade.noUpdatesMessage": "All plugins are up to date",
  "batchUpgrade.submittedMessage": "Plugin update task submitted",
  "batchUpgrade.tooltip": "Update all plugins installed from Marketplace to the latest version",
  "category.agents": "Agent Strategies",
  "category.all": "All",
  "category.bundles": "Bundles",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Solo seleccionado",
  "autoUpdate.upgradeModePlaceholder.exclude": "Los plugins seleccionados no se actualizarán automáticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Solo los plugins seleccionados se actualizarán automáticamente. Actualmente no hay plugins seleccionados, por lo que no se actualizarán automáticamente.",
  "batchUpgrade.button": "Actualizar todo",
  "batchUpgrade.errorMessage": "Error al enviar la tarea de actualización del plugin. Por favor, inténtelo de nuevo",
  "batchUpgrade.noUpdatesMessage": "Todos los plugins están actualizados",
  "batchUpgrade.submittedMessage": "Tarea de actualización de plugin enviada",
  "batchUpgrade.tooltip": "Actualizar todos los plugins instalados desde Marketplace a la última versión",
  "category.agents": "Estrategias de los agentes",
  "category.all": "Todo",
  "category.bundles": "Paquetes",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "فقط انتخاب شده",
  "autoUpdate.upgradeModePlaceholder.exclude": "افزونه‌های انتخاب شده به‌صورت خودکار به‌روزرسانی نخواهند شد",
  "autoUpdate.upgradeModePlaceholder.partial": "فقط پلاگین‌های انتخاب شده به‌روزرسانی خودکار خواهند داشت. در حال حاضر هیچ پلاگینی انتخاب نشده است، بنابراین هیچ پلاگینی به‌روزرسانی خودکار نخواهد شد.",
  "batchUpgrade.button": "به‌روزرسانی همه",
  "batchUpgrade.errorMessage": "ارسال وظیفه به‌روزرسانی افزونه ناموفق بود. لطفاً دوباره امتحان کنید",
  "batchUpgrade.noUpdatesMessage": "همه افزونه‌ها به‌روز هستند",
  "batchUpgrade.submittedMessage": "وظیفه به‌روزرسانی افزونه ارسال شد",
  "batchUpgrade.tooltip": "به‌روزرسانی همه افزونه‌های نصب شده از Marketplace به آخرین نسخه",
  "category.agents": "استراتژی های عامل",
  "category.all": "همه",
  "category.bundles": "بسته",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Seulement sélectionné",
  "autoUpdate.upgradeModePlaceholder.exclude": "Les plugins sélectionnés ne se mettront pas à jour automatiquement.",
  "autoUpdate.upgradeModePlaceholder.partial": "Seuls les plugins sélectionnés se mettront à jour automatiquement. Aucun plugin n'est actuellement sélectionné, donc aucun plugin ne se mettra à jour automatiquement.",
  "batchUpgrade.button": "Tout mettre à jour",
  "batchUpgrade.errorMessage": "Échec de la soumission de la tâche de mise à jour du plugin. Veuillez réessayer",
  "batchUpgrade.noUpdatesMessage": "Tous les plugins sont à jour",
  "batchUpgrade.submittedMessage": "Tâche de mise à jour du plugin soumise",
  "batchUpgrade.tooltip": "Mettre à jour tous les plugins installés depuis Marketplace vers la dernière version",
  "category.agents": "Stratégies des agents",
  "category.all": "Tout",
  "category.bundles": "Paquets",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "केवल चयनित",
  "autoUpdate.upgradeModePlaceholder.exclude": "चुने हुए प्लगइन्स अपने आप अपडेट नहीं होंगे",
  "autoUpdate.upgradeModePlaceholder.partial": "केवल चयनित प्लगइन्स स्वतः अपडेट होंगे। वर्तमान में कोई प्लगइन चयनित नहीं है, इसलिए कोई प्लगइन स्वतः अपडेट नहीं होगा।",
  "batchUpgrade.button": "सभी को अपडेट करें",
  "batchUpgrade.errorMessage": "प्लगइन अपडेट कार्य सबमिट करने में विफल। कृपया पुनः प्रयास करें",
  "batchUpgrade.noUpdatesMessage": "सभी प्लगइन्स नवीनतम हैं",
  "batchUpgrade.submittedMessage": "प्लगइन अपडेट कार्य सबमिट किया गया",
  "batchUpgrade.tooltip": "Marketplace से इंस्टॉल किए गए सभी प्लगइन्स को नवीनतम संस्करण में अपडेट करें",
  "category.agents": "एजेंट रणनीतियाँ",
  "category.all": "सभी",
  "category.bundles": "बंडल",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Hanya dipilih",
  "autoUpdate.upgradeModePlaceholder.exclude": "Plugin yang dipilih tidak akan diperbarui secara otomatis",
  "autoUpdate.upgradeModePlaceholder.partial": "Hanya plugin yang dipilih yang akan diperbarui secara otomatis. Saat ini tidak ada plugin yang dipilih, jadi tidak ada plugin yang akan diperbarui secara otomatis.",
  "batchUpgrade.button": "Perbarui semua",
  "batchUpgrade.errorMessage": "Gagal mengirim tugas pembaruan plugin. Silakan coba lagi",
  "batchUpgrade.noUpdatesMessage": "Semua plugin sudah terbaru",
  "batchUpgrade.submittedMessage": "Tugas pembaruan plugin telah dikirim",
  "batchUpgrade.tooltip": "Perbarui semua plugin yang diinstal dari Marketplace ke versi terbaru",
  "category.agents": "Strategi Agen",
  "category.all": "Semua",
  "category.bundles": "Bundel",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Solo selezionati",
  "autoUpdate.upgradeModePlaceholder.exclude": "I plugin selezionati non verranno aggiornati automaticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Solo i plugin selezionati si aggiorneranno automaticamente. Attualmente non ci sono plugin selezionati, quindi nessun plugin si aggiornerà automaticamente.",
  "batchUpgrade.button": "Aggiorna tutto",
  "batchUpgrade.errorMessage": "Invio dell'attività di aggiornamento del plugin non riuscito. Riprova",
  "batchUpgrade.noUpdatesMessage": "Tutti i plugin sono aggiornati",
  "batchUpgrade.submittedMessage": "Attività di aggiornamento plugin inviata",
  "batchUpgrade.tooltip": "Aggiorna tutti i plugin installati da Marketplace all'ultima versione",
  "category.agents": "Strategie degli agenti",
  "category.all": "Tutto",
  "category.bundles": "Pacchetti",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "選択されたもののみ",
  "autoUpdate.upgradeModePlaceholder.exclude": "選択されたプラグインは自動更新されません",
  "autoUpdate.upgradeModePlaceholder.partial": "選択されたプラグインのみが自動更新されます。現在選択されているプラグインはないため、プラグインは自動更新されません。",
  "batchUpgrade.button": "すべて更新",
  "batchUpgrade.errorMessage": "プラグイン更新タスクの送信に失敗しました。もう一度お試しください",
  "batchUpgrade.noUpdatesMessage": "すべてのプラグインは最新です",
  "batchUpgrade.submittedMessage": "プラグイン更新タスクを送信しました",
  "batchUpgrade.tooltip": "Marketplaceからインストールされたすべてのプラグインを最新バージョンに更新",
  "category.agents": "エージェント戦略",
  "category.all": "すべて",
  "category.bundles": "バンドル",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "선택된 것만",
  "autoUpdate.upgradeModePlaceholder.exclude": "선택한 플러그인은 자동으로 업데이트되지 않습니다.",
  "autoUpdate.upgradeModePlaceholder.partial": "선택된 플러그인만 자동 업데이트됩니다. 현재 선택된 플러그인이 없으므로 자동 업데이트되는 플러그인은 없습니다.",
  "batchUpgrade.button": "모두 업데이트",
  "batchUpgrade.errorMessage": "플러그인 업데이트 작업 제출 실패. 다시 시도하세요",
  "batchUpgrade.noUpdatesMessage": "모든 플러그인이 최신 버전입니다",
  "batchUpgrade.submittedMessage": "플러그인 업데이트 작업을 제출했습니다",
  "batchUpgrade.tooltip": "Marketplace에서 설치된 모든 플러그인을 최신 버전으로 업데이트",
  "category.agents": "에이전트 전략",
  "category.all": "모두",
  "category.bundles": "번들",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Tylko wybrane",
  "autoUpdate.upgradeModePlaceholder.exclude": "Wybrane wtyczki nie będą aktualizować się automatycznie.",
  "autoUpdate.upgradeModePlaceholder.partial": "Tylko wybrane wtyczki będą się aktualizować automatycznie. Obecnie nie wybrano żadnych wtyczek, więc żadna wtyczka nie będzie się automatycznie aktualizować.",
  "batchUpgrade.button": "Aktualizuj wszystko",
  "batchUpgrade.errorMessage": "Nie udało się przesłać zadania aktualizacji wtyczki. Spróbuj ponownie",
  "batchUpgrade.noUpdatesMessage": "Wszystkie wtyczki są aktualne",
  "batchUpgrade.submittedMessage": "Przesłano zadanie aktualizacji wtyczki",
  "batchUpgrade.tooltip": "Zaktualizuj wszystkie wtyczki zainstalowane z Marketplace do najnowszej wersji",
  "category.agents": "Strategie agentów",
  "category.all": "Cały",
  "category.bundles": "Wiązki",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Somente selecionado",
  "autoUpdate.upgradeModePlaceholder.exclude": "Plugins selecionados não serão atualizados automaticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Apenas plugins selecionados serão atualizados automaticamente. Nenhum plugin está atualmente selecionado, então nenhum plugin será atualizado automaticamente.",
  "batchUpgrade.button": "Atualizar tudo",
  "batchUpgrade.errorMessage": "Falha ao enviar tarefa de atualização de plugin. Por favor, tente novamente",
  "batchUpgrade.noUpdatesMessage": "Todos os plugins estão atualizados",
  "batchUpgrade.submittedMessage": "Tarefa de atualização de plugin enviada",
  "batchUpgrade.tooltip": "Atualizar todos os plugins instalados do Marketplace para a versão mais recente",
  "category.agents": "Estratégias do agente",
  "category.all": "Todo",
  "category.bundles": "Pacotes",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Numai selectat",
  "autoUpdate.upgradeModePlaceholder.exclude": "Pluginurile selectate nu se vor actualiza automat.",
  "autoUpdate.upgradeModePlaceholder.partial": "Numai pluginurile selectate se vor actualiza automat. Nu există pluginuri selectate în prezent, așa că niciun plugin nu se va actualiza automat.",
  "batchUpgrade.button": "Actualizează tot",
  "batchUpgrade.errorMessage": "Trimiterea sarcinii de actualizare a pluginului a eșuat. Vă rugăm să încercați din nou",
  "batchUpgrade.noUpdatesMessage": "Toate pluginurile sunt actualizate",
  "batchUpgrade.submittedMessage": "Sarcina de actualizare a pluginului a fost trimisă",
  "batchUpgrade.tooltip": "Actualizați toate pluginurile instalate din Marketplace la cea mai recentă versiune",
  "category.agents": "Strategii pentru agenți",
  "category.all": "Tot",
  "category.bundles": "Pachete",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Только выбрано",
  "autoUpdate.upgradeModePlaceholder.exclude": "Выбранные плагины не будут обновляться автоматически",
  "autoUpdate.upgradeModePlaceholder.partial": "Только выбранные плагины будут автоматически обновляться. В данный момент плагины не выбраны, поэтому никакие плагины не будут автоматически обновляться.",
  "batchUpgrade.button": "Обновить все",
  "batchUpgrade.errorMessage": "Не удалось отправить задачу обновления плагина. Попробуйте снова",
  "batchUpgrade.noUpdatesMessage": "Все плагины обновлены",
  "batchUpgrade.submittedMessage": "Задача обновления плагина отправлена",
  "batchUpgrade.tooltip": "Обновить все плагины, установленные из Marketplace, до последней версии",
  "category.agents": "Агентские стратегии",
  "category.all": "Все",
  "category.bundles": "Пакеты",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Samo izbrano",
  "autoUpdate.upgradeModePlaceholder.exclude": "Izbrani vtičniki se ne bodo samodejno posodabljali.",
  "autoUpdate.upgradeModePlaceholder.partial": "Samo izbrani vtičniki se bodo samodejno posodabljali. Trenutno ni izbranih nobenih vtičnikov, zato se nobeni vtičniki ne bodo samodejno posodobili.",
  "batchUpgrade.button": "Posodobi vse",
  "batchUpgrade.errorMessage": "Pošiljanje naloge za posodobitev vtičnika ni uspelo. Prosimo, poskusite znova",
  "batchUpgrade.noUpdatesMessage": "Vsi vtičniki so posodobljeni",
  "batchUpgrade.submittedMessage": "Naloga za posodobitev vtičnika je bila poslana",
  "batchUpgrade.tooltip": "Posodobi vse vtičnike, nameščene iz Marketplace, na najnovejšo različico",
  "category.agents": "Strategije agenta",
  "category.all": "Vse",
  "category.bundles": "Paketi",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "เฉพาะที่เลือกไว้",
  "autoUpdate.upgradeModePlaceholder.exclude": "ปลั๊กอินที่เลือกจะไม่อัปเดตอัตโนมัติ",
  "autoUpdate.upgradeModePlaceholder.partial": "เฉพาะปลั๊กอินที่เลือกจะอัปเดตโดยอัตโนมัติ ขณะนี้ไม่มีปลั๊กอินใดที่ถูกเลือก ดังนั้นจะไม่มีปลั๊กอินใดที่อัปเดตโดยอัตโนมัติ",
  "batchUpgrade.button": "อัปเดตทั้งหมด",
  "batchUpgrade.errorMessage": "ส่งงานอัปเดตปลั๊กอินล้มเหลว กรุณาลองอีกครั้ง",
  "batchUpgrade.noUpdatesMessage": "ปลั๊กอินทั้งหมดเป็นเวอร์ชันล่าสุดแล้ว",
  "batchUpgrade.submittedMessage": "ส่งงานอัปเดตปลั๊กอินแล้ว",
  "batchUpgrade.tooltip": "อัปเดตปลั๊กอินทั้งหมดที่ติดตั้งจาก Marketplace เป็นเวอร์ชันล่าสุด",
  "category.agents": "กลยุทธ์ตัวแทน",
  "category.all": "ทั้งหมด",
  "category.bundles": "ชุดรวม",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Sadece seçilen",
  "autoUpdate.upgradeModePlaceholder.exclude": "Seçilen eklentiler otomatik olarak güncellenmeyecek.",
  "autoUpdate.upgradeModePlaceholder.partial": "Sadece seçilen eklentiler otomatik olarak güncellenecek. Şu anda hiçbir eklenti seçilmedi, bu yüzden hiçbir eklenti otomatik olarak güncellenmeyecek.",
  "batchUpgrade.button": "Tümünü güncelle",
  "batchUpgrade.errorMessage": "Eklenti güncelleme görevi gönderimi başarısız. Lütfen tekrar deneyin",
  "batchUpgrade.noUpdatesMessage": "Tüm eklentiler güncel",
  "batchUpgrade.submittedMessage": "Eklenti güncelleme görevi gönderildi",
  "batchUpgrade.tooltip": "Marketplace'ten yüklenen tüm eklentileri en son sürüme güncelle",
  "category.agents": "Ajan Stratejileri",
  "category.all": "Tüm",
  "category.bundles": "Paketler",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Тільки вибрані",
  "autoUpdate.upgradeModePlaceholder.exclude": "Вибрані плагіни не будуть оновлюватися автоматично",
  "autoUpdate.upgradeModePlaceholder.partial": "Тільки вибрані плагіни будуть автоматично оновлюватись. Наразі жоден з плагінів не вибрано, тому жоден плагін не буде автоматично оновлений.",
  "batchUpgrade.button": "Оновити все",
  "batchUpgrade.errorMessage": "Не вдалося надіслати завдання оновлення плагіна. Спробуйте ще раз",
  "batchUpgrade.noUpdatesMessage": "Усі плагіни оновлені",
  "batchUpgrade.submittedMessage": "Завдання оновлення плагіна надіслано",
  "batchUpgrade.tooltip": "Оновити всі плагіни, встановлені з Marketplace, до останньої версії",
  "category.agents": "Стратегії агентів",
  "category.all": "Увесь",
  "category.bundles": "Пакети",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "Chỉ được chọn",
  "autoUpdate.upgradeModePlaceholder.exclude": "Các plugin được chọn sẽ không tự động cập nhật",
  "autoUpdate.upgradeModePlaceholder.partial": "Chỉ những plugin được chọn mới tự động cập nhật. Hiện tại không có plugin nào được chọn, vì vậy sẽ không có plugin nào tự động cập nhật.",
  "batchUpgrade.button": "Cập nhật tất cả",
  "batchUpgrade.errorMessage": "Gửi tác vụ cập nhật plugin thất bại. Vui lòng thử lại",
  "batchUpgrade.noUpdatesMessage": "Tất cả các plugin đã được cập nhật",
  "batchUpgrade.submittedMessage": "Đã gửi tác vụ cập nhật plugin",
  "batchUpgrade.tooltip": "Cập nhật tất cả các plugin được cài đặt từ Marketplace lên phiên bản mới nhất",
  "category.agents": "Chiến lược đại lý",
  "category.all": "Tất cả",
  "category.bundles": "Bó",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "仅选定",
  "autoUpdate.upgradeModePlaceholder.exclude": "选定的插件将不会自动更新",
  "autoUpdate.upgradeModePlaceholder.partial": "仅选定的插件将自动更新。目前未选择任何插件,因此不会自动更新任何插件。",
  "batchUpgrade.button": "全部更新",
  "batchUpgrade.errorMessage": "提交插件更新任务失败,请重试",
  "batchUpgrade.noUpdatesMessage": "所有插件已是最新版本",
  "batchUpgrade.submittedMessage": "已提交插件更新任务",
  "batchUpgrade.tooltip": "将所有从 Marketplace 安装的插件更新到最新版本",
  "category.agents": "Agent 策略",
  "category.all": "全部",
  "category.bundles": "插件集",

@@ -63,6 +63,11 @@
  "autoUpdate.upgradeMode.partial": "僅選擇",
  "autoUpdate.upgradeModePlaceholder.exclude": "選定的插件將不會自動更新",
  "autoUpdate.upgradeModePlaceholder.partial": "只有選定的插件會自動更新。目前未選定任何插件,因此不會自動更新任何插件。",
  "batchUpgrade.button": "全部更新",
  "batchUpgrade.errorMessage": "提交插件更新任務失敗,請重試",
  "batchUpgrade.noUpdatesMessage": "所有插件已是最新版本",
  "batchUpgrade.submittedMessage": "已提交插件更新任務",
  "batchUpgrade.tooltip": "將所有從 Marketplace 安裝的插件更新到最新版本",
  "category.agents": "代理策略",
  "category.all": "都",
  "category.bundles": "束",

@@ -104,3 +104,27 @@ export const updatePermission = async (permissions: Permissions) => {
export const uninstallPlugin = async (pluginId: string) => {
  return post<UninstallPluginResponse>('/workspaces/current/plugin/uninstall', { body: { plugin_installation_id: pluginId } })
}

export const batchUpgradePlugins = async () => {
  return post<{
    success: Array<{
      plugin_id: string
      from_version: string
      to_version: string
      from_identifier: string
      to_identifier: string
    }>
    failed: Array<{
      plugin_id: string
      current_version: string
      error: string
    }>
    skipped: Array<{
      plugin_id: string
      reason: string
      current_version: string
    }>
  }>('/workspaces/current/plugin/upgrade/batch', {
    body: {},
  })
}
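
For completeness, a hypothetical way to exercise the new batch-upgrade endpoint directly, outside the frontend service above. The host, the /console/api prefix, and the bearer token are assumptions for illustration; the success/failed/skipped keys mirror the response type declared in batchUpgradePlugins.

import requests

resp = requests.post(
    "http://localhost:5001/console/api/workspaces/current/plugin/upgrade/batch",
    headers={"Authorization": "Bearer <console-access-token>"},  # assumed auth scheme
    json={},
    timeout=30,
)
resp.raise_for_status()
result = resp.json()
print("upgraded:", [p["plugin_id"] for p in result["success"]])
print("failed:  ", [p["plugin_id"] for p in result["failed"]])
print("skipped: ", [p["plugin_id"] for p in result["skipped"]])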