Mirror of https://github.com/onyx-dot-app/onyx.git (synced 2026-03-30 12:02:42 +00:00)

Compare commits (3 commits): v3.1.0-clo... to ref-file-t...

| Author | SHA1 | Date |
|---|---|---|
| | 1d2d79127d | |
| | f44663c23c | |
| | b73d26aedd | |

.github/workflows/helm-chart-releases.yml (vendored, 3 changes)
@@ -47,8 +47,7 @@ jobs:
done

- name: Publish Helm charts to gh-pages
# NOTE: HEAD of https://github.com/stefanprodan/helm-gh-pages/pull/43
uses: stefanprodan/helm-gh-pages@ad32ad3b8720abfeaac83532fd1e9bdfca5bbe27 # zizmor: ignore[impostor-commit]
uses: stefanprodan/helm-gh-pages@0ad2bb377311d61ac04ad9eb6f252fb68e207260 # ratchet:stefanprodan/helm-gh-pages@v1.7.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
charts_dir: deployment/helm/charts
@@ -35,7 +35,7 @@ Onyx comes loaded with advanced features like Agents, Web Search, RAG, MCP, Deep
> [!TIP]
> Run Onyx with one command (or see deployment section below):
> ```
> curl -fsSL https://onyx.app/install_onyx.sh | bash
> curl -fsSL https://raw.githubusercontent.com/onyx-dot-app/onyx/main/deployment/docker_compose/install.sh > install.sh && chmod +x install.sh && ./install.sh
> ```

****
@@ -28,7 +28,6 @@ from onyx.access.models import DocExternalAccess
from onyx.access.models import ElementExternalAccess
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
@@ -188,6 +187,7 @@ def check_for_doc_permissions_sync(self: Task, *, tenant_id: str) -> bool | None
# (which lives on a different db number)
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore

lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_DOC_PERMISSIONS_SYNC_BEAT_LOCK,
@@ -227,7 +227,6 @@ def check_for_doc_permissions_sync(self: Task, *, tenant_id: str) -> bool | None
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_permission_sync_fences(
tenant_id, r, r_replica, r_celery, lock_beat
)
@@ -29,7 +29,6 @@ from ee.onyx.external_permissions.sync_params import (
from ee.onyx.external_permissions.sync_params import get_source_perm_sync_config
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.tasks.beat_schedule import CLOUD_BEAT_MULTIPLIER_DEFAULT
from onyx.background.error_logging import emit_background_error
@@ -163,6 +162,7 @@ def check_for_external_group_sync(self: Task, *, tenant_id: str) -> bool | None:
# (which lives on a different db number)
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore

lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_EXTERNAL_GROUP_SYNC_BEAT_LOCK,
@@ -221,7 +221,6 @@ def check_for_external_group_sync(self: Task, *, tenant_id: str) -> bool | None:
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_external_group_sync_fences(
tenant_id, self.app, r, r_replica, r_celery, lock_beat
)
@@ -1,6 +1,5 @@
# These are helper objects for tracking the keys we need to write in redis
import json
import threading
from typing import Any
from typing import cast

@@ -8,59 +7,7 @@ from celery import Celery
from redis import Redis

from onyx.background.celery.configs.base import CELERY_SEPARATOR
from onyx.configs.app_configs import REDIS_HEALTH_CHECK_INTERVAL
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import REDIS_SOCKET_KEEPALIVE_OPTIONS


_broker_client: Redis | None = None
_broker_url: str | None = None
_broker_client_lock = threading.Lock()


def celery_get_broker_client(app: Celery) -> Redis:
"""Return a shared Redis client connected to the Celery broker DB.

Uses a module-level singleton so all tasks on a worker share one
connection instead of creating a new one per call. The client
connects directly to the broker Redis DB (parsed from the broker URL).

Thread-safe via lock — safe for use in Celery thread-pool workers.

Usage:
r_celery = celery_get_broker_client(self.app)
length = celery_get_queue_length(queue, r_celery)
"""
global _broker_client, _broker_url
with _broker_client_lock:
url = app.conf.broker_url
if _broker_client is not None and _broker_url == url:
try:
_broker_client.ping()
return _broker_client
except Exception:
try:
_broker_client.close()
except Exception:
pass
_broker_client = None
elif _broker_client is not None:
try:
_broker_client.close()
except Exception:
pass
_broker_client = None

_broker_url = url
_broker_client = Redis.from_url(
url,
decode_responses=False,
health_check_interval=REDIS_HEALTH_CHECK_INTERVAL,
socket_keepalive=True,
socket_keepalive_options=REDIS_SOCKET_KEEPALIVE_OPTIONS,
retry_on_timeout=True,
)
return _broker_client


def celery_get_unacked_length(r: Redis) -> int:
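The hunks above contrast two ways of getting a Redis client for the Celery broker: a fresh `broker_connection().channel().client` per call versus a lock-guarded, module-level singleton with a PING-based liveness check. A minimal, self-contained sketch of that singleton pattern follows; the names (`get_shared_client`, `_client`) are illustrative, not the repository's, and only standard redis-py calls (`Redis.from_url`, `ping`) are assumed.

```python
import threading

from redis import Redis

_client: Redis | None = None
_client_url: str | None = None
_lock = threading.Lock()


def get_shared_client(url: str) -> Redis:
    """Return one Redis client per process, recreating it only when the URL
    changes or the cached connection stops answering PING."""
    global _client, _client_url
    with _lock:
        if _client is not None and _client_url == url:
            try:
                _client.ping()  # cheap liveness check
                return _client
            except Exception:
                _client = None  # stale connection; fall through and reconnect
        _client = Redis.from_url(url, socket_keepalive=True, retry_on_timeout=True)
        _client_url = url
        return _client
```

The lock makes the helper safe to call from thread-pool workers, and the PING check trades one extra round trip per call for automatic recovery from dropped connections.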
@@ -14,7 +14,6 @@ from redis.lock import Lock as RedisLock
from sqlalchemy.orm import Session

from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.configs.app_configs import JOB_TIMEOUT
@@ -133,6 +132,7 @@ def revoke_tasks_blocking_deletion(
def check_for_connector_deletion_task(self: Task, *, tenant_id: str) -> bool | None:
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore

lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_DELETION_BEAT_LOCK,
@@ -149,7 +149,6 @@ def check_for_connector_deletion_task(self: Task, *, tenant_id: str) -> bool | N
if not r.exists(OnyxRedisSignals.BLOCK_VALIDATE_CONNECTOR_DELETION_FENCES):
# clear fences that don't have associated celery tasks in progress
try:
r_celery = celery_get_broker_client(self.app)
validate_connector_deletion_fences(
tenant_id, r, r_replica, r_celery, lock_beat
)
@@ -22,7 +22,6 @@ from sqlalchemy.orm import Session

from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.celery_utils import httpx_init_vespa_pool
from onyx.background.celery.memory_monitoring import emit_process_memory
@@ -450,7 +449,7 @@ def check_indexing_completion(
):
# Check if the task exists in the celery queue
# This handles the case where Redis dies after task creation but before task execution
redis_celery = celery_get_broker_client(task.app)
redis_celery = task.app.broker_connection().channel().client # type: ignore
task_exists = celery_find_task(
attempt.celery_task_id,
OnyxCeleryQueues.CONNECTOR_DOC_FETCHING,
@@ -1,5 +1,6 @@
import json
import time
from collections.abc import Callable
from datetime import timedelta
from itertools import islice
from typing import Any
@@ -18,7 +19,6 @@ from sqlalchemy import text
from sqlalchemy.orm import Session

from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.memory_monitoring import emit_process_memory
@@ -698,27 +698,31 @@ def monitor_background_processes(self: Task, *, tenant_id: str) -> None:
return None

try:
# Get Redis client for Celery broker
redis_celery = self.app.broker_connection().channel().client # type: ignore
redis_std = get_redis_client()

# Collect queue metrics with broker connection
r_celery = celery_get_broker_client(self.app)
queue_metrics = _collect_queue_metrics(r_celery)
# Define metric collection functions and their dependencies
metric_functions: list[Callable[[], list[Metric]]] = [
lambda: _collect_queue_metrics(redis_celery),
lambda: _collect_connector_metrics(db_session, redis_std),
lambda: _collect_sync_metrics(db_session, redis_std),
]

# Collect remaining metrics (no broker connection needed)
# Collect and log each metric
with get_session_with_current_tenant() as db_session:
all_metrics: list[Metric] = queue_metrics
all_metrics.extend(_collect_connector_metrics(db_session, redis_std))
all_metrics.extend(_collect_sync_metrics(db_session, redis_std))
for metric_fn in metric_functions:
metrics = metric_fn()
for metric in metrics:
# double check to make sure we aren't double-emitting metrics
if metric.key is None or not _has_metric_been_emitted(
redis_std, metric.key
):
metric.log()
metric.emit(tenant_id)

for metric in all_metrics:
if metric.key is None or not _has_metric_been_emitted(
redis_std, metric.key
):
metric.log()
metric.emit(tenant_id)

if metric.key is not None:
_mark_metric_as_emitted(redis_std, metric.key)
if metric.key is not None:
_mark_metric_as_emitted(redis_std, metric.key)

task_logger.info("Successfully collected background metrics")
except SoftTimeLimitExceeded:
@@ -886,7 +890,7 @@ def monitor_celery_queues_helper(
) -> None:
"""A task to monitor all celery queue lengths."""

r_celery = celery_get_broker_client(task.app)
r_celery = task.app.broker_connection().channel().client # type: ignore
n_celery = celery_get_queue_length(OnyxCeleryQueues.PRIMARY, r_celery)
n_docfetching = celery_get_queue_length(
OnyxCeleryQueues.CONNECTOR_DOC_FETCHING, r_celery
@@ -1076,7 +1080,7 @@ def cloud_monitor_celery_pidbox(
num_deleted = 0

MAX_PIDBOX_IDLE = 24 * 3600 # 1 day in seconds
r_celery = celery_get_broker_client(self.app)
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
for key in r_celery.scan_iter("*.reply.celery.pidbox"):
key_bytes = cast(bytes, key)
key_str = key_bytes.decode("utf-8")
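Both variants of the monitoring hunk above guard against double-emitting a metric by checking a Redis marker before emitting and writing the marker afterwards. A minimal sketch of that emit-once pattern follows; the marker key format, TTL, and function names are assumptions for illustration, not the repository's `_has_metric_been_emitted` / `_mark_metric_as_emitted` helpers.

```python
from collections.abc import Callable

from redis import Redis

EMITTED_MARKER_TTL = 24 * 3600  # keep "already emitted" markers for a day (assumed)


def emit_once(r: Redis, key: str | None, emit: Callable[[], None]) -> None:
    """Emit a metric at most once per TTL window, using a Redis marker key.

    A key of None means the metric has no dedup identity and is always emitted.
    """
    marker = f"monitoring:emitted:{key}" if key else None
    if marker and r.exists(marker):
        return  # already emitted recently; skip
    emit()
    if marker:
        r.set(marker, "1", ex=EMITTED_MARKER_TTL)
```

The marker write happens only after a successful emit, so a crash between the check and the emit at worst re-emits once rather than dropping the metric.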
@@ -17,7 +17,6 @@ from sqlalchemy.orm import Session

from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
@@ -204,6 +203,7 @@ def _is_pruning_due(cc_pair: ConnectorCredentialPair) -> bool:
def check_for_pruning(self: Task, *, tenant_id: str) -> bool | None:
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore

lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_PRUNE_BEAT_LOCK,
@@ -261,7 +261,6 @@ def check_for_pruning(self: Task, *, tenant_id: str) -> bool | None:
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_pruning_fences(tenant_id, r, r_replica, r_celery, lock_beat)
except Exception:
task_logger.exception("Exception while validating pruning fences")
@@ -16,7 +16,6 @@ from sqlalchemy.orm import Session

from onyx.access.access import build_access_for_user_files
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_utils import httpx_init_vespa_pool
from onyx.background.celery.tasks.shared.RetryDocumentIndex import RetryDocumentIndex
@@ -106,7 +105,7 @@ def _user_file_delete_queued_key(user_file_id: str | UUID) -> str:


def get_user_file_project_sync_queue_depth(celery_app: Celery) -> int:
redis_celery = celery_get_broker_client(celery_app)
redis_celery: Redis = celery_app.broker_connection().channel().client # type: ignore
return celery_get_queue_length(
OnyxCeleryQueues.USER_FILE_PROJECT_SYNC, redis_celery
)
@@ -239,7 +238,7 @@ def check_user_file_processing(self: Task, *, tenant_id: str) -> None:
skipped_guard = 0
try:
# --- Protection 1: queue depth backpressure ---
r_celery = celery_get_broker_client(self.app)
r_celery = self.app.broker_connection().channel().client # type: ignore
queue_len = celery_get_queue_length(
OnyxCeleryQueues.USER_FILE_PROCESSING, r_celery
)
@@ -592,7 +591,7 @@ def check_for_user_file_delete(self: Task, *, tenant_id: str) -> None:
# --- Protection 1: queue depth backpressure ---
# NOTE: must use the broker's Redis client (not redis_client) because
# Celery queues live on a separate Redis DB with CELERY_SEPARATOR keys.
r_celery = celery_get_broker_client(self.app)
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
queue_len = celery_get_queue_length(OnyxCeleryQueues.USER_FILE_DELETE, r_celery)
if queue_len > USER_FILE_DELETE_MAX_QUEUE_DEPTH:
task_logger.warning(
@@ -7,6 +7,7 @@ from fastapi import HTTPException
from fastapi import UploadFile
from pydantic import BaseModel
from pydantic import ConfigDict
from pydantic import Field
from sqlalchemy import func
from sqlalchemy.orm import Session
from starlette.background import BackgroundTasks
@@ -34,9 +35,19 @@ class CategorizedFilesResult(BaseModel):
user_files: list[UserFile]
rejected_files: list[RejectedFile]
id_to_temp_id: dict[str, str]
# Filenames that should be stored but not indexed.
skip_indexing_filenames: set[str] = Field(default_factory=set)
# Allow SQLAlchemy ORM models inside this result container
model_config = ConfigDict(arbitrary_types_allowed=True)

@property
def indexable_files(self) -> list[UserFile]:
return [
uf
for uf in self.user_files
if (uf.name or "") not in self.skip_indexing_filenames
]


def build_hashed_file_key(file: UploadFile) -> str:
name_prefix = (file.filename or "")[:50]
@@ -98,6 +109,7 @@ def create_user_files(
user_files=user_files,
rejected_files=rejected_files,
id_to_temp_id=id_to_temp_id,
skip_indexing_filenames=categorized_files.skip_indexing,
)


@@ -123,6 +135,7 @@ def upload_files_to_user_files_with_indexing(
user_files = categorized_files_result.user_files
rejected_files = categorized_files_result.rejected_files
id_to_temp_id = categorized_files_result.id_to_temp_id
indexable_files = categorized_files_result.indexable_files
# Trigger per-file processing immediately for the current tenant
tenant_id = get_current_tenant_id()
for rejected_file in rejected_files:
@@ -134,12 +147,12 @@ def upload_files_to_user_files_with_indexing(
from onyx.background.task_utils import drain_processing_loop

background_tasks.add_task(drain_processing_loop, tenant_id)
for user_file in user_files:
for user_file in indexable_files:
logger.info(f"Queued in-process processing for user_file_id={user_file.id}")
else:
from onyx.background.celery.versioned_apps.client import app as client_app

for user_file in user_files:
for user_file in indexable_files:
task = client_app.send_task(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE,
kwargs={"user_file_id": user_file.id, "tenant_id": tenant_id},
@@ -155,6 +168,7 @@ def upload_files_to_user_files_with_indexing(
user_files=user_files,
rejected_files=rejected_files,
id_to_temp_id=id_to_temp_id,
skip_indexing_filenames=categorized_files_result.skip_indexing_filenames,
)
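The hunks above thread a set of "store but do not index" filenames from categorization through to task dispatch, and only the files outside that set are queued for processing. A tiny sketch of that filtering flow, with hypothetical names standing in for the ORM-backed result type:

```python
from dataclasses import dataclass, field


@dataclass
class UploadResult:
    filenames: list[str]
    skip_indexing: set[str] = field(default_factory=set)

    @property
    def indexable(self) -> list[str]:
        # Stored-but-not-indexed files (e.g. oversized spreadsheets) are excluded here.
        return [name for name in self.filenames if name not in self.skip_indexing]


result = UploadResult(filenames=["notes.txt", "big.xlsx"], skip_indexing={"big.xlsx"})
for name in result.indexable:
    print(f"queueing indexing for {name}")  # only notes.txt is queued
```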
@@ -5,7 +5,6 @@ accidentally reaches the vector DB layer will fail loudly instead of timing
out against a nonexistent Vespa/OpenSearch instance.
"""

from collections.abc import Iterable
from typing import Any

from onyx.context.search.models import IndexFilters
@@ -67,7 +66,7 @@ class DisabledDocumentIndex(DocumentIndex):
# ------------------------------------------------------------------
def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk], # noqa: ARG002
chunks: list[DocMetadataAwareIndexChunk], # noqa: ARG002
index_batch_params: IndexBatchParams, # noqa: ARG002
) -> set[DocumentInsertionRecord]:
raise RuntimeError(VECTOR_DB_DISABLED_ERROR)
@@ -1,5 +1,4 @@
import abc
from collections.abc import Iterable
from dataclasses import dataclass
from datetime import datetime
from typing import Any
@@ -207,7 +206,7 @@ class Indexable(abc.ABC):
@abc.abstractmethod
def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
chunks: list[DocMetadataAwareIndexChunk],
index_batch_params: IndexBatchParams,
) -> set[DocumentInsertionRecord]:
"""
@@ -227,8 +226,8 @@ class Indexable(abc.ABC):
it is done automatically outside of this code.

Parameters:
- chunks: Document chunks with all of the information needed for
indexing to the document index.
- chunks: Document chunks with all of the information needed for indexing to the document
index.
- tenant_id: The tenant id of the user whose chunks are being indexed
- large_chunks_enabled: Whether large chunks are enabled
@@ -1,5 +1,4 @@
import abc
from collections.abc import Iterable
from typing import Self

from pydantic import BaseModel
@@ -210,10 +209,10 @@ class Indexable(abc.ABC):
@abc.abstractmethod
def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
chunks: list[DocMetadataAwareIndexChunk],
indexing_metadata: IndexingMetadata,
) -> list[DocumentInsertionRecord]:
"""Indexes an iterable of document chunks into the document index.
"""Indexes a list of document chunks into the document index.

This is often a batch operation including chunks from multiple
documents.
@@ -1,5 +1,5 @@
import json
from collections.abc import Iterable
from collections import defaultdict
from typing import Any

import httpx
@@ -351,7 +351,7 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):

def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
chunks: list[DocMetadataAwareIndexChunk],
index_batch_params: IndexBatchParams,
) -> set[OldDocumentInsertionRecord]:
"""
@@ -647,10 +647,10 @@ class OpenSearchDocumentIndex(DocumentIndex):

def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
indexing_metadata: IndexingMetadata,
chunks: list[DocMetadataAwareIndexChunk],
indexing_metadata: IndexingMetadata, # noqa: ARG002
) -> list[DocumentInsertionRecord]:
"""Indexes an iterable of document chunks into the document index.
"""Indexes a list of document chunks into the document index.

Groups chunks by document ID and for each document, deletes existing
chunks and indexes the new chunks in bulk.
@@ -673,34 +673,29 @@ class OpenSearchDocumentIndex(DocumentIndex):
document is newly indexed or had already existed and was just
updated.
"""
total_chunks = sum(
cc.new_chunk_cnt
for cc in indexing_metadata.doc_id_to_chunk_cnt_diff.values()
# Group chunks by document ID.
doc_id_to_chunks: dict[str, list[DocMetadataAwareIndexChunk]] = defaultdict(
list
)
for chunk in chunks:
doc_id_to_chunks[chunk.source_document.id].append(chunk)
logger.debug(
f"[OpenSearchDocumentIndex] Indexing {total_chunks} chunks from {len(indexing_metadata.doc_id_to_chunk_cnt_diff)} "
f"[OpenSearchDocumentIndex] Indexing {len(chunks)} chunks from {len(doc_id_to_chunks)} "
f"documents for index {self._index_name}."
)

document_indexing_results: list[DocumentInsertionRecord] = []
deleted_doc_ids: set[str] = set()
# Buffer chunks per document as they arrive from the iterable.
# When the document ID changes flush the buffered chunks.
current_doc_id: str | None = None
current_chunks: list[DocMetadataAwareIndexChunk] = []

def _flush_chunks(doc_chunks: list[DocMetadataAwareIndexChunk]) -> None:
assert len(doc_chunks) > 0, "doc_chunks is empty"

# Try to index per-document.
for _, chunks in doc_id_to_chunks.items():
# Create a batch of OpenSearch-formatted chunks for bulk insertion.
# Since we are doing this in batches, an error occurring midway
# can result in a state where chunks are deleted and not all the
# new chunks have been indexed.
# Do this before deleting existing chunks to reduce the amount of
# time the document index has no content for a given document, and
# to reduce the chance of entering a state where we delete chunks,
# then some error happens, and never successfully index new chunks.
chunk_batch: list[DocumentChunk] = [
_convert_onyx_chunk_to_opensearch_document(chunk)
for chunk in doc_chunks
_convert_onyx_chunk_to_opensearch_document(chunk) for chunk in chunks
]
onyx_document: Document = doc_chunks[0].source_document
onyx_document: Document = chunks[0].source_document
# First delete the doc's chunks from the index. This is so that
# there are no dangling chunks in the index, in the event that the
# new document's content contains fewer chunks than the previous
@@ -709,40 +704,22 @@ class OpenSearchDocumentIndex(DocumentIndex):
# if the chunk count has actually decreased. This assumes that
# overlapping chunks are perfectly overwritten. If we can't
# guarantee that then we need the code as-is.
if onyx_document.id not in deleted_doc_ids:
num_chunks_deleted = self.delete(
onyx_document.id, onyx_document.chunk_count
)
deleted_doc_ids.add(onyx_document.id)
# If we see that chunks were deleted we assume the doc already
# existed. We record the result before bulk_index_documents
# runs. If indexing raises, this entire result list is discarded
# by the caller's retry logic, so early recording is safe.
document_indexing_results.append(
DocumentInsertionRecord(
document_id=onyx_document.id,
already_existed=num_chunks_deleted > 0,
)
)
num_chunks_deleted = self.delete(
onyx_document.id, onyx_document.chunk_count
)
# If we see that chunks were deleted we assume the doc already
# existed.
document_insertion_record = DocumentInsertionRecord(
document_id=onyx_document.id,
already_existed=num_chunks_deleted > 0,
)
# Now index. This will raise if a chunk of the same ID exists, which
# we do not expect because we should have deleted all chunks.
self._client.bulk_index_documents(
documents=chunk_batch,
tenant_state=self._tenant_state,
)

for chunk in chunks:
doc_id = chunk.source_document.id
if doc_id != current_doc_id:
if current_chunks:
_flush_chunks(current_chunks)
current_doc_id = doc_id
current_chunks = [chunk]
else:
current_chunks.append(chunk)

if current_chunks:
_flush_chunks(current_chunks)
document_indexing_results.append(document_insertion_record)

return document_indexing_results
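One side of the OpenSearch hunk groups the whole chunk list by document ID with a `defaultdict`; the other buffers chunks as they stream in and flushes whenever the document ID changes. A small generic sketch of the streaming variant follows; it assumes, as the buffered code does, that all chunks for a given document arrive consecutively, and the names here are illustrative only.

```python
from collections.abc import Iterable, Iterator


def group_consecutive(chunks: Iterable[tuple[str, int]]) -> Iterator[list[tuple[str, int]]]:
    """Yield per-document batches from a stream of (doc_id, chunk) pairs without
    materializing the whole stream, assuming chunks for one document arrive
    consecutively."""
    current_id: str | None = None
    buffer: list[tuple[str, int]] = []
    for doc_id, chunk in chunks:
        if doc_id != current_id and buffer:
            yield buffer  # flush the previous document's chunks
            buffer = []
        current_id = doc_id
        buffer.append((doc_id, chunk))
    if buffer:
        yield buffer  # flush the final document


stream = [("a", 0), ("a", 1), ("b", 0)]
assert [len(batch) for batch in group_consecutive(stream)] == [2, 1]
```

The trade-off is memory proportional to one document's chunks instead of the whole batch, at the cost of the consecutive-ordering assumption that the dict-based grouping does not need.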
@@ -6,7 +6,6 @@ import re
import time
import urllib
import zipfile
from collections.abc import Iterable
from dataclasses import dataclass
from datetime import datetime
from datetime import timedelta
@@ -462,7 +461,7 @@ class VespaIndex(DocumentIndex):

def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
chunks: list[DocMetadataAwareIndexChunk],
index_batch_params: IndexBatchParams,
) -> set[OldDocumentInsertionRecord]:
"""
@@ -1,8 +1,6 @@
import concurrent.futures
import logging
import random
from collections.abc import Generator
from collections.abc import Iterable
from typing import Any
from uuid import UUID

@@ -320,7 +318,7 @@ class VespaDocumentIndex(DocumentIndex):

def index(
self,
chunks: Iterable[DocMetadataAwareIndexChunk],
chunks: list[DocMetadataAwareIndexChunk],
indexing_metadata: IndexingMetadata,
) -> list[DocumentInsertionRecord]:
doc_id_to_chunk_cnt_diff = indexing_metadata.doc_id_to_chunk_cnt_diff
@@ -340,31 +338,22 @@ class VespaDocumentIndex(DocumentIndex):

# Vespa has restrictions on valid characters, yet document IDs come from
# external w.r.t. this class. We need to sanitize them.
#
# Instead of materializing all cleaned chunks upfront, we stream them
# through a generator that cleans IDs and builds the original-ID mapping
# incrementally as chunks flow into Vespa.
def _clean_and_track(
chunks_iter: Iterable[DocMetadataAwareIndexChunk],
id_map: dict[str, str],
seen_ids: set[str],
) -> Generator[DocMetadataAwareIndexChunk, None, None]:
"""Cleans chunk IDs and builds the original-ID mapping
incrementally as chunks flow through, avoiding a separate
materialization pass."""
for chunk in chunks_iter:
original_id = chunk.source_document.id
cleaned = clean_chunk_id_copy(chunk)
cleaned_id = cleaned.source_document.id
# Needed so the final DocumentInsertionRecord returned can have
# the original document ID. cleaned_chunks might not contain IDs
# exactly as callers supplied them.
id_map[cleaned_id] = original_id
seen_ids.add(cleaned_id)
yield cleaned
cleaned_chunks: list[DocMetadataAwareIndexChunk] = [
clean_chunk_id_copy(chunk) for chunk in chunks
]
assert len(cleaned_chunks) == len(
chunks
), "Bug: Cleaned chunks and input chunks have different lengths."

new_document_id_to_original_document_id: dict[str, str] = {}
all_cleaned_doc_ids: set[str] = set()
# Needed so the final DocumentInsertionRecord returned can have the
# original document ID. cleaned_chunks might not contain IDs exactly as
# callers supplied them.
new_document_id_to_original_document_id: dict[str, str] = dict()
for i, cleaned_chunk in enumerate(cleaned_chunks):
old_chunk = chunks[i]
new_document_id_to_original_document_id[
cleaned_chunk.source_document.id
] = old_chunk.source_document.id

existing_docs: set[str] = set()

@@ -420,13 +409,7 @@ class VespaDocumentIndex(DocumentIndex):
executor=executor,
)

# Insert new Vespa documents, streaming through the cleaning
# pipeline so chunks are never fully materialized.
cleaned_chunks = _clean_and_track(
chunks,
new_document_id_to_original_document_id,
all_cleaned_doc_ids,
)
# Insert new Vespa documents.
for chunk_batch in batch_generator(cleaned_chunks, BATCH_SIZE):
batch_index_vespa_chunks(
chunks=chunk_batch,
@@ -436,6 +419,10 @@ class VespaDocumentIndex(DocumentIndex):
executor=executor,
)

all_cleaned_doc_ids: set[str] = {
chunk.source_document.id for chunk in cleaned_chunks
}

return [
DocumentInsertionRecord(
document_id=new_document_id_to_original_document_id[cleaned_doc_id],
@@ -15,6 +15,7 @@ PLAIN_TEXT_MIME_TYPE = "text/plain"
class OnyxMimeTypes:
IMAGE_MIME_TYPES = {"image/jpg", "image/jpeg", "image/png", "image/webp"}
CSV_MIME_TYPES = {"text/csv"}
TABULAR_MIME_TYPES = CSV_MIME_TYPES | {SPREADSHEET_MIME_TYPE}
TEXT_MIME_TYPES = {
PLAIN_TEXT_MIME_TYPE,
"text/markdown",
@@ -34,13 +35,12 @@ class OnyxMimeTypes:
PDF_MIME_TYPE,
WORD_PROCESSING_MIME_TYPE,
PRESENTATION_MIME_TYPE,
SPREADSHEET_MIME_TYPE,
"message/rfc822",
"application/epub+zip",
}

ALLOWED_MIME_TYPES = IMAGE_MIME_TYPES.union(
TEXT_MIME_TYPES, DOCUMENT_MIME_TYPES, CSV_MIME_TYPES
TEXT_MIME_TYPES, DOCUMENT_MIME_TYPES, TABULAR_MIME_TYPES
)

EXCLUDED_IMAGE_TYPES = {
@@ -13,13 +13,14 @@ class ChatFileType(str, Enum):
DOC = "document"
# Plain text only contain the text
PLAIN_TEXT = "plain_text"
CSV = "csv"
# Tabular data files (CSV, TSV, XLSX) — metadata-only injection
TABULAR = "tabular"

def is_text_file(self) -> bool:
return self in (
ChatFileType.PLAIN_TEXT,
ChatFileType.DOC,
ChatFileType.CSV,
ChatFileType.TABULAR,
)
@@ -3844,9 +3844,9 @@
}
},
"node_modules/@ts-morph/common/node_modules/brace-expansion": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^4.0.2"
@@ -4224,9 +4224,9 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz",
"integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -5007,9 +5007,9 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
"integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -123,8 +123,9 @@ def _validate_endpoint(
(not reachable — indicates the api_key is invalid).

Timeout handling:
- Any httpx.TimeoutException (ConnectTimeout, ReadTimeout, WriteTimeout, PoolTimeout) →
timeout (operator should consider increasing timeout_seconds).
- ConnectTimeout: TCP handshake never completed → cannot_connect.
- ReadTimeout / WriteTimeout: TCP was established, server responded slowly → timeout
(operator should consider increasing timeout_seconds).
- All other exceptions → cannot_connect.
"""
_check_ssrf_safety(endpoint_url)
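The two docstring variants above describe how the endpoint validator classifies timeouts. A minimal sketch of the finer-grained mapping (the variant that splits ConnectTimeout from read/write timeouts) follows; the function name and returned status strings are illustrative, not the repository's actual API, and only httpx exception classes that exist (`ConnectTimeout`, `ReadTimeout`, `WriteTimeout`, `TimeoutException`) are assumed.

```python
import httpx


def classify_probe_error(exc: Exception) -> str:
    """Map an exception from a validation request to a coarse status string."""
    if isinstance(exc, httpx.ConnectTimeout):
        return "cannot_connect"  # TCP handshake never completed
    if isinstance(exc, (httpx.ReadTimeout, httpx.WriteTimeout)):
        return "timeout"  # connected, but the server responded slowly
    if isinstance(exc, httpx.TimeoutException):
        return "timeout"  # any other timeout flavor (e.g. pool exhaustion)
    return "cannot_connect"  # everything else is treated as unreachable
```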
@@ -76,11 +76,26 @@ class CategorizedFiles(BaseModel):
acceptable: list[UploadFile] = Field(default_factory=list)
rejected: list[RejectedFile] = Field(default_factory=list)
acceptable_file_to_token_count: dict[str, int] = Field(default_factory=dict)
# Filenames within `acceptable` that should be stored but not indexed.
skip_indexing: set[str] = Field(default_factory=set)

# Allow FastAPI UploadFile instances
model_config = ConfigDict(arbitrary_types_allowed=True)


# Extensions that bypass the token-count threshold on upload.
_TOKEN_THRESHOLD_EXEMPT_EXTENSIONS: set[str] = {
".csv",
".tsv",
".xlsx",
}


def _skip_token_threshold(extension: str) -> bool:
"""Return True if this file extension should bypass the token limit."""
return extension.lower() in _TOKEN_THRESHOLD_EXEMPT_EXTENSIONS


def _apply_long_side_cap(width: int, height: int, cap: int) -> tuple[int, int]:
if max(width, height) <= cap:
return width, height
@@ -264,7 +279,17 @@ def categorize_uploaded_files(
token_count = count_tokens(
text_content, tokenizer, token_limit=token_threshold
)
if token_threshold is not None and token_count > token_threshold:
exceeds_threshold = (
token_threshold is not None and token_count > token_threshold
)
if exceeds_threshold and _skip_token_threshold(extension):
# Exempt extensions (e.g. spreadsheets) are accepted
# but flagged to skip indexing — only metadata is
# injected into the LLM context.
results.acceptable.append(upload)
results.acceptable_file_to_token_count[filename] = token_count
results.skip_indexing.add(filename)
elif exceeds_threshold:
results.rejected.append(
RejectedFile(
filename=filename,
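The hunk above accepts oversized files with exempt tabular extensions but flags them to skip indexing, while still rejecting other files over the token threshold. A compact sketch of that decision, with hypothetical names and return values used purely for illustration:

```python
TOKEN_EXEMPT_EXTENSIONS = {".csv", ".tsv", ".xlsx"}  # mirrors the exempt set in this hunk


def decide_upload(filename: str, token_count: int, threshold: int | None) -> str:
    """Illustrative decision: exempt tabular extensions over the threshold are
    stored but skipped for indexing; other oversized files are rejected."""
    extension = "." + filename.rsplit(".", 1)[-1].lower() if "." in filename else ""
    exceeds = threshold is not None and token_count > threshold
    if exceeds and extension in TOKEN_EXEMPT_EXTENSIONS:
        return "accept_without_indexing"
    if exceeds:
        return "reject"
    return "accept"


assert decide_upload("big.xlsx", 1_000_000, 200_000) == "accept_without_indexing"
assert decide_upload("big.txt", 1_000_000, 200_000) == "reject"
```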
@@ -12,6 +12,7 @@ stale, which is fine for monitoring dashboards.
import json
import threading
import time
from collections.abc import Callable
from datetime import datetime
from datetime import timezone
from typing import Any
@@ -103,23 +104,25 @@ class _CachedCollector(Collector):


class QueueDepthCollector(_CachedCollector):
"""Reads Celery queue lengths from the broker Redis on each scrape."""
"""Reads Celery queue lengths from the broker Redis on each scrape.

Uses a Redis client factory (callable) rather than a stored client
reference so the connection is always fresh from Celery's pool.
"""

def __init__(self, cache_ttl: float = _DEFAULT_CACHE_TTL) -> None:
super().__init__(cache_ttl)
self._celery_app: Any | None = None
self._get_redis: Callable[[], Redis] | None = None

def set_celery_app(self, app: Any) -> None:
"""Set the Celery app for broker Redis access."""
self._celery_app = app
def set_redis_factory(self, factory: Callable[[], Redis]) -> None:
"""Set a callable that returns a broker Redis client on demand."""
self._get_redis = factory

def _collect_fresh(self) -> list[GaugeMetricFamily]:
if self._celery_app is None:
if self._get_redis is None:
return []

from onyx.background.celery.celery_redis import celery_get_broker_client

redis_client = celery_get_broker_client(self._celery_app)
redis_client = self._get_redis()

depth = GaugeMetricFamily(
"onyx_queue_depth",
@@ -401,19 +404,17 @@ class RedisHealthCollector(_CachedCollector):

def __init__(self, cache_ttl: float = _DEFAULT_CACHE_TTL) -> None:
super().__init__(cache_ttl)
self._celery_app: Any | None = None
self._get_redis: Callable[[], Redis] | None = None

def set_celery_app(self, app: Any) -> None:
"""Set the Celery app for broker Redis access."""
self._celery_app = app
def set_redis_factory(self, factory: Callable[[], Redis]) -> None:
"""Set a callable that returns a broker Redis client on demand."""
self._get_redis = factory

def _collect_fresh(self) -> list[GaugeMetricFamily]:
if self._celery_app is None:
if self._get_redis is None:
return []

from onyx.background.celery.celery_redis import celery_get_broker_client

redis_client = celery_get_broker_client(self._celery_app)
redis_client = self._get_redis()

memory_used = GaugeMetricFamily(
"onyx_redis_memory_used_bytes",
@@ -3,8 +3,12 @@
Called once by the monitoring celery worker after Redis and DB are ready.
"""

from collections.abc import Callable
from typing import Any

from celery import Celery
from prometheus_client.registry import REGISTRY
from redis import Redis

from onyx.server.metrics.indexing_pipeline import ConnectorHealthCollector
from onyx.server.metrics.indexing_pipeline import IndexAttemptCollector
@@ -17,7 +21,7 @@ from onyx.utils.logger import setup_logger
logger = setup_logger()

# Module-level singletons — these are lightweight objects (no connections or DB
# state) until configure() / set_celery_app() is called. Keeping them at
# state) until configure() / set_redis_factory() is called. Keeping them at
# module level ensures they survive the lifetime of the worker process and are
# only registered with the Prometheus registry once.
_queue_collector = QueueDepthCollector()
@@ -28,15 +32,72 @@ _worker_health_collector = WorkerHealthCollector()
_heartbeat_monitor: WorkerHeartbeatMonitor | None = None


def _make_broker_redis_factory(celery_app: Celery) -> Callable[[], Redis]:
"""Create a factory that returns a cached broker Redis client.

Reuses a single connection across scrapes to avoid leaking connections.
Reconnects automatically if the cached connection becomes stale.
"""
_cached_client: list[Redis | None] = [None]
# Keep a reference to the Kombu Connection so we can close it on
# reconnect (the raw Redis client outlives the Kombu wrapper).
_cached_kombu_conn: list[Any] = [None]

def _close_client(client: Redis) -> None:
"""Best-effort close of a Redis client."""
try:
client.close()
except Exception:
logger.debug("Failed to close stale Redis client", exc_info=True)

def _close_kombu_conn() -> None:
"""Best-effort close of the cached Kombu Connection."""
conn = _cached_kombu_conn[0]
if conn is not None:
try:
conn.close()
except Exception:
logger.debug("Failed to close Kombu connection", exc_info=True)
_cached_kombu_conn[0] = None

def _get_broker_redis() -> Redis:
client = _cached_client[0]
if client is not None:
try:
client.ping()
return client
except Exception:
logger.debug("Cached Redis client stale, reconnecting")
_close_client(client)
_cached_client[0] = None
_close_kombu_conn()

# Get a fresh Redis client from the broker connection.
# We hold this client long-term (cached above) rather than using a
# context manager, because we need it to persist across scrapes.
# The caching logic above ensures we only ever hold one connection,
# and we close it explicitly on reconnect.
conn = celery_app.broker_connection()
# kombu's Channel exposes .client at runtime (the underlying Redis
# client) but the type stubs don't declare it.
new_client: Redis = conn.channel().client # type: ignore[attr-defined]
_cached_client[0] = new_client
_cached_kombu_conn[0] = conn
return new_client

return _get_broker_redis


def setup_indexing_pipeline_metrics(celery_app: Celery) -> None:
"""Register all indexing pipeline collectors with the default registry.

Args:
celery_app: The Celery application instance. Used to obtain a
celery_app: The Celery application instance. Used to obtain a fresh
broker Redis client on each scrape for queue depth metrics.
"""
_queue_collector.set_celery_app(celery_app)
_redis_health_collector.set_celery_app(celery_app)
redis_factory = _make_broker_redis_factory(celery_app)
_queue_collector.set_redis_factory(redis_factory)
_redis_health_collector.set_redis_factory(redis_factory)

# Start the heartbeat monitor daemon thread — uses a single persistent
# connection to receive worker-heartbeat events.
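The factory built above is handed to both Prometheus collectors, so the two share a single cached broker connection and reconnect only when a PING fails. A generic sketch of the closure-cached factory idea, using illustrative names and a list cell for mutable closure state (the same idiom the hunk uses):

```python
from collections.abc import Callable


def make_cached_factory(connect: Callable[[], object]) -> Callable[[], object]:
    """Return a zero-argument factory that connects lazily and then reuses the
    same object, so several consumers wired to it share one connection."""
    cache: list[object | None] = [None]  # closure cell holding the cached connection

    def factory() -> object:
        if cache[0] is None:
            cache[0] = connect()  # connect only on first use
        return cache[0]

    return factory


# Both hypothetical collectors receive the same factory, hence the same connection.
factory = make_cached_factory(lambda: object())
assert factory() is factory()
```

Injecting a factory instead of a live client keeps the collectors free of Celery and connection-lifecycle details, which is also what makes the tests later in this compare able to pass in a plain lambda.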
@@ -9,8 +9,8 @@ def mime_type_to_chat_file_type(mime_type: str | None) -> ChatFileType:
if mime_type in OnyxMimeTypes.IMAGE_MIME_TYPES:
return ChatFileType.IMAGE

if mime_type in OnyxMimeTypes.CSV_MIME_TYPES:
return ChatFileType.CSV
if mime_type in OnyxMimeTypes.TABULAR_MIME_TYPES:
return ChatFileType.TABULAR

if mime_type in OnyxMimeTypes.DOCUMENT_MIME_TYPES:
return ChatFileType.DOC

@@ -169,10 +169,10 @@ class FileReaderTool(Tool[FileReaderToolOverrideKwargs]):

chat_file = self._load_file(file_id)

# Only PLAIN_TEXT and CSV are guaranteed to contain actual text bytes.
# Only PLAIN_TEXT and TABULAR are guaranteed to contain actual text bytes.
# DOC type in a loaded file means plaintext extraction failed and the
# content is the original binary (e.g. raw PDF/DOCX bytes).
if chat_file.file_type not in (ChatFileType.PLAIN_TEXT, ChatFileType.CSV):
if chat_file.file_type not in (ChatFileType.PLAIN_TEXT, ChatFileType.TABULAR):
raise ToolCallException(
message=f"File {file_id} is not a text file (type={chat_file.file_type})",
llm_facing_message=(
@@ -263,7 +263,7 @@ oauthlib==3.2.2
# via
# kubernetes
# requests-oauthlib
onyx-devtools==0.7.2
onyx-devtools==0.7.1
# via onyx
openai==2.14.0
# via
@@ -129,10 +129,6 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
return_value=mock_app,
),
patch(_PATCH_QUEUE_DEPTH, return_value=0),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
):
yield
@@ -88,22 +88,10 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
the actual task instance. We patch ``app`` on that instance's class
(a unique Celery-generated Task subclass) so the mock is scoped to this
task only.

Also patches ``celery_get_broker_client`` so the mock app doesn't need
a real broker URL.
"""
task_instance = task.run.__self__
with (
patch.object(
type(task_instance),
"app",
new_callable=PropertyMock,
return_value=mock_app,
),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
with patch.object(
type(task_instance), "app", new_callable=PropertyMock, return_value=mock_app
):
yield
@@ -90,17 +90,8 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
task only.
"""
task_instance = task.run.__self__
with (
patch.object(
type(task_instance),
"app",
new_callable=PropertyMock,
return_value=mock_app,
),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
with patch.object(
type(task_instance), "app", new_callable=PropertyMock, return_value=mock_app
):
yield
@@ -6,7 +6,6 @@ These tests assume Vespa and OpenSearch are running.
import time
import uuid
from collections.abc import Generator
from collections.abc import Iterator

import httpx
import pytest
@@ -22,7 +21,6 @@ from onyx.document_index.opensearch.opensearch_document_index import (
)
from onyx.document_index.vespa.index import VespaIndex
from onyx.document_index.vespa.vespa_document_index import VespaDocumentIndex
from onyx.indexing.models import DocMetadataAwareIndexChunk
from tests.external_dependency_unit.constants import TEST_TENANT_ID
from tests.external_dependency_unit.document_index.conftest import EMBEDDING_DIM
from tests.external_dependency_unit.document_index.conftest import make_chunk
@@ -203,25 +201,3 @@ class TestDocumentIndexNew:
assert len(result_map) == 2
assert result_map[existing_doc] is True
assert result_map[new_doc] is False

def test_index_accepts_generator(
self,
document_indices: list[DocumentIndexNew],
tenant_context: None, # noqa: ARG002
) -> None:
"""index() accepts a generator (any iterable), not just a list."""
for document_index in document_indices:
doc_id = f"test_gen_{uuid.uuid4().hex[:8]}"
metadata = make_indexing_metadata([doc_id], old_counts=[0], new_counts=[3])

def chunk_gen() -> Iterator[DocMetadataAwareIndexChunk]:
for i in range(3):
yield make_chunk(doc_id, chunk_id=i)

results = document_index.index(
chunks=chunk_gen(), indexing_metadata=metadata
)

assert len(results) == 1
assert results[0].document_id == doc_id
assert results[0].already_existed is False
@@ -5,7 +5,6 @@ These tests assume Vespa and OpenSearch are running.

import time
from collections.abc import Generator
from collections.abc import Iterator

import pytest

@@ -167,29 +166,3 @@ class TestDocumentIndexOld:
batch_retrieval=True,
)
assert len(inference_chunks) == 0

def test_index_accepts_generator(
self,
document_indices: list[DocumentIndex],
tenant_context: None, # noqa: ARG002
) -> None:
"""index() accepts a generator (any iterable), not just a list."""
for document_index in document_indices:

def chunk_gen() -> Iterator[DocMetadataAwareIndexChunk]:
for i in range(3):
yield make_chunk("test_doc_gen", chunk_id=i)

index_batch_params = IndexBatchParams(
doc_id_to_previous_chunk_cnt={"test_doc_gen": 0},
doc_id_to_new_chunk_cnt={"test_doc_gen": 3},
tenant_id=get_current_tenant_id(),
large_chunks_enabled=False,
)

results = document_index.index(chunk_gen(), index_batch_params)

assert len(results) == 1
record = results.pop()
assert record.document_id == "test_doc_gen"
assert record.already_existed is False
@@ -1175,7 +1175,7 @@ def test_code_interpreter_receives_chat_files(

file_descriptor: FileDescriptor = {
"id": user_file.file_id,
"type": ChatFileType.CSV,
"type": ChatFileType.TABULAR,
"name": "data.csv",
"user_file_id": str(user_file.id),
}
@@ -1,87 +0,0 @@
"""Tests for celery_get_broker_client singleton."""

from collections.abc import Iterator
from unittest.mock import MagicMock
from unittest.mock import patch

import pytest

from onyx.background.celery import celery_redis


@pytest.fixture(autouse=True)
def reset_singleton() -> Iterator[None]:
"""Reset the module-level singleton between tests."""
celery_redis._broker_client = None
celery_redis._broker_url = None
yield
celery_redis._broker_client = None
celery_redis._broker_url = None


def _make_mock_app(broker_url: str = "redis://localhost:6379/15") -> MagicMock:
app = MagicMock()
app.conf.broker_url = broker_url
return app


class TestCeleryGetBrokerClient:
@patch("onyx.background.celery.celery_redis.Redis")
def test_creates_client_on_first_call(self, mock_redis_cls: MagicMock) -> None:
mock_client = MagicMock()
mock_redis_cls.from_url.return_value = mock_client

app = _make_mock_app()
result = celery_redis.celery_get_broker_client(app)

assert result is mock_client
call_args = mock_redis_cls.from_url.call_args
assert call_args[0][0] == "redis://localhost:6379/15"
assert call_args[1]["decode_responses"] is False
assert call_args[1]["socket_keepalive"] is True
assert call_args[1]["retry_on_timeout"] is True

@patch("onyx.background.celery.celery_redis.Redis")
def test_reuses_cached_client(self, mock_redis_cls: MagicMock) -> None:
mock_client = MagicMock()
mock_client.ping.return_value = True
mock_redis_cls.from_url.return_value = mock_client

app = _make_mock_app()
client1 = celery_redis.celery_get_broker_client(app)
client2 = celery_redis.celery_get_broker_client(app)

assert client1 is client2
# from_url called only once
assert mock_redis_cls.from_url.call_count == 1

@patch("onyx.background.celery.celery_redis.Redis")
def test_reconnects_on_ping_failure(self, mock_redis_cls: MagicMock) -> None:
stale_client = MagicMock()
stale_client.ping.side_effect = ConnectionError("disconnected")

fresh_client = MagicMock()
fresh_client.ping.return_value = True

mock_redis_cls.from_url.side_effect = [stale_client, fresh_client]

app = _make_mock_app()

# First call creates stale_client
client1 = celery_redis.celery_get_broker_client(app)
assert client1 is stale_client

# Second call: ping fails, creates fresh_client
client2 = celery_redis.celery_get_broker_client(app)
assert client2 is fresh_client
assert mock_redis_cls.from_url.call_count == 2

@patch("onyx.background.celery.celery_redis.Redis")
def test_uses_broker_url_from_app_config(self, mock_redis_cls: MagicMock) -> None:
mock_redis_cls.from_url.return_value = MagicMock()

app = _make_mock_app("redis://custom-host:6380/3")
celery_redis.celery_get_broker_client(app)

call_args = mock_redis_cls.from_url.call_args
assert call_args[0][0] == "redis://custom-host:6380/3"
@@ -139,7 +139,7 @@ def test_csv_file_type() -> None:
result = _extract_referenced_file_descriptors([tool_call], message)

assert len(result) == 1
assert result[0]["type"] == ChatFileType.CSV
assert result[0]["type"] == ChatFileType.TABULAR


def test_unknown_extension_defaults_to_plain_text() -> None:
@@ -82,7 +82,7 @@ class TestChatFileConversion:
ChatLoadedFile(
file_id="file-2",
content=b"csv,data\n1,2",
file_type=ChatFileType.CSV,
file_type=ChatFileType.TABULAR,
filename="data.csv",
content_text="csv,data\n1,2",
token_count=5,
@@ -1,6 +1,5 @@
"""Tests for indexing pipeline Prometheus collectors."""

from collections.abc import Iterator
from datetime import datetime
from datetime import timedelta
from datetime import timezone
@@ -14,16 +13,6 @@ from onyx.server.metrics.indexing_pipeline import IndexAttemptCollector
from onyx.server.metrics.indexing_pipeline import QueueDepthCollector


@pytest.fixture(autouse=True)
def _mock_broker_client() -> Iterator[None]:
"""Patch celery_get_broker_client for all collector tests."""
with patch(
"onyx.background.celery.celery_redis.celery_get_broker_client",
return_value=MagicMock(),
):
yield


class TestQueueDepthCollector:
def test_returns_empty_when_factory_not_set(self) -> None:
collector = QueueDepthCollector()
@@ -35,7 +24,8 @@ class TestQueueDepthCollector:

def test_collects_queue_depths(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)

with (
patch(
@@ -70,8 +60,8 @@ class TestQueueDepthCollector:

def test_handles_redis_error_gracefully(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)

with patch(
"onyx.server.metrics.indexing_pipeline.celery_get_queue_length",
@@ -84,8 +74,8 @@ class TestQueueDepthCollector:

def test_caching_returns_stale_within_ttl(self) -> None:
collector = QueueDepthCollector(cache_ttl=60)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)

with (
patch(
@@ -108,10 +98,31 @@ class TestQueueDepthCollector:

assert first is second # Same object, from cache

def test_factory_called_each_scrape(self) -> None:
"""Verify the Redis factory is called on each fresh collect, not cached."""
collector = QueueDepthCollector(cache_ttl=0)
factory = MagicMock(return_value=MagicMock())
collector.set_redis_factory(factory)

with (
patch(
"onyx.server.metrics.indexing_pipeline.celery_get_queue_length",
return_value=0,
),
patch(
"onyx.server.metrics.indexing_pipeline.celery_get_unacked_task_ids",
return_value=set(),
),
):
collector.collect()
collector.collect()

assert factory.call_count == 2

def test_error_returns_stale_cache(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)

# First call succeeds
with (
@@ -1,22 +1,96 @@
"""Tests for indexing pipeline setup."""
"""Tests for indexing pipeline setup (Redis factory caching)."""

from unittest.mock import MagicMock

from onyx.server.metrics.indexing_pipeline import QueueDepthCollector
from onyx.server.metrics.indexing_pipeline import RedisHealthCollector
from onyx.server.metrics.indexing_pipeline_setup import _make_broker_redis_factory


class TestCollectorCeleryAppSetup:
def test_queue_depth_collector_uses_celery_app(self) -> None:
"""QueueDepthCollector.set_celery_app stores the app for broker access."""
collector = QueueDepthCollector()
mock_app = MagicMock()
collector.set_celery_app(mock_app)
assert collector._celery_app is mock_app
def _make_mock_app(client: MagicMock) -> MagicMock:
"""Create a mock Celery app whose broker_connection().channel().client
returns the given client."""
mock_app = MagicMock()
mock_conn = MagicMock()
mock_conn.channel.return_value.client = client

def test_redis_health_collector_uses_celery_app(self) -> None:
"""RedisHealthCollector.set_celery_app stores the app for broker access."""
collector = RedisHealthCollector()
mock_app = MagicMock()
collector.set_celery_app(mock_app)
assert collector._celery_app is mock_app
mock_app.broker_connection.return_value = mock_conn

return mock_app


class TestMakeBrokerRedisFactory:
def test_caches_redis_client_across_calls(self) -> None:
"""Factory should reuse the same client on subsequent calls."""
mock_client = MagicMock()
mock_client.ping.return_value = True
mock_app = _make_mock_app(mock_client)

factory = _make_broker_redis_factory(mock_app)

client1 = factory()
client2 = factory()

assert client1 is client2
# broker_connection should only be called once
assert mock_app.broker_connection.call_count == 1

def test_reconnects_when_ping_fails(self) -> None:
"""Factory should create a new client if ping fails (stale connection)."""
mock_client_stale = MagicMock()
mock_client_stale.ping.side_effect = ConnectionError("disconnected")

mock_client_fresh = MagicMock()
mock_client_fresh.ping.return_value = True

mock_app = _make_mock_app(mock_client_stale)

factory = _make_broker_redis_factory(mock_app)

# First call — creates and caches
client1 = factory()
assert client1 is mock_client_stale
assert mock_app.broker_connection.call_count == 1

# Switch to fresh client for next connection
mock_conn_fresh = MagicMock()
mock_conn_fresh.channel.return_value.client = mock_client_fresh
mock_app.broker_connection.return_value = mock_conn_fresh

# Second call — ping fails on stale, reconnects
client2 = factory()
assert client2 is mock_client_fresh
assert mock_app.broker_connection.call_count == 2

def test_reconnect_closes_stale_client(self) -> None:
"""When ping fails, the old client should be closed before reconnecting."""
mock_client_stale = MagicMock()
mock_client_stale.ping.side_effect = ConnectionError("disconnected")

mock_client_fresh = MagicMock()
mock_client_fresh.ping.return_value = True

mock_app = _make_mock_app(mock_client_stale)

factory = _make_broker_redis_factory(mock_app)

# First call — creates and caches
factory()

# Switch to fresh client
mock_conn_fresh = MagicMock()
mock_conn_fresh.channel.return_value.client = mock_client_fresh
mock_app.broker_connection.return_value = mock_conn_fresh

# Second call — ping fails, should close stale client
factory()
mock_client_stale.close.assert_called_once()

def test_first_call_creates_connection(self) -> None:
"""First call should always create a new connection."""
mock_client = MagicMock()
mock_app = _make_mock_app(mock_client)

factory = _make_broker_redis_factory(mock_app)
client = factory()

assert client is mock_client
mock_app.broker_connection.assert_called_once()
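`_make_broker_redis_factory` itself is not part of this diff, but the tests above fully specify its contract: reuse a cached broker client while `ping()` succeeds, and on a ping failure close the stale client and pull a fresh one from `app.broker_connection().channel().client`. A minimal sketch satisfying those tests (local names are illustrative only; the real implementation in `indexing_pipeline_setup` may differ):

```python
from collections.abc import Callable
from typing import Any

from redis import Redis


def _make_broker_redis_factory(celery_app: Any) -> Callable[[], Redis]:
    """Sketch: build a factory that lazily creates and caches the broker client."""
    cached: Redis | None = None

    def factory() -> Redis:
        nonlocal cached
        if cached is not None:
            try:
                cached.ping()
                return cached  # healthy cached client: reuse it
            except Exception:
                # Stale connection: close it before reconnecting.
                try:
                    cached.close()
                except Exception:
                    pass
        cached = celery_app.broker_connection().channel().client
        return cached

    return factory
```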
@@ -5,7 +5,7 @@ home: https://www.onyx.app/
sources:
- "https://github.com/onyx-dot-app/onyx"
type: application
version: 0.4.38
version: 0.4.37
appVersion: latest
annotations:
category: Productivity
@@ -1,26 +0,0 @@
{{- /* Metrics port must match the default in metrics_server.py (_DEFAULT_PORTS).
Do NOT use PROMETHEUS_METRICS_PORT env var in Helm — each worker needs its own port. */ -}}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_docfetching.replicaCount) 0) }}
apiVersion: v1
kind: Service
metadata:
name: {{ include "onyx.fullname" . }}-celery-worker-docfetching-metrics
labels:
{{- include "onyx.labels" . | nindent 4 }}
{{- if .Values.celery_worker_docfetching.deploymentLabels }}
{{- toYaml .Values.celery_worker_docfetching.deploymentLabels | nindent 4 }}
{{- end }}
metrics: "true"
spec:
type: ClusterIP
ports:
- port: 9092
targetPort: metrics
protocol: TCP
name: metrics
selector:
{{- include "onyx.selectorLabels" . | nindent 4 }}
{{- if .Values.celery_worker_docfetching.deploymentLabels }}
{{- toYaml .Values.celery_worker_docfetching.deploymentLabels | nindent 4 }}
{{- end }}
{{- end }}
@@ -73,10 +73,6 @@ spec:
"-Q",
"connector_doc_fetching",
]
ports:
- name: metrics
containerPort: 9092
protocol: TCP
resources:
{{- toYaml .Values.celery_worker_docfetching.resources | nindent 12 }}
envFrom:
@@ -1,26 +0,0 @@
{{- /* Metrics port must match the default in metrics_server.py (_DEFAULT_PORTS).
Do NOT use PROMETHEUS_METRICS_PORT env var in Helm — each worker needs its own port. */ -}}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_docprocessing.replicaCount) 0) }}
apiVersion: v1
kind: Service
metadata:
name: {{ include "onyx.fullname" . }}-celery-worker-docprocessing-metrics
labels:
{{- include "onyx.labels" . | nindent 4 }}
{{- if .Values.celery_worker_docprocessing.deploymentLabels }}
{{- toYaml .Values.celery_worker_docprocessing.deploymentLabels | nindent 4 }}
{{- end }}
metrics: "true"
spec:
type: ClusterIP
ports:
- port: 9093
targetPort: metrics
protocol: TCP
name: metrics
selector:
{{- include "onyx.selectorLabels" . | nindent 4 }}
{{- if .Values.celery_worker_docprocessing.deploymentLabels }}
{{- toYaml .Values.celery_worker_docprocessing.deploymentLabels | nindent 4 }}
{{- end }}
{{- end }}
@@ -73,10 +73,6 @@ spec:
"-Q",
"docprocessing",
]
ports:
- name: metrics
containerPort: 9093
protocol: TCP
resources:
{{- toYaml .Values.celery_worker_docprocessing.resources | nindent 12 }}
envFrom:
@@ -1,26 +0,0 @@
{{- /* Metrics port must match the default in metrics_server.py (_DEFAULT_PORTS).
Do NOT use PROMETHEUS_METRICS_PORT env var in Helm — each worker needs its own port. */ -}}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_monitoring.replicaCount) 0) }}
apiVersion: v1
kind: Service
metadata:
name: {{ include "onyx.fullname" . }}-celery-worker-monitoring-metrics
labels:
{{- include "onyx.labels" . | nindent 4 }}
{{- if .Values.celery_worker_monitoring.deploymentLabels }}
{{- toYaml .Values.celery_worker_monitoring.deploymentLabels | nindent 4 }}
{{- end }}
metrics: "true"
spec:
type: ClusterIP
ports:
- port: 9096
targetPort: metrics
protocol: TCP
name: metrics
selector:
{{- include "onyx.selectorLabels" . | nindent 4 }}
{{- if .Values.celery_worker_monitoring.deploymentLabels }}
{{- toYaml .Values.celery_worker_monitoring.deploymentLabels | nindent 4 }}
{{- end }}
{{- end }}
@@ -70,10 +70,6 @@ spec:
"-Q",
"monitoring",
]
ports:
- name: metrics
containerPort: 9096
protocol: TCP
resources:
{{- toYaml .Values.celery_worker_monitoring.resources | nindent 12 }}
envFrom:
@@ -144,7 +144,7 @@ dev = [
"matplotlib==3.10.8",
"mypy-extensions==1.0.0",
"mypy==1.13.0",
"onyx-devtools==0.7.2",
"onyx-devtools==0.7.1",
"openapi-generator-cli==7.17.0",
"pandas-stubs~=2.3.3",
"pre-commit==3.2.2",
16
uv.lock
generated
@@ -4458,7 +4458,7 @@ requires-dist = [
{ name = "numpy", marker = "extra == 'model-server'", specifier = "==2.4.1" },
{ name = "oauthlib", marker = "extra == 'backend'", specifier = "==3.2.2" },
{ name = "office365-rest-python-client", marker = "extra == 'backend'", specifier = "==2.6.2" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.7.2" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.7.1" },
{ name = "openai", specifier = "==2.14.0" },
{ name = "openapi-generator-cli", marker = "extra == 'dev'", specifier = "==7.17.0" },
{ name = "openinference-instrumentation", marker = "extra == 'backend'", specifier = "==0.1.42" },
@@ -4563,19 +4563,19 @@ requires-dist = [{ name = "onyx", extras = ["backend", "dev", "ee"], editable =

[[package]]
name = "onyx-devtools"
version = "0.7.2"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fastapi" },
{ name = "openapi-generator-cli" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/b0/765ed49157470e8ccc8ab89e6a896ade50cde3aa2a494662ad4db92a48c4/onyx_devtools-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:553a2b5e61b29b7913c991c8d5aed78f930f0f81a0f42229c6a8de2b1e8ff57e", size = 4203859, upload-time = "2026-03-27T15:09:49.63Z" },
{ url = "https://files.pythonhosted.org/packages/f7/9d/bba0a44a16d2fc27e5441aaf10727e10514e7a49bce70eca02bced566eb9/onyx_devtools-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5cf0782dca8b3d861de9e18e65e990cfce5161cd559df44d8fabd3fefd54fdcd", size = 3879750, upload-time = "2026-03-27T15:09:42.413Z" },
{ url = "https://files.pythonhosted.org/packages/4d/d8/c5725e8af14c74fe0aeed29e4746400bb3c0a078fd1240df729dc6432b84/onyx_devtools-0.7.2-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:9a0d67373e16b4fbb38a5290c0d9dfd4cfa837e5da0c165b32841b9d37f7455b", size = 3743529, upload-time = "2026-03-27T15:09:44.546Z" },
{ url = "https://files.pythonhosted.org/packages/1a/82/b7c398a21dbc3e14fd7a29e49caa86b1bc0f8d7c75c051514785441ab779/onyx_devtools-0.7.2-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:794af14b2de575d0ae41b94551399eca8f8ba9b950c5db7acb7612767fd228f9", size = 4166562, upload-time = "2026-03-27T15:09:49.471Z" },
{ url = "https://files.pythonhosted.org/packages/26/76/be129e2baafc91fe792d919b1f4d73fc943ba9c2b728a60f1fb98e0c115a/onyx_devtools-0.7.2-py3-none-win_amd64.whl", hash = "sha256:83b3eb84df58d865e4f714222a5fab3ea464836e2c8690569454a940bbb651ff", size = 4282270, upload-time = "2026-03-27T15:09:44.676Z" },
{ url = "https://files.pythonhosted.org/packages/3b/72/29b8c8dbcf069c56475f00511f04c4aaa5ba3faba1dfc8276107d4b3ef7f/onyx_devtools-0.7.2-py3-none-win_arm64.whl", hash = "sha256:62f0836624ee6a5b31e64fd93162e7fce142ac8a4f959607e411824bc2b88174", size = 3823053, upload-time = "2026-03-27T15:09:43.546Z" },
{ url = "https://files.pythonhosted.org/packages/65/9d/74bcd02583706bdf90c8ac9084eb60bd71d0671392152410ab21b7b68ea1/onyx_devtools-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:178385dce0b413fd2a1f761055a99f556ec536ef5c32963fc273e751813621eb", size = 4007974, upload-time = "2026-03-17T21:10:39.267Z" },
{ url = "https://files.pythonhosted.org/packages/f0/f8/d8ddb32120428c083c60eb07244479da6e07eaebd31847658a049ab33815/onyx_devtools-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7960ae6e440ebf1584e02d9e1d0c9ef543b1d54c2584cdcace15695aec3121b2", size = 3696924, upload-time = "2026-03-17T21:10:50.716Z" },
{ url = "https://files.pythonhosted.org/packages/87/21/1e427280066db42ff9dd5f34c70b9dca5d9781f96d0d9a88aaa454fdb432/onyx_devtools-0.7.1-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:6785dda88ca0a3d8464a9bfab76a253ed90da89d53a9c4a67227980f37df1ccf", size = 3568300, upload-time = "2026-03-17T21:10:41.997Z" },
{ url = "https://files.pythonhosted.org/packages/0e/0e/afbbe1164b3d016ddb5352353cb2541eef5a8b2c04e8f02d5d1319cb8b8c/onyx_devtools-0.7.1-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:9e77f2b725c0c00061a3dda5eba199404b51638cec0bf54fc7611fee1f26db34", size = 3974668, upload-time = "2026-03-17T21:10:43.879Z" },
{ url = "https://files.pythonhosted.org/packages/8a/a5/22840643289ef4ca83931b7a79fba8f1db7e626b4b870d4b4f8206c4ff5f/onyx_devtools-0.7.1-py3-none-win_amd64.whl", hash = "sha256:de37daa0e4db9b5dccf94408a3422be4f821e380ab70081bd1032cec1e3c91e6", size = 4078640, upload-time = "2026-03-17T21:10:40.275Z" },
{ url = "https://files.pythonhosted.org/packages/1e/c1/a0295506a521d9942b0f55523781a113e4555420d800a386d5a2eb46a7ad/onyx_devtools-0.7.1-py3-none-win_arm64.whl", hash = "sha256:ab88c53ebda6dff27350316b4ac9bd5f258cd586c2109971a9d976411e1e22ea", size = 3636787, upload-time = "2026-03-17T21:10:37.492Z" },
]

[[package]]
@@ -73,15 +73,6 @@ interface ContentMdProps {
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;

/** Optional class name applied to the title element. */
titleClassName?: string;

/** Optional class name applied to the icon element. */
iconClassName?: string;

/** Content rendered below the description, indented to align with it. */
bottomChildren?: React.ReactNode;

/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
@@ -155,9 +146,6 @@ function ContentMd({
tag,
sizePreset = "main-ui",
withInteractive,
titleClassName,
iconClassName,
bottomChildren,
ref,
}: ContentMdProps) {
const [editing, setEditing] = useState(false);
@@ -196,11 +184,7 @@ function ContentMd({
style={{ minHeight: config.lineHeight }}
>
<Icon
className={cn(
"opal-content-md-icon",
config.iconColorClass,
iconClassName
)}
className={cn("opal-content-md-icon", config.iconColorClass)}
style={{ width: config.iconSize, height: config.iconSize }}
/>
</div>
@@ -243,8 +227,7 @@ function ContentMd({
"opal-content-md-title",
config.titleFont,
"text-text-04",
editable && "cursor-pointer",
titleClassName
editable && "cursor-pointer"
)}
title={toPlainString(title)}
onClick={editable ? startEditing : undefined}
@@ -312,13 +295,6 @@ function ContentMd({
{resolveStr(description)}
</div>
)}
{bottomChildren && (
<div
style={Icon ? { paddingLeft: config.descriptionIndent } : undefined}
>
{bottomChildren}
</div>
)}
</div>
);
}

@@ -138,12 +138,6 @@ type MdContentProps = ContentBaseProps & {
auxIcon?: "info-gray" | "info-blue" | "warning" | "error";
/** Tag rendered beside the title. */
tag?: TagProps;
/** Optional class name applied to the title element. */
titleClassName?: string;
/** Optional class name applied to the icon element. */
iconClassName?: string;
/** Content rendered below the description, indented to align with it. */
bottomChildren?: React.ReactNode;
};

/** ContentSm does not support descriptions or inline editing. */
@@ -2,6 +2,7 @@

import { useState, useMemo, useEffect } from "react";
import useSWR from "swr";
import Text from "@/refresh-components/texts/Text";
import { Select } from "@/refresh-components/cards";
import { useCreateModal } from "@/refresh-components/contexts/ModalContext";
import { toast } from "@/hooks/useToast";
@@ -23,9 +24,8 @@ import { ProviderIcon } from "@/app/admin/configuration/llm/ProviderIcon";
import Message from "@/refresh-components/messages/Message";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import InputSelect from "@/refresh-components/inputs/InputSelect";
import { Button, Text } from "@opal/components";
import { Button } from "@opal/components";
import { SvgSlash, SvgUnplug } from "@opal/icons";
import { markdown } from "@opal/utils";

const NO_DEFAULT_VALUE = "__none__";

@@ -201,10 +201,10 @@ export default function ImageGenerationContent() {
<div className="flex flex-col gap-6">
{/* Section Header */}
<div className="flex flex-col gap-0.5">
<Text font="main-content-emphasis" color="text-05">
<Text mainContentEmphasis text05>
Image Generation Model
</Text>
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
Select a model to generate images in chat.
</Text>
</div>
@@ -223,7 +223,7 @@ export default function ImageGenerationContent() {
{/* Provider Groups */}
{IMAGE_PROVIDER_GROUPS.map((group) => (
<div key={group.name} className="flex flex-col gap-2">
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
{group.name}
</Text>
<div className="flex flex-col gap-2">
@@ -277,13 +277,12 @@ export default function ImageGenerationContent() {
{needsReplacement ? (
hasReplacements ? (
<Section alignItems="start">
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** is currently the default image generation model. Session history will be preserved.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> is currently the default
image generation model. Session history will be preserved.
</Text>
<Section alignItems="start" gap={0.25}>
<Text as="p" color="text-04">
<Text as="p" text04>
Set New Default
</Text>
<InputSelect
@@ -330,24 +329,22 @@ export default function ImageGenerationContent() {
</Section>
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** is currently the default image generation model.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> is currently the default
image generation model.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Connect another provider to continue using image generation.
</Text>
</>
)
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** models will no longer be used to generate images.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> models will no longer be used
to generate images.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Session history will be preserved.
</Text>
</>
@@ -15,7 +15,7 @@ import { Callout } from "@/components/ui/callout";
import { cn } from "@/lib/utils";
import { toast } from "@/hooks/useToast";
import { SvgGlobe, SvgOnyxLogo, SvgSlash, SvgUnplug } from "@opal/icons";
import { Button } from "@opal/components";
import { Button as OpalButton } from "@opal/components";
import { ADMIN_ROUTES } from "@/lib/admin-routes";
import { WebProviderSetupModal } from "@/app/admin/configuration/web-search/WebProviderSetupModal";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
@@ -151,7 +151,7 @@ function WebSearchDisconnectModal({
description="This will remove the stored credentials for this provider."
onClose={onClose}
submit={
<Button
<OpalButton
variant="danger"
onClick={onDisconnect}
disabled={
@@ -159,7 +159,7 @@ function WebSearchDisconnectModal({
}
>
Disconnect
</Button>
</OpalButton>
}
>
{needsReplacement ? (
@@ -75,14 +75,14 @@ export enum ChatFileType {
IMAGE = "image",
DOCUMENT = "document",
PLAIN_TEXT = "plain_text",
CSV = "csv",
TABULAR = "tabular",
USER_KNOWLEDGE = "user_knowledge",
}

export const isTextFile = (fileType: ChatFileType) =>
[
ChatFileType.PLAIN_TEXT,
ChatFileType.CSV,
ChatFileType.TABULAR,
ChatFileType.USER_KNOWLEDGE,
ChatFileType.DOCUMENT,
].includes(fileType);
@@ -42,7 +42,7 @@ export default function FileDisplay({ files }: FileDisplayProps) {
file.type === ChatFileType.DOCUMENT
);
const imageFiles = files.filter((file) => file.type === ChatFileType.IMAGE);
const csvFiles = files.filter((file) => file.type === ChatFileType.CSV);
const csvFiles = files.filter((file) => file.type === ChatFileType.TABULAR);

const presentingDocument: MinimalOnyxDocument = {
document_id: previewingFile?.id ?? "",
@@ -142,7 +142,6 @@ function PopoverContent({
collisionPadding={8}
className={cn(
"bg-background-neutral-00 p-1 z-popover rounded-12 border shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
"flex flex-col",
"max-h-[var(--radix-popover-content-available-height)]",
"overflow-hidden",
widthClasses[width]
@@ -227,7 +226,7 @@ export function PopoverMenu({
});

return (
<Section alignItems="stretch" height="auto" className="flex-1 min-h-0">
<Section alignItems="stretch">
<ShadowDiv
scrollContainerRef={scrollContainerRef}
className="flex flex-col gap-1 max-h-[20rem] w-full"
@@ -105,7 +105,7 @@ export default function ShadowDiv({
}, [containerRef, checkScroll]);

return (
<div className="relative min-h-0 flex flex-col">
<div className="relative min-h-0">
<div
ref={containerRef}
className={cn("overflow-y-auto", className)}
@@ -1,420 +0,0 @@
"use client";

import { useState } from "react";
import { toast } from "@/hooks/useToast";
import { Button } from "@opal/components";
import { Disabled } from "@opal/core";
import { cn } from "@/lib/utils";
import { ContentAction } from "@opal/layouts";
import Card from "@/refresh-components/cards/Card";
import Text from "@/refresh-components/texts/Text";
import { Section } from "@/layouts/general-layouts";
import {
SvgCheckCircle,
SvgExternalLink,
SvgPlug,
SvgRefreshCw,
SvgSettings,
SvgTrash,
SvgUnplug,
} from "@opal/icons";
import Modal, { BasicModalFooter } from "@/refresh-components/Modal";
import type {
HookPointMeta,
HookResponse,
} from "@/refresh-pages/admin/HooksPage/interfaces";
import {
activateHook,
deactivateHook,
deleteHook,
validateHook,
} from "@/refresh-pages/admin/HooksPage/svc";
import { getHookPointIcon } from "@/refresh-pages/admin/HooksPage/hookPointIcons";

// ---------------------------------------------------------------------------
// Sub-component: disconnect confirmation modal
// ---------------------------------------------------------------------------

interface DisconnectConfirmModalProps {
open: boolean;
onOpenChange: (open: boolean) => void;
hook: HookResponse;
onDisconnect: () => void;
onDisconnectAndDelete: () => void;
}

function DisconnectConfirmModal({
open,
onOpenChange,
hook,
onDisconnect,
onDisconnectAndDelete,
}: DisconnectConfirmModalProps) {
return (
<Modal open={open} onOpenChange={onOpenChange}>
<Modal.Content width="md" height="fit">
<Modal.Header
icon={(props) => (
<SvgUnplug {...props} className="text-action-danger-05" />
)}
title={`Disconnect ${hook.name}`}
onClose={() => onOpenChange(false)}
/>
<Modal.Body>
<div className="flex flex-col gap-4">
<Text mainUiBody text03>
Onyx will stop calling this endpoint for hook{" "}
<strong>
<em>{hook.name}</em>
</strong>
. In-flight requests will continue to run. The external endpoint
may still retain data previously sent to it. You can reconnect
this hook later if needed.
</Text>
<Text mainUiBody text03>
You can also delete this hook. Deletion cannot be undone.
</Text>
</div>
</Modal.Body>
<Modal.Footer>
<BasicModalFooter
cancel={
<Button
prominence="secondary"
onClick={() => onOpenChange(false)}
>
Cancel
</Button>
}
submit={
<div className="flex items-center gap-2">
<Button
variant="danger"
prominence="secondary"
onClick={onDisconnectAndDelete}
>
Disconnect & Delete
</Button>
<Button
variant="danger"
prominence="primary"
onClick={onDisconnect}
>
Disconnect
</Button>
</div>
}
/>
</Modal.Footer>
</Modal.Content>
</Modal>
);
}

// ---------------------------------------------------------------------------
// Sub-component: delete confirmation modal
// ---------------------------------------------------------------------------

interface DeleteConfirmModalProps {
open: boolean;
onOpenChange: (open: boolean) => void;
hook: HookResponse;
onDelete: () => void;
}

function DeleteConfirmModal({
open,
onOpenChange,
hook,
onDelete,
}: DeleteConfirmModalProps) {
return (
<Modal open={open} onOpenChange={onOpenChange}>
<Modal.Content width="md" height="fit">
<Modal.Header
icon={(props) => (
<SvgTrash {...props} className="text-action-danger-05" />
)}
title={`Delete ${hook.name}`}
onClose={() => onOpenChange(false)}
/>
<Modal.Body>
<div className="flex flex-col gap-4">
<Text mainUiBody text03>
Hook{" "}
<strong>
<em>{hook.name}</em>
</strong>{" "}
will be permanently removed from this hook point. The external
endpoint may still retain data previously sent to it.
</Text>
<Text mainUiBody text03>
Deletion cannot be undone.
</Text>
</div>
</Modal.Body>
<Modal.Footer>
<BasicModalFooter
cancel={
<Button
prominence="secondary"
onClick={() => onOpenChange(false)}
>
Cancel
</Button>
}
submit={
<Button variant="danger" prominence="primary" onClick={onDelete}>
Delete
</Button>
}
/>
</Modal.Footer>
</Modal.Content>
</Modal>
);
}

// ---------------------------------------------------------------------------
// ConnectedHookCard
// ---------------------------------------------------------------------------

export interface ConnectedHookCardProps {
hook: HookResponse;
spec: HookPointMeta | undefined;
onEdit: () => void;
onDeleted: () => void;
onToggled: (updated: HookResponse) => void;
}

export default function ConnectedHookCard({
hook,
spec,
onEdit,
onDeleted,
onToggled,
}: ConnectedHookCardProps) {
const [isBusy, setIsBusy] = useState(false);
const [disconnectConfirmOpen, setDisconnectConfirmOpen] = useState(false);
const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false);

async function handleDelete() {
setDeleteConfirmOpen(false);
setIsBusy(true);
try {
await deleteHook(hook.id);
onDeleted();
} catch (err) {
console.error("Failed to delete hook:", err);
toast.error(
err instanceof Error ? err.message : "Failed to delete hook."
);
} finally {
setIsBusy(false);
}
}

async function handleActivate() {
setIsBusy(true);
try {
const updated = await activateHook(hook.id);
onToggled(updated);
} catch (err) {
console.error("Failed to reconnect hook:", err);
toast.error(
err instanceof Error ? err.message : "Failed to reconnect hook."
);
} finally {
setIsBusy(false);
}
}

async function handleDeactivate() {
setDisconnectConfirmOpen(false);
setIsBusy(true);
try {
const updated = await deactivateHook(hook.id);
onToggled(updated);
} catch (err) {
console.error("Failed to deactivate hook:", err);
toast.error(
err instanceof Error ? err.message : "Failed to deactivate hook."
);
} finally {
setIsBusy(false);
}
}

async function handleDisconnectAndDelete() {
setDisconnectConfirmOpen(false);
setIsBusy(true);
try {
const deactivated = await deactivateHook(hook.id);
onToggled(deactivated);
await deleteHook(hook.id);
onDeleted();
} catch (err) {
console.error("Failed to disconnect hook:", err);
toast.error(
err instanceof Error ? err.message : "Failed to disconnect hook."
);
} finally {
setIsBusy(false);
}
}

async function handleValidate() {
setIsBusy(true);
try {
const result = await validateHook(hook.id);
if (result.status === "passed") {
toast.success("Hook validated successfully.");
} else {
toast.error(
result.error_message ?? `Validation failed: ${result.status}`
);
}
} catch (err) {
console.error("Failed to validate hook:", err);
toast.error(
err instanceof Error ? err.message : "Failed to validate hook."
);
} finally {
setIsBusy(false);
}
}

const HookIcon = getHookPointIcon(hook.hook_point);

return (
<>
<DisconnectConfirmModal
open={disconnectConfirmOpen}
onOpenChange={setDisconnectConfirmOpen}
hook={hook}
onDisconnect={handleDeactivate}
onDisconnectAndDelete={handleDisconnectAndDelete}
/>
<DeleteConfirmModal
open={deleteConfirmOpen}
onOpenChange={setDeleteConfirmOpen}
hook={hook}
onDelete={handleDelete}
/>
<Card
variant="primary"
padding={0.5}
gap={0}
className={cn(
"hover:border-border-02",
!hook.is_active && "!bg-background-neutral-02"
)}
>
<ContentAction
sizePreset="main-ui"
variant="section"
paddingVariant="sm"
icon={HookIcon}
title={hook.name}
titleClassName={!hook.is_active ? "line-through" : undefined}
iconClassName="text-text-04"
description={`Hook Point: ${spec?.display_name ?? hook.hook_point}`}
bottomChildren={
spec?.docs_url ? (
<a
href={spec.docs_url}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 w-fit font-secondary-body text-text-03"
>
<span className="underline">Documentation</span>
<SvgExternalLink size={12} className="shrink-0" />
</a>
) : undefined
}
rightChildren={
<Section
flexDirection="column"
alignItems="end"
width="fit"
height="fit"
gap={0}
>
<div className="flex items-center gap-1 p-2">
{hook.is_active ? (
<>
<Text mainUiAction text03>
Connected
</Text>
<SvgCheckCircle
size={16}
className="text-status-success-05"
/>
</>
) : (
<div
className={cn(
"flex items-center gap-1",
isBusy
? "opacity-50 pointer-events-none"
: "cursor-pointer"
)}
onClick={handleActivate}
>
<Text mainUiAction text03>
Reconnect
</Text>
<SvgPlug size={16} className="text-text-03 shrink-0" />
</div>
)}
</div>
<Disabled disabled={isBusy}>
{/* Plain div instead of Section: Section applies style={{ padding }} inline which
overrides Tailwind padding classes, making per-side padding (pl/pr/pb) ineffective. */}
<div className="flex items-center gap-0.5 pl-1 pr-1 pb-1">
{hook.is_active ? (
<>
<Button
prominence="tertiary"
size="sm"
icon={SvgUnplug}
onClick={() => setDisconnectConfirmOpen(true)}
tooltip="Disconnect Hook"
aria-label="Deactivate hook"
/>
<Button
prominence="tertiary"
size="sm"
icon={SvgRefreshCw}
onClick={handleValidate}
tooltip="Test Connection"
aria-label="Re-validate hook"
/>
</>
) : (
<Button
prominence="tertiary"
size="sm"
icon={SvgTrash}
onClick={() => setDeleteConfirmOpen(true)}
tooltip="Delete"
aria-label="Delete hook"
/>
)}
<Button
prominence="tertiary"
size="sm"
icon={SvgSettings}
onClick={onEdit}
tooltip="Manage"
aria-label="Configure hook"
/>
</div>
</Disabled>
</Section>
}
/>
</Card>
</>
);
}
@@ -1,211 +1,117 @@
"use client";

import { useState } from "react";
import { useState, useEffect } from "react";
import { toast } from "@/hooks/useToast";
import { useHookSpecs } from "@/hooks/useHookSpecs";
import { useHooks } from "@/hooks/useHooks";
import SimpleLoader from "@/refresh-components/loaders/SimpleLoader";
import { Button } from "@opal/components";
import { ContentAction } from "@opal/layouts";
import { Button } from "@opal/components";
import InputSearch from "@/refresh-components/inputs/InputSearch";
import Card from "@/refresh-components/cards/Card";
import Text from "@/refresh-components/texts/Text";
import { SvgArrowExchange, SvgExternalLink } from "@opal/icons";
import HookFormModal from "@/refresh-pages/admin/HooksPage/HookFormModal";
import ConnectedHookCard from "@/refresh-pages/admin/HooksPage/ConnectedHookCard";
import { getHookPointIcon } from "@/refresh-pages/admin/HooksPage/hookPointIcons";
import type {
HookPointMeta,
HookResponse,
} from "@/refresh-pages/admin/HooksPage/interfaces";
import {
SvgArrowExchange,
SvgBubbleText,
SvgExternalLink,
SvgFileBroadcast,
SvgHookNodes,
} from "@opal/icons";
import { IconFunctionComponent } from "@opal/types";

// ---------------------------------------------------------------------------
// Main component
// ---------------------------------------------------------------------------
const HOOK_POINT_ICONS: Record<string, IconFunctionComponent> = {
document_ingestion: SvgFileBroadcast,
query_processing: SvgBubbleText,
};

function getHookPointIcon(hookPoint: string): IconFunctionComponent {
return HOOK_POINT_ICONS[hookPoint] ?? SvgHookNodes;
}

export default function HooksContent() {
const [search, setSearch] = useState("");
const [connectSpec, setConnectSpec] = useState<HookPointMeta | null>(null);
const [editHook, setEditHook] = useState<HookResponse | null>(null);

const { specs, isLoading: specsLoading, error: specsError } = useHookSpecs();
const {
hooks,
isLoading: hooksLoading,
error: hooksError,
mutate,
} = useHooks();
const { specs, isLoading, error } = useHookSpecs();

if (specsLoading || hooksLoading) {
useEffect(() => {
if (error) {
toast.error("Failed to load hook specifications.");
}
}, [error]);

if (isLoading) {
return <SimpleLoader />;
}

if (specsError || hooksError) {
if (error) {
return (
<Text text03 secondaryBody>
Failed to load{specsError ? " hook specifications" : " hooks"}. Please
refresh the page.
Failed to load hook specifications. Please refresh the page.
</Text>
);
}

const hooksByPoint: Record<string, HookResponse[]> = {};
for (const hook of hooks ?? []) {
(hooksByPoint[hook.hook_point] ??= []).push(hook);
}

const searchLower = search.toLowerCase();

// Connected hooks sorted alphabetically by hook name
const connectedHooks = (hooks ?? [])
.filter(
(hook) =>
!searchLower ||
hook.name.toLowerCase().includes(searchLower) ||
specs
?.find((s) => s.hook_point === hook.hook_point)
?.display_name.toLowerCase()
.includes(searchLower)
)
.sort((a, b) => a.name.localeCompare(b.name));

// Unconnected hook point specs sorted alphabetically
const unconnectedSpecs = (specs ?? [])
.filter(
(spec) =>
(hooksByPoint[spec.hook_point]?.length ?? 0) === 0 &&
(!searchLower ||
spec.display_name.toLowerCase().includes(searchLower) ||
spec.description.toLowerCase().includes(searchLower))
)
.sort((a, b) => a.display_name.localeCompare(b.display_name));

function handleHookSuccess(updated: HookResponse) {
mutate((prev) => {
if (!prev) return [updated];
const idx = prev.findIndex((h) => h.id === updated.id);
if (idx >= 0) {
const next = [...prev];
next[idx] = updated;
return next;
}
return [...prev, updated];
});
}

function handleHookDeleted(id: number) {
mutate((prev) => prev?.filter((h) => h.id !== id));
}

const connectSpec_ =
connectSpec ??
(editHook
? specs?.find((s) => s.hook_point === editHook.hook_point)
: undefined);
const filtered = (specs ?? []).filter(
(spec) =>
spec.display_name.toLowerCase().includes(search.toLowerCase()) ||
spec.description.toLowerCase().includes(search.toLowerCase())
);

return (
<>
<div className="flex flex-col gap-6">
<InputSearch
placeholder="Search hooks..."
value={search}
onChange={(e) => setSearch(e.target.value)}
/>
<div className="flex flex-col gap-6">
<InputSearch
placeholder="Search hooks..."
value={search}
onChange={(e) => setSearch(e.target.value)}
/>

<div className="flex flex-col gap-2">
{connectedHooks.length === 0 && unconnectedSpecs.length === 0 ? (
<Text text03 secondaryBody>
{search
? "No hooks match your search."
: "No hook points are available."}
</Text>
) : (
<>
{connectedHooks.map((hook) => {
const spec = specs?.find(
(s) => s.hook_point === hook.hook_point
);
return (
<ConnectedHookCard
key={hook.id}
hook={hook}
spec={spec}
onEdit={() => setEditHook(hook)}
onDeleted={() => handleHookDeleted(hook.id)}
onToggled={handleHookSuccess}
/>
);
})}
{unconnectedSpecs.map((spec) => {
const UnconnectedIcon = getHookPointIcon(spec.hook_point);
return (
<Card
key={spec.hook_point}
variant="secondary"
padding={0.5}
gap={0}
className="hover:border-border-02"
<div className="flex flex-col gap-2">
{filtered.length === 0 ? (
<Text text03 secondaryBody>
{search
? "No hooks match your search."
: "No hook points are available."}
</Text>
) : (
filtered.map((spec) => (
<Card
key={spec.hook_point}
variant="secondary"
padding={0.5}
gap={0}
>
<ContentAction
icon={getHookPointIcon(spec.hook_point)}
title={spec.display_name}
description={spec.description}
sizePreset="main-content"
variant="section"
paddingVariant="fit"
rightChildren={
// TODO(Bo-Onyx): wire up Connect — open modal to create/edit hook
<Button prominence="tertiary" rightIcon={SvgArrowExchange}>
Connect
</Button>
}
/>
{spec.docs_url && (
<div className="pl-7 pt-1">
<a
href={spec.docs_url}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 w-fit text-text-03"
>
<ContentAction
sizePreset="main-ui"
variant="section"
paddingVariant="sm"
icon={UnconnectedIcon}
title={spec.display_name}
iconClassName="text-text-04"
description={spec.description}
bottomChildren={
spec.docs_url ? (
<a
href={spec.docs_url}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 w-fit font-secondary-body text-text-03"
>
<span className="underline">Documentation</span>
<SvgExternalLink size={12} className="shrink-0" />
</a>
) : undefined
}
rightChildren={
<Button
prominence="tertiary"
rightIcon={SvgArrowExchange}
onClick={() => setConnectSpec(spec)}
>
Connect
</Button>
}
/>
</Card>
);
})}
</>
)}
</div>
<Text as="span" secondaryBody text03 className="underline">
Documentation
</Text>
<SvgExternalLink size={16} className="text-text-02" />
</a>
</div>
)}
</Card>
))
)}
</div>

{/* Create modal */}
<HookFormModal
key={connectSpec?.hook_point ?? "create"}
open={!!connectSpec}
onOpenChange={(open) => {
if (!open) setConnectSpec(null);
}}
spec={connectSpec ?? undefined}
onSuccess={handleHookSuccess}
/>

{/* Edit modal */}
<HookFormModal
key={editHook?.id ?? "edit"}
open={!!editHook}
onOpenChange={(open) => {
if (!open) setEditHook(null);
}}
hook={editHook ?? undefined}
spec={connectSpec_ ?? undefined}
onSuccess={handleHookSuccess}
/>
</>
</div>
);
}
@@ -1,13 +0,0 @@
import { SvgBubbleText, SvgFileBroadcast, SvgHookNodes } from "@opal/icons";
import type { IconFunctionComponent } from "@opal/types";

const HOOK_POINT_ICONS: Record<string, IconFunctionComponent> = {
document_ingestion: SvgFileBroadcast,
query_processing: SvgBubbleText,
};

function getHookPointIcon(hookPoint: string): IconFunctionComponent {
return HOOK_POINT_ICONS[hookPoint] ?? SvgHookNodes;
}

export { HOOK_POINT_ICONS, getHookPointIcon };
@@ -7,6 +7,7 @@ import {
IconProps,
OpenAIIcon,
} from "@/components/icons/icons";
import Text from "@/refresh-components/texts/Text";
import { Select } from "@/refresh-components/cards";
import Message from "@/refresh-components/messages/Message";
import * as SettingsLayouts from "@/layouts/settings-layouts";
@@ -25,8 +26,7 @@ import { toast } from "@/hooks/useToast";
import { Callout } from "@/components/ui/callout";
import { Content } from "@opal/layouts";
import { SvgMicrophone, SvgSlash, SvgUnplug } from "@opal/icons";
import { Button, Text } from "@opal/components";
import { markdown } from "@opal/utils";
import { Button as OpalButton } from "@opal/components";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import { Section } from "@/layouts/general-layouts";
import { ADMIN_ROUTES } from "@/lib/admin-routes";
@@ -205,7 +205,7 @@ function VoiceDisconnectModal({
description="Voice models"
onClose={onClose}
submit={
<Button
<OpalButton
variant="danger"
onClick={onDisconnect}
disabled={
@@ -213,19 +213,19 @@ function VoiceDisconnectModal({
}
>
Disconnect
</Button>
</OpalButton>
}
>
{needsReplacement ? (
hasReplacements ? (
<Section alignItems="start">
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be used for speech-to-text or text-to-speech, and it will no longer be your default. Session history will be preserved.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
used for speech-to-text or text-to-speech, and it will no longer
be your default. Session history will be preserved.
</Text>
<Section alignItems="start" gap={0.25}>
<Text as="p" color="text-04">
<Text as="p" text04>
Set New Default
</Text>
<InputSelect
@@ -256,24 +256,23 @@ function VoiceDisconnectModal({
</Section>
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be used for speech-to-text or text-to-speech, and it will no longer be your default.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
used for speech-to-text or text-to-speech, and it will no longer
be your default.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Connect another provider to continue using voice.
</Text>
</>
)
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be available for voice.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
available for voice.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Session history will be preserved.
</Text>
</>
@@ -537,7 +536,7 @@ export default function VoiceConfigurationPage() {
<Callout type="danger" title="Failed to load voice settings">
{message}
{detail && (
<Text as="p" font="main-content-body" color="text-03">
<Text as="p" mainContentBody text03>
{detail}
</Text>
)}
@@ -627,7 +626,7 @@ export default function VoiceConfigurationPage() {

{TTS_PROVIDER_GROUPS.map((group) => (
<div key={group.providerType} className="flex flex-col gap-2">
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
{group.providerLabel}
</Text>
<div className="flex flex-col gap-2">
@@ -4,14 +4,6 @@ import { expectScreenshot } from "@tests/e2e/utils/visualRegression";

test.use({ storageState: "admin_auth.json" });

/** Maps each settings slug to the header title shown on that page. */
const SLUG_TO_HEADER: Record<string, string> = {
general: "Profile",
"chat-preferences": "Chats",
"accounts-access": "Accounts",
connectors: "Connectors",
};

for (const theme of THEMES) {
test.describe(`Settings pages (${theme} mode)`, () => {
test.beforeEach(async ({ page }) => {
@@ -19,33 +11,21 @@ for (const theme of THEMES) {
});

test("should screenshot each settings tab", async ({ page }) => {
await page.goto("/app/settings/general");
await page
.getByTestId("settings-left-tab-navigation")
.waitFor({ state: "visible" });
await page.goto("/app/settings");
await page.waitForLoadState("networkidle");

const nav = page.getByTestId("settings-left-tab-navigation");
const tabs = nav.locator("a");
await expect(tabs.first()).toBeVisible({ timeout: 10_000 });
const count = await tabs.count();

expect(count).toBeGreaterThan(0);
for (let i = 0; i < count; i++) {
const tab = tabs.nth(i);
const href = await tab.getAttribute("href");
const slug = href ? href.replace("/app/settings/", "") : `tab-${i}`;

await tab.click();

const expectedHeader = SLUG_TO_HEADER[slug];
if (expectedHeader) {
await expect(
page
.locator(".opal-content-md-header")
.filter({ hasText: expectedHeader })
).toBeVisible({ timeout: 10_000 });
} else {
await page.waitForLoadState("networkidle");
}
await page.waitForLoadState("networkidle");

await expectScreenshot(page, {
name: `settings-${theme}-${slug}`,