mirror of
https://github.com/onyx-dot-app/onyx.git
synced 2026-03-25 09:32:45 +00:00
Compare commits
24 Commits
dane/index
...
jamison/re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e816e06a04 | ||
|
|
415c05b5f8 | ||
|
|
352fd19f0a | ||
|
|
41ae039bfa | ||
|
|
782c734287 | ||
|
|
728cdb0715 | ||
|
|
baf6437117 | ||
|
|
f187165077 | ||
|
|
727be3d663 | ||
|
|
98c8f9884b | ||
|
|
d79a068984 | ||
|
|
ba0740d15f | ||
|
|
86b7bed90b | ||
|
|
aead6ab9a5 | ||
|
|
c9d4c186dd | ||
|
|
70aad1ec46 | ||
|
|
ca3cc16ead | ||
|
|
9ea1780ce5 | ||
|
|
f70e5e605e | ||
|
|
84b134e226 | ||
|
|
b17c63a7d6 | ||
|
|
76c41d1b0b | ||
|
|
579b86f1ce | ||
|
|
a53cf13db1 |
@@ -6,3 +6,4 @@
|
||||
|
||||
3134e5f840c12c8f32613ce520101a047c89dcc2 # refactor(whitespace): rm temporary react fragments (#7161)
|
||||
ed3f72bc75f3e3a9ae9e4d8cd38278f9c97e78b4 # refactor(whitespace): rm react fragment #7190
|
||||
7b927e79c25f4ddfd18a067f489e122acd2c89de # chore(format): format files where `ruff` and `black` agree (#9339)
|
||||
|
||||
@@ -7,6 +7,15 @@ on:
|
||||
merge_group:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "pyproject.toml"
|
||||
- "uv.lock"
|
||||
- ".github/workflows/pr-external-dependency-unit-tests.yml"
|
||||
- ".github/actions/setup-python-and-install-dependencies/**"
|
||||
- ".github/actions/setup-playwright/**"
|
||||
- "deployment/docker_compose/docker-compose.yml"
|
||||
- "deployment/docker_compose/docker-compose.dev.yml"
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
|
||||
@@ -7,6 +7,13 @@ on:
|
||||
merge_group:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "pyproject.toml"
|
||||
- "uv.lock"
|
||||
- ".github/workflows/pr-python-connector-tests.yml"
|
||||
- ".github/actions/setup-python-and-install-dependencies/**"
|
||||
- ".github/actions/setup-playwright/**"
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
|
||||
@@ -25,10 +25,13 @@ from onyx.redis.redis_pool import get_redis_client
|
||||
from shared_configs.configs import MULTI_TENANT
|
||||
from shared_configs.configs import TENANT_ID_PREFIX
|
||||
|
||||
# Soft time limit for tenant pre-provisioning tasks (in seconds)
|
||||
_TENANT_PROVISIONING_SOFT_TIME_LIMIT = 60 * 5 # 5 minutes
|
||||
# Hard time limit for tenant pre-provisioning tasks (in seconds)
|
||||
_TENANT_PROVISIONING_TIME_LIMIT = 60 * 10 # 10 minutes
|
||||
# Maximum tenants to provision in a single task run.
|
||||
# Each tenant takes ~80s (alembic migrations), so 5 tenants ≈ 7 minutes.
|
||||
_MAX_TENANTS_PER_RUN = 5
|
||||
|
||||
# Time limits sized for worst-case batch: _MAX_TENANTS_PER_RUN × ~90s + buffer.
|
||||
_TENANT_PROVISIONING_SOFT_TIME_LIMIT = 60 * 10 # 10 minutes
|
||||
_TENANT_PROVISIONING_TIME_LIMIT = 60 * 15 # 15 minutes
|
||||
|
||||
|
||||
@shared_task(
|
||||
@@ -85,9 +88,26 @@ def check_available_tenants(self: Task) -> None: # noqa: ARG001
|
||||
f"To provision: {tenants_to_provision}"
|
||||
)
|
||||
|
||||
# just provision one tenant each time we run this ... increase if needed.
|
||||
if tenants_to_provision > 0:
|
||||
pre_provision_tenant()
|
||||
batch_size = min(tenants_to_provision, _MAX_TENANTS_PER_RUN)
|
||||
if batch_size < tenants_to_provision:
|
||||
task_logger.info(
|
||||
f"Capping batch to {batch_size} "
|
||||
f"(need {tenants_to_provision}, will catch up next cycle)"
|
||||
)
|
||||
|
||||
provisioned = 0
|
||||
for i in range(batch_size):
|
||||
task_logger.info(f"Provisioning tenant {i + 1}/{batch_size}")
|
||||
try:
|
||||
if pre_provision_tenant():
|
||||
provisioned += 1
|
||||
except Exception:
|
||||
task_logger.exception(
|
||||
f"Failed to provision tenant {i + 1}/{batch_size}, "
|
||||
"continuing with remaining tenants"
|
||||
)
|
||||
|
||||
task_logger.info(f"Provisioning complete: {provisioned}/{batch_size} succeeded")
|
||||
|
||||
except Exception:
|
||||
task_logger.exception("Error in check_available_tenants task")
|
||||
@@ -101,11 +121,13 @@ def check_available_tenants(self: Task) -> None: # noqa: ARG001
|
||||
)
|
||||
|
||||
|
||||
def pre_provision_tenant() -> None:
|
||||
def pre_provision_tenant() -> bool:
|
||||
"""
|
||||
Pre-provision a new tenant and store it in the NewAvailableTenant table.
|
||||
This function fully sets up the tenant with all necessary configurations,
|
||||
so it's ready to be assigned to a user immediately.
|
||||
|
||||
Returns True if a tenant was successfully provisioned, False otherwise.
|
||||
"""
|
||||
# The MULTI_TENANT check is now done at the caller level (check_available_tenants)
|
||||
# rather than inside this function
|
||||
@@ -118,10 +140,10 @@ def pre_provision_tenant() -> None:
|
||||
|
||||
# Allow multiple pre-provisioning tasks to run, but ensure they don't overlap
|
||||
if not lock_provision.acquire(blocking=False):
|
||||
task_logger.debug(
|
||||
"Skipping pre_provision_tenant task because it is already running"
|
||||
task_logger.warning(
|
||||
"Skipping pre_provision_tenant — could not acquire provision lock"
|
||||
)
|
||||
return
|
||||
return False
|
||||
|
||||
tenant_id: str | None = None
|
||||
try:
|
||||
@@ -161,6 +183,7 @@ def pre_provision_tenant() -> None:
|
||||
db_session.add(new_tenant)
|
||||
db_session.commit()
|
||||
task_logger.info(f"Successfully pre-provisioned tenant: {tenant_id}")
|
||||
return True
|
||||
except Exception:
|
||||
db_session.rollback()
|
||||
task_logger.error(
|
||||
@@ -184,6 +207,7 @@ def pre_provision_tenant() -> None:
|
||||
asyncio.run(rollback_tenant_provisioning(tenant_id))
|
||||
except Exception:
|
||||
task_logger.exception(f"Error during rollback for tenant: {tenant_id}")
|
||||
return False
|
||||
finally:
|
||||
try:
|
||||
lock_provision.release()
|
||||
|
||||
@@ -800,6 +800,33 @@ def update_user_group(
|
||||
return db_user_group
|
||||
|
||||
|
||||
def rename_user_group(
|
||||
db_session: Session,
|
||||
user_group_id: int,
|
||||
new_name: str,
|
||||
) -> UserGroup:
|
||||
stmt = select(UserGroup).where(UserGroup.id == user_group_id)
|
||||
db_user_group = db_session.scalar(stmt)
|
||||
if db_user_group is None:
|
||||
raise ValueError(f"UserGroup with id '{user_group_id}' not found")
|
||||
|
||||
_check_user_group_is_modifiable(db_user_group)
|
||||
|
||||
db_user_group.name = new_name
|
||||
db_user_group.time_last_modified_by_user = func.now()
|
||||
|
||||
# CC pair documents in Vespa contain the group name, so we need to
|
||||
# trigger a sync to update them with the new name.
|
||||
_mark_user_group__cc_pair_relationships_outdated__no_commit(
|
||||
db_session=db_session, user_group_id=user_group_id
|
||||
)
|
||||
if not DISABLE_VECTOR_DB:
|
||||
db_user_group.is_up_to_date = False
|
||||
|
||||
db_session.commit()
|
||||
return db_user_group
|
||||
|
||||
|
||||
def prepare_user_group_for_deletion(db_session: Session, user_group_id: int) -> None:
|
||||
stmt = select(UserGroup).where(UserGroup.id == user_group_id)
|
||||
db_user_group = db_session.scalar(stmt)
|
||||
|
||||
@@ -11,6 +11,7 @@ from ee.onyx.db.user_group import fetch_user_groups
|
||||
from ee.onyx.db.user_group import fetch_user_groups_for_user
|
||||
from ee.onyx.db.user_group import insert_user_group
|
||||
from ee.onyx.db.user_group import prepare_user_group_for_deletion
|
||||
from ee.onyx.db.user_group import rename_user_group
|
||||
from ee.onyx.db.user_group import update_user_curator_relationship
|
||||
from ee.onyx.db.user_group import update_user_group
|
||||
from ee.onyx.server.user_group.models import AddUsersToUserGroupRequest
|
||||
@@ -18,6 +19,7 @@ from ee.onyx.server.user_group.models import MinimalUserGroupSnapshot
|
||||
from ee.onyx.server.user_group.models import SetCuratorRequest
|
||||
from ee.onyx.server.user_group.models import UserGroup
|
||||
from ee.onyx.server.user_group.models import UserGroupCreate
|
||||
from ee.onyx.server.user_group.models import UserGroupRename
|
||||
from ee.onyx.server.user_group.models import UserGroupUpdate
|
||||
from onyx.auth.users import current_admin_user
|
||||
from onyx.auth.users import current_curator_or_admin_user
|
||||
@@ -27,6 +29,8 @@ from onyx.configs.constants import PUBLIC_API_TAGS
|
||||
from onyx.db.engine.sql_engine import get_session
|
||||
from onyx.db.models import User
|
||||
from onyx.db.models import UserRole
|
||||
from onyx.error_handling.error_codes import OnyxErrorCode
|
||||
from onyx.error_handling.exceptions import OnyxError
|
||||
from onyx.utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger()
|
||||
@@ -87,6 +91,32 @@ def create_user_group(
|
||||
return UserGroup.from_model(db_user_group)
|
||||
|
||||
|
||||
@router.patch("/admin/user-group/rename")
|
||||
def rename_user_group_endpoint(
|
||||
rename_request: UserGroupRename,
|
||||
_: User = Depends(current_admin_user),
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> UserGroup:
|
||||
try:
|
||||
return UserGroup.from_model(
|
||||
rename_user_group(
|
||||
db_session=db_session,
|
||||
user_group_id=rename_request.id,
|
||||
new_name=rename_request.name,
|
||||
)
|
||||
)
|
||||
except IntegrityError:
|
||||
raise OnyxError(
|
||||
OnyxErrorCode.DUPLICATE_RESOURCE,
|
||||
f"User group with name '{rename_request.name}' already exists.",
|
||||
)
|
||||
except ValueError as e:
|
||||
msg = str(e)
|
||||
if "not found" in msg.lower():
|
||||
raise OnyxError(OnyxErrorCode.NOT_FOUND, msg)
|
||||
raise OnyxError(OnyxErrorCode.CONFLICT, msg)
|
||||
|
||||
|
||||
@router.patch("/admin/user-group/{user_group_id}")
|
||||
def patch_user_group(
|
||||
user_group_id: int,
|
||||
|
||||
@@ -104,6 +104,11 @@ class AddUsersToUserGroupRequest(BaseModel):
|
||||
user_ids: list[UUID]
|
||||
|
||||
|
||||
class UserGroupRename(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
|
||||
|
||||
class SetCuratorRequest(BaseModel):
|
||||
user_id: UUID
|
||||
is_curator: bool
|
||||
|
||||
@@ -59,6 +59,7 @@ from onyx.db.chat import create_new_chat_message
|
||||
from onyx.db.chat import get_chat_session_by_id
|
||||
from onyx.db.chat import get_or_create_root_message
|
||||
from onyx.db.chat import reserve_message_id
|
||||
from onyx.db.enums import HookPoint
|
||||
from onyx.db.memory import get_memories
|
||||
from onyx.db.models import ChatMessage
|
||||
from onyx.db.models import ChatSession
|
||||
@@ -68,11 +69,19 @@ from onyx.db.models import UserFile
|
||||
from onyx.db.projects import get_user_files_from_project
|
||||
from onyx.db.tools import get_tools
|
||||
from onyx.deep_research.dr_loop import run_deep_research_llm_loop
|
||||
from onyx.error_handling.error_codes import OnyxErrorCode
|
||||
from onyx.error_handling.exceptions import log_onyx_error
|
||||
from onyx.error_handling.exceptions import OnyxError
|
||||
from onyx.file_processing.extract_file_text import extract_file_text
|
||||
from onyx.file_store.models import ChatFileType
|
||||
from onyx.file_store.models import InMemoryChatFile
|
||||
from onyx.file_store.utils import load_in_memory_chat_files
|
||||
from onyx.file_store.utils import verify_user_files
|
||||
from onyx.hooks.executor import execute_hook
|
||||
from onyx.hooks.executor import HookSkipped
|
||||
from onyx.hooks.executor import HookSoftFailed
|
||||
from onyx.hooks.points.query_processing import QueryProcessingPayload
|
||||
from onyx.hooks.points.query_processing import QueryProcessingResponse
|
||||
from onyx.llm.factory import get_llm_for_persona
|
||||
from onyx.llm.factory import get_llm_token_counter
|
||||
from onyx.llm.interfaces import LLM
|
||||
@@ -424,6 +433,28 @@ def determine_search_params(
|
||||
)
|
||||
|
||||
|
||||
def _resolve_query_processing_hook_result(
|
||||
hook_result: QueryProcessingResponse | HookSkipped | HookSoftFailed,
|
||||
message_text: str,
|
||||
) -> str:
|
||||
"""Apply the Query Processing hook result to the message text.
|
||||
|
||||
Returns the (possibly rewritten) message text, or raises OnyxError with
|
||||
QUERY_REJECTED if the hook signals rejection (query is null or empty).
|
||||
HookSkipped and HookSoftFailed are pass-throughs — the original text is
|
||||
returned unchanged.
|
||||
"""
|
||||
if isinstance(hook_result, (HookSkipped, HookSoftFailed)):
|
||||
return message_text
|
||||
if not (hook_result.query and hook_result.query.strip()):
|
||||
raise OnyxError(
|
||||
OnyxErrorCode.QUERY_REJECTED,
|
||||
hook_result.rejection_message
|
||||
or "The hook extension for query processing did not return a valid query. No rejection reason was provided.",
|
||||
)
|
||||
return hook_result.query.strip()
|
||||
|
||||
|
||||
def handle_stream_message_objects(
|
||||
new_msg_req: SendMessageRequest,
|
||||
user: User,
|
||||
@@ -474,16 +505,24 @@ def handle_stream_message_objects(
|
||||
db_session=db_session,
|
||||
)
|
||||
yield CreateChatSessionID(chat_session_id=chat_session.id)
|
||||
chat_session = get_chat_session_by_id(
|
||||
chat_session_id=chat_session.id,
|
||||
user_id=user_id,
|
||||
db_session=db_session,
|
||||
eager_load_persona=True,
|
||||
)
|
||||
else:
|
||||
chat_session = get_chat_session_by_id(
|
||||
chat_session_id=new_msg_req.chat_session_id,
|
||||
user_id=user_id,
|
||||
db_session=db_session,
|
||||
eager_load_persona=True,
|
||||
)
|
||||
|
||||
persona = chat_session.persona
|
||||
|
||||
message_text = new_msg_req.message
|
||||
|
||||
user_identity = LLMUserIdentity(
|
||||
user_id=llm_user_identifier, session_id=str(chat_session.id)
|
||||
)
|
||||
@@ -575,6 +614,28 @@ def handle_stream_message_objects(
|
||||
if parent_message.message_type == MessageType.USER:
|
||||
user_message = parent_message
|
||||
else:
|
||||
# New message — run the Query Processing hook before saving to DB.
|
||||
# Skipped on regeneration: the message already exists and was accepted previously.
|
||||
# Skip the hook for empty/whitespace-only messages — no meaningful query
|
||||
# to process, and SendMessageRequest.message has no min_length guard.
|
||||
if message_text.strip():
|
||||
hook_result = execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=QueryProcessingPayload(
|
||||
query=message_text,
|
||||
# Pass None for anonymous users or authenticated users without an email
|
||||
# (e.g. some SSO flows). QueryProcessingPayload.user_email is str | None,
|
||||
# so None is accepted and serialised as null in both cases.
|
||||
user_email=None if user.is_anonymous else user.email,
|
||||
chat_session_id=str(chat_session.id),
|
||||
).model_dump(),
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
message_text = _resolve_query_processing_hook_result(
|
||||
hook_result, message_text
|
||||
)
|
||||
|
||||
user_message = create_new_chat_message(
|
||||
chat_session_id=chat_session.id,
|
||||
parent_message=parent_message,
|
||||
@@ -914,6 +975,17 @@ def handle_stream_message_objects(
|
||||
state_container=state_container,
|
||||
)
|
||||
|
||||
except OnyxError as e:
|
||||
if e.error_code is not OnyxErrorCode.QUERY_REJECTED:
|
||||
log_onyx_error(e)
|
||||
yield StreamingError(
|
||||
error=e.detail,
|
||||
error_code=e.error_code.code,
|
||||
is_retryable=e.status_code >= 500,
|
||||
)
|
||||
db_session.rollback()
|
||||
return
|
||||
|
||||
except ValueError as e:
|
||||
logger.exception("Failed to process chat message.")
|
||||
|
||||
|
||||
@@ -787,10 +787,6 @@ MINI_CHUNK_SIZE = 150
|
||||
# This is the number of regular chunks per large chunk
|
||||
LARGE_CHUNK_RATIO = 4
|
||||
|
||||
# The maximum number of chunks that can be held for 1 document processing batch
|
||||
# The purpose of this is to set an upper bound on memory usage
|
||||
MAX_CHUNKS_PER_DOC_BATCH = int(os.environ.get("MAX_CHUNKS_PER_DOC_BATCH") or 1000)
|
||||
|
||||
# Include the document level metadata in each chunk. If the metadata is too long, then it is thrown out
|
||||
# We don't want the metadata to overwhelm the actual contents of the chunk
|
||||
SKIP_METADATA_IN_CHUNK = os.environ.get("SKIP_METADATA_IN_CHUNK", "").lower() == "true"
|
||||
|
||||
@@ -16,6 +16,7 @@ from sqlalchemy import Row
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy import update
|
||||
from sqlalchemy.exc import MultipleResultsFound
|
||||
from sqlalchemy.orm import joinedload
|
||||
from sqlalchemy.orm import selectinload
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
@@ -28,6 +29,7 @@ from onyx.db.models import ChatMessage
|
||||
from onyx.db.models import ChatMessage__SearchDoc
|
||||
from onyx.db.models import ChatSession
|
||||
from onyx.db.models import ChatSessionSharedStatus
|
||||
from onyx.db.models import Persona
|
||||
from onyx.db.models import SearchDoc as DBSearchDoc
|
||||
from onyx.db.models import ToolCall
|
||||
from onyx.db.models import User
|
||||
@@ -53,9 +55,19 @@ def get_chat_session_by_id(
|
||||
db_session: Session,
|
||||
include_deleted: bool = False,
|
||||
is_shared: bool = False,
|
||||
eager_load_persona: bool = False,
|
||||
) -> ChatSession:
|
||||
stmt = select(ChatSession).where(ChatSession.id == chat_session_id)
|
||||
|
||||
if eager_load_persona:
|
||||
stmt = stmt.options(
|
||||
joinedload(ChatSession.persona).options(
|
||||
selectinload(Persona.tools),
|
||||
selectinload(Persona.user_files),
|
||||
),
|
||||
joinedload(ChatSession.project),
|
||||
)
|
||||
|
||||
if is_shared:
|
||||
stmt = stmt.where(ChatSession.shared_status == ChatSessionSharedStatus.PUBLIC)
|
||||
else:
|
||||
|
||||
@@ -5,7 +5,6 @@ accidentally reaches the vector DB layer will fail loudly instead of timing
|
||||
out against a nonexistent Vespa/OpenSearch instance.
|
||||
"""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from typing import Any
|
||||
|
||||
from onyx.context.search.models import IndexFilters
|
||||
@@ -67,7 +66,7 @@ class DisabledDocumentIndex(DocumentIndex):
|
||||
# ------------------------------------------------------------------
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk], # noqa: ARG002
|
||||
chunks: list[DocMetadataAwareIndexChunk], # noqa: ARG002
|
||||
index_batch_params: IndexBatchParams, # noqa: ARG002
|
||||
) -> set[DocumentInsertionRecord]:
|
||||
raise RuntimeError(VECTOR_DB_DISABLED_ERROR)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import abc
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
@@ -207,7 +206,7 @@ class Indexable(abc.ABC):
|
||||
@abc.abstractmethod
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
index_batch_params: IndexBatchParams,
|
||||
) -> set[DocumentInsertionRecord]:
|
||||
"""
|
||||
@@ -227,8 +226,8 @@ class Indexable(abc.ABC):
|
||||
it is done automatically outside of this code.
|
||||
|
||||
Parameters:
|
||||
- chunks: Document chunks with all of the information needed for
|
||||
indexing to the document index.
|
||||
- chunks: Document chunks with all of the information needed for indexing to the document
|
||||
index.
|
||||
- tenant_id: The tenant id of the user whose chunks are being indexed
|
||||
- large_chunks_enabled: Whether large chunks are enabled
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import abc
|
||||
from collections.abc import Iterable
|
||||
from typing import Self
|
||||
|
||||
from pydantic import BaseModel
|
||||
@@ -210,10 +209,10 @@ class Indexable(abc.ABC):
|
||||
@abc.abstractmethod
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
indexing_metadata: IndexingMetadata,
|
||||
) -> list[DocumentInsertionRecord]:
|
||||
"""Indexes an iterable of document chunks into the document index.
|
||||
"""Indexes a list of document chunks into the document index.
|
||||
|
||||
This is often a batch operation including chunks from multiple
|
||||
documents.
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import json
|
||||
from collections.abc import Iterable
|
||||
from collections import defaultdict
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from opensearchpy import NotFoundError
|
||||
|
||||
from onyx.access.models import DocumentAccess
|
||||
from onyx.configs.app_configs import MAX_CHUNKS_PER_DOC_BATCH
|
||||
from onyx.configs.app_configs import VERIFY_CREATE_OPENSEARCH_INDEX_ON_INIT_MT
|
||||
from onyx.configs.chat_configs import NUM_RETURNED_HITS
|
||||
from onyx.configs.chat_configs import TITLE_CONTENT_RATIO
|
||||
@@ -351,7 +350,7 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
index_batch_params: IndexBatchParams,
|
||||
) -> set[OldDocumentInsertionRecord]:
|
||||
"""
|
||||
@@ -647,10 +646,10 @@ class OpenSearchDocumentIndex(DocumentIndex):
|
||||
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
indexing_metadata: IndexingMetadata,
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
indexing_metadata: IndexingMetadata, # noqa: ARG002
|
||||
) -> list[DocumentInsertionRecord]:
|
||||
"""Indexes an iterable of document chunks into the document index.
|
||||
"""Indexes a list of document chunks into the document index.
|
||||
|
||||
Groups chunks by document ID and for each document, deletes existing
|
||||
chunks and indexes the new chunks in bulk.
|
||||
@@ -673,34 +672,29 @@ class OpenSearchDocumentIndex(DocumentIndex):
|
||||
document is newly indexed or had already existed and was just
|
||||
updated.
|
||||
"""
|
||||
total_chunks = sum(
|
||||
cc.new_chunk_cnt
|
||||
for cc in indexing_metadata.doc_id_to_chunk_cnt_diff.values()
|
||||
# Group chunks by document ID.
|
||||
doc_id_to_chunks: dict[str, list[DocMetadataAwareIndexChunk]] = defaultdict(
|
||||
list
|
||||
)
|
||||
for chunk in chunks:
|
||||
doc_id_to_chunks[chunk.source_document.id].append(chunk)
|
||||
logger.debug(
|
||||
f"[OpenSearchDocumentIndex] Indexing {total_chunks} chunks from {len(indexing_metadata.doc_id_to_chunk_cnt_diff)} "
|
||||
f"[OpenSearchDocumentIndex] Indexing {len(chunks)} chunks from {len(doc_id_to_chunks)} "
|
||||
f"documents for index {self._index_name}."
|
||||
)
|
||||
|
||||
document_indexing_results: list[DocumentInsertionRecord] = []
|
||||
deleted_doc_ids: set[str] = set()
|
||||
# Buffer chunks per document as they arrive from the iterable.
|
||||
# When the document ID changes flush the buffered chunks.
|
||||
current_doc_id: str | None = None
|
||||
current_chunks: list[DocMetadataAwareIndexChunk] = []
|
||||
|
||||
def _flush_chunks(doc_chunks: list[DocMetadataAwareIndexChunk]) -> None:
|
||||
assert len(doc_chunks) > 0, "doc_chunks is empty"
|
||||
|
||||
# Try to index per-document.
|
||||
for _, chunks in doc_id_to_chunks.items():
|
||||
# Create a batch of OpenSearch-formatted chunks for bulk insertion.
|
||||
# Since we are doing this in batches, an error occurring midway
|
||||
# can result in a state where chunks are deleted and not all the
|
||||
# new chunks have been indexed.
|
||||
# Do this before deleting existing chunks to reduce the amount of
|
||||
# time the document index has no content for a given document, and
|
||||
# to reduce the chance of entering a state where we delete chunks,
|
||||
# then some error happens, and never successfully index new chunks.
|
||||
chunk_batch: list[DocumentChunk] = [
|
||||
_convert_onyx_chunk_to_opensearch_document(chunk)
|
||||
for chunk in doc_chunks
|
||||
_convert_onyx_chunk_to_opensearch_document(chunk) for chunk in chunks
|
||||
]
|
||||
onyx_document: Document = doc_chunks[0].source_document
|
||||
onyx_document: Document = chunks[0].source_document
|
||||
# First delete the doc's chunks from the index. This is so that
|
||||
# there are no dangling chunks in the index, in the event that the
|
||||
# new document's content contains fewer chunks than the previous
|
||||
@@ -709,43 +703,22 @@ class OpenSearchDocumentIndex(DocumentIndex):
|
||||
# if the chunk count has actually decreased. This assumes that
|
||||
# overlapping chunks are perfectly overwritten. If we can't
|
||||
# guarantee that then we need the code as-is.
|
||||
if onyx_document.id not in deleted_doc_ids:
|
||||
num_chunks_deleted = self.delete(
|
||||
onyx_document.id, onyx_document.chunk_count
|
||||
)
|
||||
deleted_doc_ids.add(onyx_document.id)
|
||||
# If we see that chunks were deleted we assume the doc already
|
||||
# existed. We record the result before bulk_index_documents
|
||||
# runs. If indexing raises, this entire result list is discarded
|
||||
# by the caller's retry logic, so early recording is safe.
|
||||
document_indexing_results.append(
|
||||
DocumentInsertionRecord(
|
||||
document_id=onyx_document.id,
|
||||
already_existed=num_chunks_deleted > 0,
|
||||
)
|
||||
)
|
||||
num_chunks_deleted = self.delete(
|
||||
onyx_document.id, onyx_document.chunk_count
|
||||
)
|
||||
# If we see that chunks were deleted we assume the doc already
|
||||
# existed.
|
||||
document_insertion_record = DocumentInsertionRecord(
|
||||
document_id=onyx_document.id,
|
||||
already_existed=num_chunks_deleted > 0,
|
||||
)
|
||||
# Now index. This will raise if a chunk of the same ID exists, which
|
||||
# we do not expect because we should have deleted all chunks.
|
||||
self._client.bulk_index_documents(
|
||||
documents=chunk_batch,
|
||||
tenant_state=self._tenant_state,
|
||||
)
|
||||
|
||||
for chunk in chunks:
|
||||
doc_id = chunk.source_document.id
|
||||
if doc_id != current_doc_id:
|
||||
if current_chunks:
|
||||
_flush_chunks(current_chunks)
|
||||
current_doc_id = doc_id
|
||||
current_chunks = [chunk]
|
||||
elif len(current_chunks) >= MAX_CHUNKS_PER_DOC_BATCH:
|
||||
_flush_chunks(current_chunks)
|
||||
current_chunks = [chunk]
|
||||
else:
|
||||
current_chunks.append(chunk)
|
||||
|
||||
if current_chunks:
|
||||
_flush_chunks(current_chunks)
|
||||
document_indexing_results.append(document_insertion_record)
|
||||
|
||||
return document_indexing_results
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ import re
|
||||
import time
|
||||
import urllib
|
||||
import zipfile
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
@@ -462,7 +461,7 @@ class VespaIndex(DocumentIndex):
|
||||
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
index_batch_params: IndexBatchParams,
|
||||
) -> set[OldDocumentInsertionRecord]:
|
||||
"""
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import concurrent.futures
|
||||
import logging
|
||||
import random
|
||||
from collections.abc import Generator
|
||||
from collections.abc import Iterable
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
@@ -10,7 +8,6 @@ import httpx
|
||||
from pydantic import BaseModel
|
||||
from retry import retry
|
||||
|
||||
from onyx.configs.app_configs import MAX_CHUNKS_PER_DOC_BATCH
|
||||
from onyx.configs.app_configs import RECENCY_BIAS_MULTIPLIER
|
||||
from onyx.configs.app_configs import RERANK_COUNT
|
||||
from onyx.configs.chat_configs import DOC_TIME_DECAY
|
||||
@@ -321,7 +318,7 @@ class VespaDocumentIndex(DocumentIndex):
|
||||
|
||||
def index(
|
||||
self,
|
||||
chunks: Iterable[DocMetadataAwareIndexChunk],
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
indexing_metadata: IndexingMetadata,
|
||||
) -> list[DocumentInsertionRecord]:
|
||||
doc_id_to_chunk_cnt_diff = indexing_metadata.doc_id_to_chunk_cnt_diff
|
||||
@@ -341,31 +338,22 @@ class VespaDocumentIndex(DocumentIndex):
|
||||
|
||||
# Vespa has restrictions on valid characters, yet document IDs come from
|
||||
# external w.r.t. this class. We need to sanitize them.
|
||||
#
|
||||
# Instead of materializing all cleaned chunks upfront, we stream them
|
||||
# through a generator that cleans IDs and builds the original-ID mapping
|
||||
# incrementally as chunks flow into Vespa.
|
||||
def _clean_and_track(
|
||||
chunks_iter: Iterable[DocMetadataAwareIndexChunk],
|
||||
id_map: dict[str, str],
|
||||
seen_ids: set[str],
|
||||
) -> Generator[DocMetadataAwareIndexChunk, None, None]:
|
||||
"""Cleans chunk IDs and builds the original-ID mapping
|
||||
incrementally as chunks flow through, avoiding a separate
|
||||
materialization pass."""
|
||||
for chunk in chunks_iter:
|
||||
original_id = chunk.source_document.id
|
||||
cleaned = clean_chunk_id_copy(chunk)
|
||||
cleaned_id = cleaned.source_document.id
|
||||
# Needed so the final DocumentInsertionRecord returned can have
|
||||
# the original document ID. cleaned_chunks might not contain IDs
|
||||
# exactly as callers supplied them.
|
||||
id_map[cleaned_id] = original_id
|
||||
seen_ids.add(cleaned_id)
|
||||
yield cleaned
|
||||
cleaned_chunks: list[DocMetadataAwareIndexChunk] = [
|
||||
clean_chunk_id_copy(chunk) for chunk in chunks
|
||||
]
|
||||
assert len(cleaned_chunks) == len(
|
||||
chunks
|
||||
), "Bug: Cleaned chunks and input chunks have different lengths."
|
||||
|
||||
new_document_id_to_original_document_id: dict[str, str] = {}
|
||||
all_cleaned_doc_ids: set[str] = set()
|
||||
# Needed so the final DocumentInsertionRecord returned can have the
|
||||
# original document ID. cleaned_chunks might not contain IDs exactly as
|
||||
# callers supplied them.
|
||||
new_document_id_to_original_document_id: dict[str, str] = dict()
|
||||
for i, cleaned_chunk in enumerate(cleaned_chunks):
|
||||
old_chunk = chunks[i]
|
||||
new_document_id_to_original_document_id[
|
||||
cleaned_chunk.source_document.id
|
||||
] = old_chunk.source_document.id
|
||||
|
||||
existing_docs: set[str] = set()
|
||||
|
||||
@@ -421,16 +409,8 @@ class VespaDocumentIndex(DocumentIndex):
|
||||
executor=executor,
|
||||
)
|
||||
|
||||
# Insert new Vespa documents, streaming through the cleaning
|
||||
# pipeline so chunks are never fully materialized.
|
||||
cleaned_chunks = _clean_and_track(
|
||||
chunks,
|
||||
new_document_id_to_original_document_id,
|
||||
all_cleaned_doc_ids,
|
||||
)
|
||||
for chunk_batch in batch_generator(
|
||||
cleaned_chunks, min(BATCH_SIZE, MAX_CHUNKS_PER_DOC_BATCH)
|
||||
):
|
||||
# Insert new Vespa documents.
|
||||
for chunk_batch in batch_generator(cleaned_chunks, BATCH_SIZE):
|
||||
batch_index_vespa_chunks(
|
||||
chunks=chunk_batch,
|
||||
index_name=self._index_name,
|
||||
@@ -439,6 +419,10 @@ class VespaDocumentIndex(DocumentIndex):
|
||||
executor=executor,
|
||||
)
|
||||
|
||||
all_cleaned_doc_ids: set[str] = {
|
||||
chunk.source_document.id for chunk in cleaned_chunks
|
||||
}
|
||||
|
||||
return [
|
||||
DocumentInsertionRecord(
|
||||
document_id=new_document_id_to_original_document_id[cleaned_doc_id],
|
||||
|
||||
@@ -44,6 +44,7 @@ class OnyxErrorCode(Enum):
|
||||
VALIDATION_ERROR = ("VALIDATION_ERROR", 400)
|
||||
INVALID_INPUT = ("INVALID_INPUT", 400)
|
||||
MISSING_REQUIRED_FIELD = ("MISSING_REQUIRED_FIELD", 400)
|
||||
QUERY_REJECTED = ("QUERY_REJECTED", 400)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Not Found (404)
|
||||
|
||||
@@ -5,6 +5,7 @@ Usage (Celery tasks and FastAPI handlers):
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload={"query": "...", "user_email": "...", "chat_session_id": "..."},
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
if isinstance(result, HookSkipped):
|
||||
@@ -14,7 +15,7 @@ Usage (Celery tasks and FastAPI handlers):
|
||||
# hook failed but fail strategy is SOFT — continue with original behavior
|
||||
...
|
||||
else:
|
||||
# result is the response payload dict from the customer's endpoint
|
||||
# result is a validated Pydantic model instance (spec.response_model)
|
||||
...
|
||||
|
||||
is_reachable update policy
|
||||
@@ -53,9 +54,11 @@ The executor uses three sessions:
|
||||
import json
|
||||
import time
|
||||
from typing import Any
|
||||
from typing import TypeVar
|
||||
|
||||
import httpx
|
||||
from pydantic import BaseModel
|
||||
from pydantic import ValidationError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from onyx.db.engine.sql_engine import get_session_with_current_tenant
|
||||
@@ -81,6 +84,9 @@ class HookSoftFailed:
|
||||
"""Hook was called but failed with SOFT fail strategy — continuing."""
|
||||
|
||||
|
||||
T = TypeVar("T", bound=BaseModel)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Private helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -268,22 +274,21 @@ def _persist_result(
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def execute_hook(
|
||||
*,
|
||||
db_session: Session,
|
||||
hook_point: HookPoint,
|
||||
def _execute_hook_inner(
|
||||
hook: Hook,
|
||||
payload: dict[str, Any],
|
||||
) -> dict[str, Any] | HookSkipped | HookSoftFailed:
|
||||
"""Execute the hook for the given hook point synchronously."""
|
||||
hook = _lookup_hook(db_session, hook_point)
|
||||
if isinstance(hook, HookSkipped):
|
||||
return hook
|
||||
response_type: type[T],
|
||||
) -> T | HookSoftFailed:
|
||||
"""Make the HTTP call, validate the response, and return a typed model.
|
||||
|
||||
Raises OnyxError on HARD failure. Returns HookSoftFailed on SOFT failure.
|
||||
"""
|
||||
timeout = hook.timeout_seconds
|
||||
hook_id = hook.id
|
||||
fail_strategy = hook.fail_strategy
|
||||
endpoint_url = hook.endpoint_url
|
||||
current_is_reachable: bool | None = hook.is_reachable
|
||||
|
||||
if not endpoint_url:
|
||||
raise ValueError(
|
||||
f"hook_id={hook_id} is active but has no endpoint_url — "
|
||||
@@ -300,13 +305,36 @@ def execute_hook(
|
||||
headers: dict[str, str] = {"Content-Type": "application/json"}
|
||||
if api_key:
|
||||
headers["Authorization"] = f"Bearer {api_key}"
|
||||
with httpx.Client(timeout=timeout) as client:
|
||||
with httpx.Client(
|
||||
timeout=timeout, follow_redirects=False
|
||||
) as client: # SSRF guard: never follow redirects
|
||||
response = client.post(endpoint_url, json=payload, headers=headers)
|
||||
except Exception as e:
|
||||
exc = e
|
||||
duration_ms = int((time.monotonic() - start) * 1000)
|
||||
|
||||
outcome = _process_response(response=response, exc=exc, timeout=timeout)
|
||||
|
||||
# Validate the response payload against response_type.
|
||||
# A validation failure downgrades the outcome to a failure so it is logged,
|
||||
# is_reachable is left unchanged (server responded — just a bad payload),
|
||||
# and fail_strategy is respected below.
|
||||
validated_model: T | None = None
|
||||
if outcome.is_success and outcome.response_payload is not None:
|
||||
try:
|
||||
validated_model = response_type.model_validate(outcome.response_payload)
|
||||
except ValidationError as e:
|
||||
msg = (
|
||||
f"Hook response failed validation against {response_type.__name__}: {e}"
|
||||
)
|
||||
outcome = _HttpOutcome(
|
||||
is_success=False,
|
||||
updated_is_reachable=None, # server responded — reachability unchanged
|
||||
status_code=outcome.status_code,
|
||||
error_message=msg,
|
||||
response_payload=None,
|
||||
)
|
||||
|
||||
# Skip the is_reachable write when the value would not change — avoids a
|
||||
# no-op DB round-trip on every call when the hook is already in the expected state.
|
||||
if outcome.updated_is_reachable == current_is_reachable:
|
||||
@@ -323,8 +351,41 @@ def execute_hook(
|
||||
f"Hook execution failed (soft fail) for hook_id={hook_id}: {outcome.error_message}"
|
||||
)
|
||||
return HookSoftFailed()
|
||||
if outcome.response_payload is None:
|
||||
raise ValueError(
|
||||
f"response_payload is None for successful hook call (hook_id={hook_id})"
|
||||
|
||||
if validated_model is None:
|
||||
raise OnyxError(
|
||||
OnyxErrorCode.INTERNAL_ERROR,
|
||||
f"validated_model is None for successful hook call (hook_id={hook_id})",
|
||||
)
|
||||
return outcome.response_payload
|
||||
return validated_model
|
||||
|
||||
|
||||
def execute_hook(
|
||||
*,
|
||||
db_session: Session,
|
||||
hook_point: HookPoint,
|
||||
payload: dict[str, Any],
|
||||
response_type: type[T],
|
||||
) -> T | HookSkipped | HookSoftFailed:
|
||||
"""Execute the hook for the given hook point synchronously.
|
||||
|
||||
Returns HookSkipped if no active hook is configured, HookSoftFailed if the
|
||||
hook failed with SOFT fail strategy, or a validated response model on success.
|
||||
Raises OnyxError on HARD failure or if the hook is misconfigured.
|
||||
"""
|
||||
hook = _lookup_hook(db_session, hook_point)
|
||||
if isinstance(hook, HookSkipped):
|
||||
return hook
|
||||
|
||||
fail_strategy = hook.fail_strategy
|
||||
hook_id = hook.id
|
||||
|
||||
try:
|
||||
return _execute_hook_inner(hook, payload, response_type)
|
||||
except Exception:
|
||||
if fail_strategy == HookFailStrategy.SOFT:
|
||||
logger.exception(
|
||||
f"Unexpected error in hook execution (soft fail) for hook_id={hook_id}"
|
||||
)
|
||||
return HookSoftFailed()
|
||||
raise
|
||||
|
||||
@@ -51,13 +51,12 @@ class HookPointSpec:
|
||||
output_schema: ClassVar[dict[str, Any]]
|
||||
|
||||
def __init_subclass__(cls, **kwargs: object) -> None:
|
||||
"""Enforce that every concrete subclass declares all required class attributes.
|
||||
"""Enforce that every subclass declares all required class attributes.
|
||||
|
||||
Called automatically by Python whenever a class inherits from HookPointSpec.
|
||||
Abstract subclasses (those still carrying unimplemented abstract methods) are
|
||||
skipped — they are intermediate base classes and may not yet define everything.
|
||||
Only fully concrete subclasses are validated, ensuring a clear TypeError at
|
||||
import time rather than a confusing AttributeError at runtime.
|
||||
Raises TypeError at import time if any required attribute is missing or if
|
||||
payload_model / response_model are not Pydantic BaseModel subclasses.
|
||||
input_schema and output_schema are derived automatically from the models.
|
||||
"""
|
||||
super().__init_subclass__(**kwargs)
|
||||
missing = [attr for attr in _REQUIRED_ATTRS if not hasattr(cls, attr)]
|
||||
|
||||
@@ -15,7 +15,7 @@ class QueryProcessingPayload(BaseModel):
|
||||
description="Email of the user submitting the query, or null if unauthenticated."
|
||||
)
|
||||
chat_session_id: str = Field(
|
||||
description="UUID of the chat session. Always present — the session is guaranteed to exist by the time this hook fires."
|
||||
description="UUID of the chat session, formatted as a hyphenated lowercase string (e.g. '550e8400-e29b-41d4-a716-446655440000'). Always present — the session is guaranteed to exist by the time this hook fires."
|
||||
)
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ class QueryProcessingResponse(BaseModel):
|
||||
default=None,
|
||||
description=(
|
||||
"The query to use in the pipeline. "
|
||||
"Null, empty string, or absent = reject the query."
|
||||
"Null, empty string, whitespace-only, or absent = reject the query."
|
||||
),
|
||||
)
|
||||
rejection_message: str | None = Field(
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
from collections.abc import Generator
|
||||
|
||||
@@ -21,8 +19,7 @@ from onyx.db.document import update_docs_updated_at__no_commit
|
||||
from onyx.db.document_set import fetch_document_sets_for_documents
|
||||
from onyx.indexing.indexing_pipeline import DocumentBatchPrepareContext
|
||||
from onyx.indexing.indexing_pipeline import index_doc_batch_prepare
|
||||
from onyx.indexing.models import ChunkEnrichmentContext
|
||||
from onyx.indexing.models import DocAwareChunk
|
||||
from onyx.indexing.models import BuildMetadataAwareChunksResult
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
from onyx.indexing.models import IndexChunk
|
||||
from onyx.indexing.models import UpdatableChunkData
|
||||
@@ -88,21 +85,14 @@ class DocumentIndexingBatchAdapter:
|
||||
) as transaction:
|
||||
yield transaction
|
||||
|
||||
def prepare_enrichment(
|
||||
def build_metadata_aware_chunks(
|
||||
self,
|
||||
context: DocumentBatchPrepareContext,
|
||||
chunks_with_embeddings: list[IndexChunk],
|
||||
chunk_content_scores: list[float],
|
||||
tenant_id: str,
|
||||
chunks: list[DocAwareChunk],
|
||||
) -> DocumentChunkEnricher:
|
||||
"""Do all DB lookups once and return a per-chunk enricher."""
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
|
||||
doc_id_to_new_chunk_cnt: dict[str, int] = {
|
||||
doc_id: 0 for doc_id in updatable_ids
|
||||
}
|
||||
for chunk in chunks:
|
||||
if chunk.source_document.id in doc_id_to_new_chunk_cnt:
|
||||
doc_id_to_new_chunk_cnt[chunk.source_document.id] += 1
|
||||
context: DocumentBatchPrepareContext,
|
||||
) -> BuildMetadataAwareChunksResult:
|
||||
"""Enrich chunks with access, document sets, boosts, token counts, and hierarchy."""
|
||||
|
||||
no_access = DocumentAccess.build(
|
||||
user_emails=[],
|
||||
@@ -112,30 +102,67 @@ class DocumentIndexingBatchAdapter:
|
||||
is_public=False,
|
||||
)
|
||||
|
||||
return DocumentChunkEnricher(
|
||||
doc_id_to_access_info=get_access_for_documents(
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
|
||||
doc_id_to_access_info = get_access_for_documents(
|
||||
document_ids=updatable_ids, db_session=self.db_session
|
||||
)
|
||||
doc_id_to_document_set = {
|
||||
document_id: document_sets
|
||||
for document_id, document_sets in fetch_document_sets_for_documents(
|
||||
document_ids=updatable_ids, db_session=self.db_session
|
||||
),
|
||||
doc_id_to_document_set={
|
||||
document_id: document_sets
|
||||
for document_id, document_sets in fetch_document_sets_for_documents(
|
||||
document_ids=updatable_ids, db_session=self.db_session
|
||||
)
|
||||
},
|
||||
doc_id_to_ancestor_ids=self._get_ancestor_ids_for_documents(
|
||||
context.updatable_docs, tenant_id
|
||||
),
|
||||
id_to_boost_map=context.id_to_boost_map,
|
||||
doc_id_to_previous_chunk_cnt={
|
||||
document_id: chunk_count
|
||||
for document_id, chunk_count in fetch_chunk_counts_for_documents(
|
||||
document_ids=updatable_ids,
|
||||
db_session=self.db_session,
|
||||
)
|
||||
},
|
||||
doc_id_to_new_chunk_cnt=dict(doc_id_to_new_chunk_cnt),
|
||||
no_access=no_access,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
}
|
||||
|
||||
doc_id_to_previous_chunk_cnt: dict[str, int] = {
|
||||
document_id: chunk_count
|
||||
for document_id, chunk_count in fetch_chunk_counts_for_documents(
|
||||
document_ids=updatable_ids,
|
||||
db_session=self.db_session,
|
||||
)
|
||||
}
|
||||
|
||||
doc_id_to_new_chunk_cnt: dict[str, int] = {
|
||||
doc_id: 0 for doc_id in updatable_ids
|
||||
}
|
||||
for chunk in chunks_with_embeddings:
|
||||
if chunk.source_document.id in doc_id_to_new_chunk_cnt:
|
||||
doc_id_to_new_chunk_cnt[chunk.source_document.id] += 1
|
||||
|
||||
# Get ancestor hierarchy node IDs for each document
|
||||
doc_id_to_ancestor_ids = self._get_ancestor_ids_for_documents(
|
||||
context.updatable_docs, tenant_id
|
||||
)
|
||||
|
||||
access_aware_chunks = [
|
||||
DocMetadataAwareIndexChunk.from_index_chunk(
|
||||
index_chunk=chunk,
|
||||
access=doc_id_to_access_info.get(chunk.source_document.id, no_access),
|
||||
document_sets=set(
|
||||
doc_id_to_document_set.get(chunk.source_document.id, [])
|
||||
),
|
||||
user_project=[],
|
||||
personas=[],
|
||||
boost=(
|
||||
context.id_to_boost_map[chunk.source_document.id]
|
||||
if chunk.source_document.id in context.id_to_boost_map
|
||||
else DEFAULT_BOOST
|
||||
),
|
||||
tenant_id=tenant_id,
|
||||
aggregated_chunk_boost_factor=chunk_content_scores[chunk_num],
|
||||
ancestor_hierarchy_node_ids=doc_id_to_ancestor_ids[
|
||||
chunk.source_document.id
|
||||
],
|
||||
)
|
||||
for chunk_num, chunk in enumerate(chunks_with_embeddings)
|
||||
]
|
||||
|
||||
return BuildMetadataAwareChunksResult(
|
||||
chunks=access_aware_chunks,
|
||||
doc_id_to_previous_chunk_cnt=doc_id_to_previous_chunk_cnt,
|
||||
doc_id_to_new_chunk_cnt=doc_id_to_new_chunk_cnt,
|
||||
user_file_id_to_raw_text={},
|
||||
user_file_id_to_token_count={},
|
||||
)
|
||||
|
||||
def _get_ancestor_ids_for_documents(
|
||||
@@ -176,7 +203,7 @@ class DocumentIndexingBatchAdapter:
|
||||
context: DocumentBatchPrepareContext,
|
||||
updatable_chunk_data: list[UpdatableChunkData],
|
||||
filtered_documents: list[Document],
|
||||
enrichment: ChunkEnrichmentContext,
|
||||
result: BuildMetadataAwareChunksResult,
|
||||
) -> None:
|
||||
"""Finalize DB updates, store plaintext, and mark docs as indexed."""
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
@@ -200,7 +227,7 @@ class DocumentIndexingBatchAdapter:
|
||||
|
||||
update_docs_chunk_count__no_commit(
|
||||
document_ids=updatable_ids,
|
||||
doc_id_to_chunk_count=enrichment.doc_id_to_new_chunk_cnt,
|
||||
doc_id_to_chunk_count=result.doc_id_to_new_chunk_cnt,
|
||||
db_session=self.db_session,
|
||||
)
|
||||
|
||||
@@ -222,52 +249,3 @@ class DocumentIndexingBatchAdapter:
|
||||
)
|
||||
|
||||
self.db_session.commit()
|
||||
|
||||
|
||||
class DocumentChunkEnricher:
|
||||
"""Pre-computed metadata for per-chunk enrichment of connector documents."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
doc_id_to_access_info: dict[str, DocumentAccess],
|
||||
doc_id_to_document_set: dict[str, list[str]],
|
||||
doc_id_to_ancestor_ids: dict[str, list[int]],
|
||||
id_to_boost_map: dict[str, int],
|
||||
doc_id_to_previous_chunk_cnt: dict[str, int],
|
||||
doc_id_to_new_chunk_cnt: dict[str, int],
|
||||
no_access: DocumentAccess,
|
||||
tenant_id: str,
|
||||
) -> None:
|
||||
self._doc_id_to_access_info = doc_id_to_access_info
|
||||
self._doc_id_to_document_set = doc_id_to_document_set
|
||||
self._doc_id_to_ancestor_ids = doc_id_to_ancestor_ids
|
||||
self._id_to_boost_map = id_to_boost_map
|
||||
self._no_access = no_access
|
||||
self._tenant_id = tenant_id
|
||||
self.doc_id_to_previous_chunk_cnt = doc_id_to_previous_chunk_cnt
|
||||
self.doc_id_to_new_chunk_cnt = doc_id_to_new_chunk_cnt
|
||||
|
||||
def enrich_chunk(
|
||||
self, chunk: IndexChunk, score: float
|
||||
) -> DocMetadataAwareIndexChunk:
|
||||
return DocMetadataAwareIndexChunk.from_index_chunk(
|
||||
index_chunk=chunk,
|
||||
access=self._doc_id_to_access_info.get(
|
||||
chunk.source_document.id, self._no_access
|
||||
),
|
||||
document_sets=set(
|
||||
self._doc_id_to_document_set.get(chunk.source_document.id, [])
|
||||
),
|
||||
user_project=[],
|
||||
personas=[],
|
||||
boost=(
|
||||
self._id_to_boost_map[chunk.source_document.id]
|
||||
if chunk.source_document.id in self._id_to_boost_map
|
||||
else DEFAULT_BOOST
|
||||
),
|
||||
tenant_id=self._tenant_id,
|
||||
aggregated_chunk_boost_factor=score,
|
||||
ancestor_hierarchy_node_ids=self._doc_id_to_ancestor_ids[
|
||||
chunk.source_document.id
|
||||
],
|
||||
)
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import datetime
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from collections.abc import Generator
|
||||
from uuid import UUID
|
||||
|
||||
@@ -27,8 +24,7 @@ from onyx.db.user_file import fetch_persona_ids_for_user_files
|
||||
from onyx.db.user_file import fetch_user_project_ids_for_user_files
|
||||
from onyx.file_store.utils import store_user_file_plaintext
|
||||
from onyx.indexing.indexing_pipeline import DocumentBatchPrepareContext
|
||||
from onyx.indexing.models import ChunkEnrichmentContext
|
||||
from onyx.indexing.models import DocAwareChunk
|
||||
from onyx.indexing.models import BuildMetadataAwareChunksResult
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
from onyx.indexing.models import IndexChunk
|
||||
from onyx.indexing.models import UpdatableChunkData
|
||||
@@ -105,20 +101,13 @@ class UserFileIndexingAdapter:
|
||||
f"Failed to acquire locks after {_NUM_LOCK_ATTEMPTS} attempts for user files: {[doc.id for doc in documents]}"
|
||||
)
|
||||
|
||||
def prepare_enrichment(
|
||||
def build_metadata_aware_chunks(
|
||||
self,
|
||||
context: DocumentBatchPrepareContext,
|
||||
chunks_with_embeddings: list[IndexChunk],
|
||||
chunk_content_scores: list[float],
|
||||
tenant_id: str,
|
||||
chunks: list[DocAwareChunk],
|
||||
) -> UserFileChunkEnricher:
|
||||
"""Do all DB lookups and pre-compute file metadata from chunks."""
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
|
||||
doc_id_to_new_chunk_cnt: dict[str, int] = defaultdict(int)
|
||||
content_by_file: dict[str, list[str]] = defaultdict(list)
|
||||
for chunk in chunks:
|
||||
doc_id_to_new_chunk_cnt[chunk.source_document.id] += 1
|
||||
content_by_file[chunk.source_document.id].append(chunk.content)
|
||||
context: DocumentBatchPrepareContext,
|
||||
) -> BuildMetadataAwareChunksResult:
|
||||
|
||||
no_access = DocumentAccess.build(
|
||||
user_emails=[],
|
||||
@@ -128,6 +117,7 @@ class UserFileIndexingAdapter:
|
||||
is_public=False,
|
||||
)
|
||||
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
user_file_id_to_project_ids = fetch_user_project_ids_for_user_files(
|
||||
user_file_ids=updatable_ids,
|
||||
db_session=self.db_session,
|
||||
@@ -148,6 +138,17 @@ class UserFileIndexingAdapter:
|
||||
)
|
||||
}
|
||||
|
||||
user_file_id_to_new_chunk_cnt: dict[str, int] = {
|
||||
user_file_id: len(
|
||||
[
|
||||
chunk
|
||||
for chunk in chunks_with_embeddings
|
||||
if chunk.source_document.id == user_file_id
|
||||
]
|
||||
)
|
||||
for user_file_id in updatable_ids
|
||||
}
|
||||
|
||||
# Initialize tokenizer used for token count calculation
|
||||
try:
|
||||
llm = get_default_llm()
|
||||
@@ -162,9 +163,15 @@ class UserFileIndexingAdapter:
|
||||
user_file_id_to_raw_text: dict[str, str] = {}
|
||||
user_file_id_to_token_count: dict[str, int | None] = {}
|
||||
for user_file_id in updatable_ids:
|
||||
contents = content_by_file.get(user_file_id)
|
||||
if contents:
|
||||
combined_content = " ".join(contents)
|
||||
user_file_chunks = [
|
||||
chunk
|
||||
for chunk in chunks_with_embeddings
|
||||
if chunk.source_document.id == user_file_id
|
||||
]
|
||||
if user_file_chunks:
|
||||
combined_content = " ".join(
|
||||
[chunk.content for chunk in user_file_chunks]
|
||||
)
|
||||
user_file_id_to_raw_text[str(user_file_id)] = combined_content
|
||||
token_count = (
|
||||
len(llm_tokenizer.encode(combined_content)) if llm_tokenizer else 0
|
||||
@@ -174,16 +181,28 @@ class UserFileIndexingAdapter:
|
||||
user_file_id_to_raw_text[str(user_file_id)] = ""
|
||||
user_file_id_to_token_count[str(user_file_id)] = None
|
||||
|
||||
return UserFileChunkEnricher(
|
||||
user_file_id_to_access=user_file_id_to_access,
|
||||
user_file_id_to_project_ids=user_file_id_to_project_ids,
|
||||
user_file_id_to_persona_ids=user_file_id_to_persona_ids,
|
||||
access_aware_chunks = [
|
||||
DocMetadataAwareIndexChunk.from_index_chunk(
|
||||
index_chunk=chunk,
|
||||
access=user_file_id_to_access.get(chunk.source_document.id, no_access),
|
||||
document_sets=set(),
|
||||
user_project=user_file_id_to_project_ids.get(
|
||||
chunk.source_document.id, []
|
||||
),
|
||||
personas=user_file_id_to_persona_ids.get(chunk.source_document.id, []),
|
||||
boost=DEFAULT_BOOST,
|
||||
tenant_id=tenant_id,
|
||||
aggregated_chunk_boost_factor=chunk_content_scores[chunk_num],
|
||||
)
|
||||
for chunk_num, chunk in enumerate(chunks_with_embeddings)
|
||||
]
|
||||
|
||||
return BuildMetadataAwareChunksResult(
|
||||
chunks=access_aware_chunks,
|
||||
doc_id_to_previous_chunk_cnt=user_file_id_to_previous_chunk_cnt,
|
||||
doc_id_to_new_chunk_cnt=dict(doc_id_to_new_chunk_cnt),
|
||||
doc_id_to_new_chunk_cnt=user_file_id_to_new_chunk_cnt,
|
||||
user_file_id_to_raw_text=user_file_id_to_raw_text,
|
||||
user_file_id_to_token_count=user_file_id_to_token_count,
|
||||
no_access=no_access,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
|
||||
def _notify_assistant_owners_if_files_ready(
|
||||
@@ -227,9 +246,8 @@ class UserFileIndexingAdapter:
|
||||
context: DocumentBatchPrepareContext,
|
||||
updatable_chunk_data: list[UpdatableChunkData], # noqa: ARG002
|
||||
filtered_documents: list[Document], # noqa: ARG002
|
||||
enrichment: ChunkEnrichmentContext,
|
||||
result: BuildMetadataAwareChunksResult,
|
||||
) -> None:
|
||||
assert isinstance(enrichment, UserFileChunkEnricher)
|
||||
user_file_ids = [doc.id for doc in context.updatable_docs]
|
||||
|
||||
user_files = (
|
||||
@@ -245,10 +263,8 @@ class UserFileIndexingAdapter:
|
||||
user_file.last_project_sync_at = datetime.datetime.now(
|
||||
datetime.timezone.utc
|
||||
)
|
||||
user_file.chunk_count = enrichment.doc_id_to_new_chunk_cnt.get(
|
||||
str(user_file.id), 0
|
||||
)
|
||||
user_file.token_count = enrichment.user_file_id_to_token_count[
|
||||
user_file.chunk_count = result.doc_id_to_new_chunk_cnt[str(user_file.id)]
|
||||
user_file.token_count = result.user_file_id_to_token_count[
|
||||
str(user_file.id)
|
||||
]
|
||||
|
||||
@@ -260,54 +276,8 @@ class UserFileIndexingAdapter:
|
||||
# Store the plaintext in the file store for faster retrieval
|
||||
# NOTE: this creates its own session to avoid committing the overall
|
||||
# transaction.
|
||||
for user_file_id, raw_text in enrichment.user_file_id_to_raw_text.items():
|
||||
for user_file_id, raw_text in result.user_file_id_to_raw_text.items():
|
||||
store_user_file_plaintext(
|
||||
user_file_id=UUID(user_file_id),
|
||||
plaintext_content=raw_text,
|
||||
)
|
||||
|
||||
|
||||
class UserFileChunkEnricher:
|
||||
"""Pre-computed metadata for per-chunk enrichment of user-uploaded files."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
user_file_id_to_access: dict[str, DocumentAccess],
|
||||
user_file_id_to_project_ids: dict[str, list[int]],
|
||||
user_file_id_to_persona_ids: dict[str, list[int]],
|
||||
doc_id_to_previous_chunk_cnt: dict[str, int],
|
||||
doc_id_to_new_chunk_cnt: dict[str, int],
|
||||
user_file_id_to_raw_text: dict[str, str],
|
||||
user_file_id_to_token_count: dict[str, int | None],
|
||||
no_access: DocumentAccess,
|
||||
tenant_id: str,
|
||||
) -> None:
|
||||
self._user_file_id_to_access = user_file_id_to_access
|
||||
self._user_file_id_to_project_ids = user_file_id_to_project_ids
|
||||
self._user_file_id_to_persona_ids = user_file_id_to_persona_ids
|
||||
self._no_access = no_access
|
||||
self._tenant_id = tenant_id
|
||||
self.doc_id_to_previous_chunk_cnt = doc_id_to_previous_chunk_cnt
|
||||
self.doc_id_to_new_chunk_cnt = doc_id_to_new_chunk_cnt
|
||||
self.user_file_id_to_raw_text = user_file_id_to_raw_text
|
||||
self.user_file_id_to_token_count = user_file_id_to_token_count
|
||||
|
||||
def enrich_chunk(
|
||||
self, chunk: IndexChunk, score: float
|
||||
) -> DocMetadataAwareIndexChunk:
|
||||
return DocMetadataAwareIndexChunk.from_index_chunk(
|
||||
index_chunk=chunk,
|
||||
access=self._user_file_id_to_access.get(
|
||||
chunk.source_document.id, self._no_access
|
||||
),
|
||||
document_sets=set(),
|
||||
user_project=self._user_file_id_to_project_ids.get(
|
||||
chunk.source_document.id, []
|
||||
),
|
||||
personas=self._user_file_id_to_persona_ids.get(
|
||||
chunk.source_document.id, []
|
||||
),
|
||||
boost=DEFAULT_BOOST,
|
||||
tenant_id=self._tenant_id,
|
||||
aggregated_chunk_boost_factor=score,
|
||||
)
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
import pickle
|
||||
import tempfile
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Generator
|
||||
from collections.abc import Iterator
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Protocol
|
||||
|
||||
from pydantic import BaseModel
|
||||
@@ -15,7 +9,6 @@ from sqlalchemy.orm import Session
|
||||
from onyx.configs.app_configs import DEFAULT_CONTEXTUAL_RAG_LLM_NAME
|
||||
from onyx.configs.app_configs import DEFAULT_CONTEXTUAL_RAG_LLM_PROVIDER
|
||||
from onyx.configs.app_configs import ENABLE_CONTEXTUAL_RAG
|
||||
from onyx.configs.app_configs import MAX_CHUNKS_PER_DOC_BATCH
|
||||
from onyx.configs.app_configs import MAX_DOCUMENT_CHARS
|
||||
from onyx.configs.app_configs import MAX_TOKENS_FOR_FULL_INCLUSION
|
||||
from onyx.configs.app_configs import USE_CHUNK_SUMMARY
|
||||
@@ -54,8 +47,6 @@ from onyx.indexing.chunker import Chunker
|
||||
from onyx.indexing.embedder import embed_chunks_with_failure_handling
|
||||
from onyx.indexing.embedder import IndexingEmbedder
|
||||
from onyx.indexing.models import DocAwareChunk
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
from onyx.indexing.models import IndexChunk
|
||||
from onyx.indexing.models import IndexingBatchAdapter
|
||||
from onyx.indexing.models import UpdatableChunkData
|
||||
from onyx.indexing.vector_db_insertion import write_chunks_to_vector_db_with_backoff
|
||||
@@ -72,7 +63,6 @@ from onyx.natural_language_processing.utils import tokenizer_trim_middle
|
||||
from onyx.prompts.contextual_retrieval import CONTEXTUAL_RAG_PROMPT1
|
||||
from onyx.prompts.contextual_retrieval import CONTEXTUAL_RAG_PROMPT2
|
||||
from onyx.prompts.contextual_retrieval import DOCUMENT_SUMMARY_PROMPT
|
||||
from onyx.utils.batching import batch_generator
|
||||
from onyx.utils.logger import setup_logger
|
||||
from onyx.utils.postgres_sanitization import sanitize_documents_for_postgres
|
||||
from onyx.utils.threadpool_concurrency import run_functions_tuples_in_parallel
|
||||
@@ -101,21 +91,6 @@ class IndexingPipelineResult(BaseModel):
|
||||
|
||||
failures: list[ConnectorFailure]
|
||||
|
||||
@classmethod
|
||||
def empty(cls, total_docs: int) -> "IndexingPipelineResult":
|
||||
return cls(
|
||||
new_docs=0,
|
||||
total_docs=total_docs,
|
||||
total_chunks=0,
|
||||
failures=[],
|
||||
)
|
||||
|
||||
|
||||
class ChunkEmbeddingResult(BaseModel):
|
||||
embedding_path: Path
|
||||
successful_chunk_ids: list[tuple[int, str]] # (chunk_id, document_id)
|
||||
connector_failures: list[ConnectorFailure]
|
||||
|
||||
|
||||
class IndexingPipelineProtocol(Protocol):
|
||||
def __call__(
|
||||
@@ -164,105 +139,6 @@ def _upsert_documents_in_db(
|
||||
)
|
||||
|
||||
|
||||
def embed_chunks_in_batches(
|
||||
chunks: list[DocAwareChunk],
|
||||
embedder: IndexingEmbedder,
|
||||
tenant_id: str,
|
||||
request_id: str | None,
|
||||
) -> ChunkEmbeddingResult:
|
||||
"""Embeds chunks in batches of MAX_CHUNKS_PER_DOC_BATCH, spilling each batch to disk.
|
||||
|
||||
For each batch:
|
||||
1. Embed the chunks via embed_chunks_with_failure_handling
|
||||
2. Pickle the resulting IndexChunks to a temp file
|
||||
3. Clear the batch from memory
|
||||
|
||||
Returns:
|
||||
- Path to the temp directory containing one pickle file per batch
|
||||
- Accumulated embedding failures across all batches
|
||||
"""
|
||||
tmpdir = Path(tempfile.mkdtemp(prefix="onyx_embeddings_"))
|
||||
successful_chunk_ids: list[tuple[int, str]] = []
|
||||
all_embedding_failures: list[ConnectorFailure] = []
|
||||
|
||||
for batch_idx, chunk_batch in enumerate(
|
||||
batch_generator(chunks, MAX_CHUNKS_PER_DOC_BATCH)
|
||||
):
|
||||
logger.debug(f"Embedding batch {batch_idx}: {len(chunk_batch)} chunks")
|
||||
|
||||
chunks_with_embeddings, embedding_failures = embed_chunks_with_failure_handling(
|
||||
chunks=chunk_batch,
|
||||
embedder=embedder,
|
||||
tenant_id=tenant_id,
|
||||
request_id=request_id,
|
||||
)
|
||||
all_embedding_failures.extend(embedding_failures)
|
||||
|
||||
# Track which chunks succeeded by excluding failed doc IDs
|
||||
failed_doc_ids = {
|
||||
f.failed_document.document_id
|
||||
for f in embedding_failures
|
||||
if f.failed_document
|
||||
}
|
||||
successful_chunk_ids.extend(
|
||||
(c.chunk_id, c.source_document.id)
|
||||
for c in chunk_batch
|
||||
if c.source_document.id not in failed_doc_ids
|
||||
)
|
||||
|
||||
# Spill embeddings to disk
|
||||
batch_file = tmpdir / f"batch_{batch_idx}.pkl"
|
||||
with open(batch_file, "wb") as f:
|
||||
pickle.dump(chunks_with_embeddings, f)
|
||||
|
||||
# Free memory
|
||||
del chunks_with_embeddings
|
||||
|
||||
return ChunkEmbeddingResult(
|
||||
embedding_path=tmpdir,
|
||||
successful_chunk_ids=successful_chunk_ids,
|
||||
connector_failures=all_embedding_failures,
|
||||
)
|
||||
|
||||
|
||||
class EmbedStream:
|
||||
def __init__(self, tmpdir: Path) -> None:
|
||||
self._tmpdir = tmpdir
|
||||
|
||||
def stream(self) -> Iterator[IndexChunk]:
|
||||
for batch_file in sorted(
|
||||
self._tmpdir.glob("batch_*.pkl"),
|
||||
key=lambda p: int(p.stem.removeprefix("batch_")),
|
||||
):
|
||||
with open(batch_file, "rb") as f:
|
||||
batch: list[IndexChunk] = pickle.load(f)
|
||||
yield from batch
|
||||
|
||||
|
||||
@contextmanager
|
||||
def use_embed_stream(
|
||||
tmpdir: Path,
|
||||
) -> Generator[EmbedStream, None, None]:
|
||||
"""Context manager that provides a factory for creating chunk iterators.
|
||||
|
||||
Each call to stream() returns a fresh generator over the embedded chunks
|
||||
on disk, so the data can be iterated multiple times (e.g. once per
|
||||
document_index). Files are cleaned up when the context manager exits.
|
||||
|
||||
Usage:
|
||||
with use_embed_stream(embedding_path) as embed_stream:
|
||||
for document_index in document_indices:
|
||||
for chunk in embed_stream.stream():
|
||||
...
|
||||
"""
|
||||
try:
|
||||
yield EmbedStream(tmpdir)
|
||||
finally:
|
||||
for batch_file in tmpdir.glob("batch_*.pkl"):
|
||||
batch_file.unlink(missing_ok=True)
|
||||
tmpdir.rmdir()
|
||||
|
||||
|
||||
def get_doc_ids_to_update(
|
||||
documents: list[Document], db_docs: list[DBDocument]
|
||||
) -> list[Document]:
|
||||
@@ -761,29 +637,6 @@ def add_contextual_summaries(
|
||||
return chunks
|
||||
|
||||
|
||||
def _verify_indexing_completeness(
|
||||
insertion_records: list[DocumentInsertionRecord],
|
||||
write_failures: list[ConnectorFailure],
|
||||
embedding_failed_doc_ids: set[str],
|
||||
updatable_ids: list[str],
|
||||
document_index_name: str,
|
||||
) -> None:
|
||||
"""Verify that every updatable document was either indexed or reported as failed."""
|
||||
all_returned_doc_ids = (
|
||||
{r.document_id for r in insertion_records}
|
||||
| {f.failed_document.document_id for f in write_failures if f.failed_document}
|
||||
| embedding_failed_doc_ids
|
||||
)
|
||||
if all_returned_doc_ids != set(updatable_ids):
|
||||
raise RuntimeError(
|
||||
f"Some documents were not successfully indexed. "
|
||||
f"Updatable IDs: {updatable_ids}, "
|
||||
f"Returned IDs: {all_returned_doc_ids}. "
|
||||
f"This should never happen. "
|
||||
f"This occured for document index {document_index_name}"
|
||||
)
|
||||
|
||||
|
||||
@log_function_time(debug_only=True)
|
||||
def index_doc_batch(
|
||||
*,
|
||||
@@ -819,7 +672,12 @@ def index_doc_batch(
|
||||
filtered_documents = filter_fnc(document_batch)
|
||||
context = adapter.prepare(filtered_documents, ignore_time_skip)
|
||||
if not context:
|
||||
return IndexingPipelineResult.empty(len(filtered_documents))
|
||||
return IndexingPipelineResult(
|
||||
new_docs=0,
|
||||
total_docs=len(filtered_documents),
|
||||
total_chunks=0,
|
||||
failures=[],
|
||||
)
|
||||
|
||||
# Convert documents to IndexingDocument objects with processed section
|
||||
# logger.debug("Processing image sections")
|
||||
@@ -858,98 +716,117 @@ def index_doc_batch(
|
||||
)
|
||||
|
||||
logger.debug("Starting embedding")
|
||||
embedding_result = embed_chunks_in_batches(
|
||||
chunks=chunks,
|
||||
embedder=embedder,
|
||||
tenant_id=tenant_id,
|
||||
request_id=request_id,
|
||||
chunks_with_embeddings, embedding_failures = (
|
||||
embed_chunks_with_failure_handling(
|
||||
chunks=chunks,
|
||||
embedder=embedder,
|
||||
tenant_id=tenant_id,
|
||||
request_id=request_id,
|
||||
)
|
||||
if chunks
|
||||
else ([], [])
|
||||
)
|
||||
|
||||
chunk_content_scores = [1.0] * len(chunks_with_embeddings)
|
||||
|
||||
updatable_ids = [doc.id for doc in context.updatable_docs]
|
||||
updatable_chunk_data = [
|
||||
UpdatableChunkData(
|
||||
chunk_id=chunk_id,
|
||||
document_id=document_id,
|
||||
boost_score=1.0,
|
||||
chunk_id=chunk.chunk_id,
|
||||
document_id=chunk.source_document.id,
|
||||
boost_score=score,
|
||||
)
|
||||
for chunk_id, document_id in embedding_result.successful_chunk_ids
|
||||
for chunk, score in zip(chunks_with_embeddings, chunk_content_scores)
|
||||
]
|
||||
|
||||
# Acquires a lock on the documents so that no other process can modify them
|
||||
# NOTE: don't need to acquire till here, since this is when the actual race condition
|
||||
# with Vespa can occur.
|
||||
with (
|
||||
adapter.lock_context(context.updatable_docs),
|
||||
use_embed_stream(embedding_result.embedding_path) as embed_stream,
|
||||
):
|
||||
enricher = adapter.prepare_enrichment(
|
||||
with adapter.lock_context(context.updatable_docs):
|
||||
# we're concerned about race conditions where multiple simultaneous indexings might result
|
||||
# in one set of metadata overwriting another one in vespa.
|
||||
# we still write data here for the immediate and most likely correct sync, but
|
||||
# to resolve this, an update of the last modified field at the end of this loop
|
||||
# always triggers a final metadata sync via the celery queue
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=chunks_with_embeddings,
|
||||
chunk_content_scores=chunk_content_scores,
|
||||
tenant_id=tenant_id,
|
||||
context=context,
|
||||
tenant_id=tenant_id,
|
||||
chunks=chunks,
|
||||
)
|
||||
|
||||
index_batch_params = IndexBatchParams(
|
||||
doc_id_to_previous_chunk_cnt=enricher.doc_id_to_previous_chunk_cnt,
|
||||
doc_id_to_new_chunk_cnt=enricher.doc_id_to_new_chunk_cnt,
|
||||
tenant_id=tenant_id,
|
||||
large_chunks_enabled=chunker.enable_large_chunks,
|
||||
)
|
||||
|
||||
embedding_failed_doc_ids = {
|
||||
f.failed_document.document_id
|
||||
for f in embedding_result.connector_failures
|
||||
if f.failed_document
|
||||
}
|
||||
short_descriptor_list = [chunk.to_short_descriptor() for chunk in result.chunks]
|
||||
short_descriptor_log = str(short_descriptor_list)[:1024]
|
||||
logger.debug(f"Indexing the following chunks: {short_descriptor_log}")
|
||||
|
||||
primary_doc_idx_insertion_records: list[DocumentInsertionRecord] | None = None
|
||||
primary_doc_idx_vector_db_write_failures: list[ConnectorFailure] | None = None
|
||||
|
||||
for document_index in document_indices:
|
||||
# A document will not be spread across different batches, so all the
|
||||
# documents with chunks in this set, are fully represented by the chunks
|
||||
# in this set
|
||||
def _enriched_stream() -> Iterator[DocMetadataAwareIndexChunk]:
|
||||
for chunk in embed_stream.stream():
|
||||
yield enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
insertion_records, write_failures = write_chunks_to_vector_db_with_backoff(
|
||||
(
|
||||
insertion_records,
|
||||
vector_db_write_failures,
|
||||
) = write_chunks_to_vector_db_with_backoff(
|
||||
document_index=document_index,
|
||||
chunks=_enriched_stream(),
|
||||
index_batch_params=index_batch_params,
|
||||
chunks=result.chunks,
|
||||
index_batch_params=IndexBatchParams(
|
||||
doc_id_to_previous_chunk_cnt=result.doc_id_to_previous_chunk_cnt,
|
||||
doc_id_to_new_chunk_cnt=result.doc_id_to_new_chunk_cnt,
|
||||
tenant_id=tenant_id,
|
||||
large_chunks_enabled=chunker.enable_large_chunks,
|
||||
),
|
||||
)
|
||||
|
||||
_verify_indexing_completeness(
|
||||
insertion_records=insertion_records,
|
||||
write_failures=write_failures,
|
||||
embedding_failed_doc_ids=embedding_failed_doc_ids,
|
||||
updatable_ids=updatable_ids,
|
||||
document_index_name=document_index.__class__.__name__,
|
||||
all_returned_doc_ids: set[str] = (
|
||||
{record.document_id for record in insertion_records}
|
||||
.union(
|
||||
{
|
||||
record.failed_document.document_id
|
||||
for record in vector_db_write_failures
|
||||
if record.failed_document
|
||||
}
|
||||
)
|
||||
.union(
|
||||
{
|
||||
record.failed_document.document_id
|
||||
for record in embedding_failures
|
||||
if record.failed_document
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
if all_returned_doc_ids != set(updatable_ids):
|
||||
raise RuntimeError(
|
||||
f"Some documents were not successfully indexed. "
|
||||
f"Updatable IDs: {updatable_ids}, "
|
||||
f"Returned IDs: {all_returned_doc_ids}. "
|
||||
"This should never happen."
|
||||
f"This occured for document index {document_index.__class__.__name__}"
|
||||
)
|
||||
# We treat the first document index we got as the primary one used
|
||||
# for reporting the state of indexing.
|
||||
if primary_doc_idx_insertion_records is None:
|
||||
primary_doc_idx_insertion_records = insertion_records
|
||||
if primary_doc_idx_vector_db_write_failures is None:
|
||||
primary_doc_idx_vector_db_write_failures = write_failures
|
||||
primary_doc_idx_vector_db_write_failures = vector_db_write_failures
|
||||
|
||||
adapter.post_index(
|
||||
context=context,
|
||||
updatable_chunk_data=updatable_chunk_data,
|
||||
filtered_documents=filtered_documents,
|
||||
enrichment=enricher,
|
||||
result=result,
|
||||
)
|
||||
|
||||
assert primary_doc_idx_insertion_records is not None
|
||||
assert primary_doc_idx_vector_db_write_failures is not None
|
||||
return IndexingPipelineResult(
|
||||
new_docs=sum(
|
||||
1 for r in primary_doc_idx_insertion_records if not r.already_existed
|
||||
new_docs=len(
|
||||
[r for r in primary_doc_idx_insertion_records if not r.already_existed]
|
||||
),
|
||||
total_docs=len(filtered_documents),
|
||||
total_chunks=len(embedding_result.successful_chunk_ids),
|
||||
failures=primary_doc_idx_vector_db_write_failures
|
||||
+ embedding_result.connector_failures,
|
||||
total_chunks=len(chunks_with_embeddings),
|
||||
failures=primary_doc_idx_vector_db_write_failures + embedding_failures,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -235,16 +235,12 @@ class UpdatableChunkData(BaseModel):
|
||||
boost_score: float
|
||||
|
||||
|
||||
class ChunkEnrichmentContext(Protocol):
|
||||
"""Returned by prepare_enrichment. Holds pre-computed metadata lookups
|
||||
and provides per-chunk enrichment."""
|
||||
|
||||
class BuildMetadataAwareChunksResult(BaseModel):
|
||||
chunks: list[DocMetadataAwareIndexChunk]
|
||||
doc_id_to_previous_chunk_cnt: dict[str, int]
|
||||
doc_id_to_new_chunk_cnt: dict[str, int]
|
||||
|
||||
def enrich_chunk(
|
||||
self, chunk: IndexChunk, score: float
|
||||
) -> DocMetadataAwareIndexChunk: ...
|
||||
user_file_id_to_raw_text: dict[str, str]
|
||||
user_file_id_to_token_count: dict[str, int | None]
|
||||
|
||||
|
||||
class IndexingBatchAdapter(Protocol):
|
||||
@@ -258,24 +254,18 @@ class IndexingBatchAdapter(Protocol):
|
||||
) -> Generator[TransactionalContext, None, None]:
|
||||
"""Provide a transaction/row-lock context for critical updates."""
|
||||
|
||||
def prepare_enrichment(
|
||||
def build_metadata_aware_chunks(
|
||||
self,
|
||||
context: "DocumentBatchPrepareContext",
|
||||
chunks_with_embeddings: list[IndexChunk],
|
||||
chunk_content_scores: list[float],
|
||||
tenant_id: str,
|
||||
chunks: list[DocAwareChunk],
|
||||
) -> ChunkEnrichmentContext:
|
||||
"""Prepare per-chunk enrichment data (access, document sets, boost, etc.).
|
||||
|
||||
Precondition: ``chunks`` have already been through the embedding step
|
||||
(i.e. they are ``IndexChunk`` instances with populated embeddings,
|
||||
passed here as the base ``DocAwareChunk`` type).
|
||||
"""
|
||||
...
|
||||
context: "DocumentBatchPrepareContext",
|
||||
) -> BuildMetadataAwareChunksResult: ...
|
||||
|
||||
def post_index(
|
||||
self,
|
||||
context: "DocumentBatchPrepareContext",
|
||||
updatable_chunk_data: list[UpdatableChunkData],
|
||||
filtered_documents: list[Document],
|
||||
enrichment: ChunkEnrichmentContext,
|
||||
result: BuildMetadataAwareChunksResult,
|
||||
) -> None: ...
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
External dependency unit tests for UserFileIndexingAdapter metadata writing.
|
||||
|
||||
Validates that prepare_enrichment produces DocMetadataAwareIndexChunk
|
||||
Validates that build_metadata_aware_chunks produces DocMetadataAwareIndexChunk
|
||||
objects with both `user_project` and `personas` fields populated correctly
|
||||
based on actual DB associations.
|
||||
|
||||
@@ -127,7 +127,7 @@ def _make_index_chunk(user_file: UserFile) -> IndexChunk:
|
||||
|
||||
|
||||
class TestAdapterWritesBothMetadataFields:
|
||||
"""prepare_enrichment must populate user_project AND personas."""
|
||||
"""build_metadata_aware_chunks must populate user_project AND personas."""
|
||||
|
||||
@patch(
|
||||
"onyx.indexing.adapters.user_file_indexing_adapter.get_default_llm",
|
||||
@@ -153,13 +153,15 @@ class TestAdapterWritesBothMetadataFields:
|
||||
doc = chunk.source_document
|
||||
context = DocumentBatchPrepareContext(updatable_docs=[doc], id_to_boost_map={})
|
||||
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id=TEST_TENANT_ID,
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
aware_chunk = enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
assert len(result.chunks) == 1
|
||||
aware_chunk = result.chunks[0]
|
||||
assert persona.id in aware_chunk.personas
|
||||
assert aware_chunk.user_project == []
|
||||
|
||||
@@ -188,13 +190,15 @@ class TestAdapterWritesBothMetadataFields:
|
||||
updatable_docs=[chunk.source_document], id_to_boost_map={}
|
||||
)
|
||||
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id=TEST_TENANT_ID,
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
aware_chunk = enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
assert len(result.chunks) == 1
|
||||
aware_chunk = result.chunks[0]
|
||||
assert project.id in aware_chunk.user_project
|
||||
assert aware_chunk.personas == []
|
||||
|
||||
@@ -225,13 +229,14 @@ class TestAdapterWritesBothMetadataFields:
|
||||
updatable_docs=[chunk.source_document], id_to_boost_map={}
|
||||
)
|
||||
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id=TEST_TENANT_ID,
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
aware_chunk = enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
aware_chunk = result.chunks[0]
|
||||
assert persona.id in aware_chunk.personas
|
||||
assert project.id in aware_chunk.user_project
|
||||
|
||||
@@ -256,13 +261,14 @@ class TestAdapterWritesBothMetadataFields:
|
||||
updatable_docs=[chunk.source_document], id_to_boost_map={}
|
||||
)
|
||||
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id=TEST_TENANT_ID,
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
aware_chunk = enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
aware_chunk = result.chunks[0]
|
||||
assert aware_chunk.personas == []
|
||||
assert aware_chunk.user_project == []
|
||||
|
||||
@@ -294,11 +300,12 @@ class TestAdapterWritesBothMetadataFields:
|
||||
updatable_docs=[chunk.source_document], id_to_boost_map={}
|
||||
)
|
||||
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id=TEST_TENANT_ID,
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
aware_chunk = enricher.enrich_chunk(chunk, 1.0)
|
||||
|
||||
aware_chunk = result.chunks[0]
|
||||
assert set(aware_chunk.personas) == {persona_a.id, persona_b.id}
|
||||
|
||||
@@ -0,0 +1,47 @@
|
||||
from sqlalchemy import inspect
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from onyx.db.chat import create_chat_session
|
||||
from onyx.db.chat import get_chat_session_by_id
|
||||
from onyx.db.models import Persona
|
||||
from onyx.db.models import UserProject
|
||||
from tests.external_dependency_unit.conftest import create_test_user
|
||||
|
||||
|
||||
def test_eager_load_persona_loads_relationships(db_session: Session) -> None:
|
||||
"""Verify that eager_load_persona pre-loads persona, its collections, and project."""
|
||||
user = create_test_user(db_session, "eager-load")
|
||||
persona = Persona(name="eager-load-test", description="test")
|
||||
project = UserProject(name="eager-load-project", user_id=user.id)
|
||||
db_session.add_all([persona, project])
|
||||
db_session.flush()
|
||||
|
||||
chat_session = create_chat_session(
|
||||
db_session=db_session,
|
||||
description="test",
|
||||
user_id=None,
|
||||
persona_id=persona.id,
|
||||
project_id=project.id,
|
||||
)
|
||||
|
||||
loaded = get_chat_session_by_id(
|
||||
chat_session_id=chat_session.id,
|
||||
user_id=None,
|
||||
db_session=db_session,
|
||||
eager_load_persona=True,
|
||||
)
|
||||
|
||||
try:
|
||||
tmp = inspect(loaded)
|
||||
assert tmp is not None
|
||||
unloaded = tmp.unloaded
|
||||
assert "persona" not in unloaded
|
||||
assert "project" not in unloaded
|
||||
|
||||
tmp = inspect(loaded.persona)
|
||||
assert tmp is not None
|
||||
persona_unloaded = tmp.unloaded
|
||||
assert "tools" not in persona_unloaded
|
||||
assert "user_files" not in persona_unloaded
|
||||
finally:
|
||||
db_session.rollback()
|
||||
@@ -6,7 +6,6 @@ These tests assume Vespa and OpenSearch are running.
|
||||
import time
|
||||
import uuid
|
||||
from collections.abc import Generator
|
||||
from collections.abc import Iterator
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
@@ -22,7 +21,6 @@ from onyx.document_index.opensearch.opensearch_document_index import (
|
||||
)
|
||||
from onyx.document_index.vespa.index import VespaIndex
|
||||
from onyx.document_index.vespa.vespa_document_index import VespaDocumentIndex
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
from tests.external_dependency_unit.constants import TEST_TENANT_ID
|
||||
from tests.external_dependency_unit.document_index.conftest import EMBEDDING_DIM
|
||||
from tests.external_dependency_unit.document_index.conftest import make_chunk
|
||||
@@ -203,25 +201,3 @@ class TestDocumentIndexNew:
|
||||
assert len(result_map) == 2
|
||||
assert result_map[existing_doc] is True
|
||||
assert result_map[new_doc] is False
|
||||
|
||||
def test_index_accepts_generator(
|
||||
self,
|
||||
document_indices: list[DocumentIndexNew],
|
||||
tenant_context: None, # noqa: ARG002
|
||||
) -> None:
|
||||
"""index() accepts a generator (any iterable), not just a list."""
|
||||
for document_index in document_indices:
|
||||
doc_id = f"test_gen_{uuid.uuid4().hex[:8]}"
|
||||
metadata = make_indexing_metadata([doc_id], old_counts=[0], new_counts=[3])
|
||||
|
||||
def chunk_gen() -> Iterator[DocMetadataAwareIndexChunk]:
|
||||
for i in range(3):
|
||||
yield make_chunk(doc_id, chunk_id=i)
|
||||
|
||||
results = document_index.index(
|
||||
chunks=chunk_gen(), indexing_metadata=metadata
|
||||
)
|
||||
|
||||
assert len(results) == 1
|
||||
assert results[0].document_id == doc_id
|
||||
assert results[0].already_existed is False
|
||||
|
||||
@@ -5,7 +5,6 @@ These tests assume Vespa and OpenSearch are running.
|
||||
|
||||
import time
|
||||
from collections.abc import Generator
|
||||
from collections.abc import Iterator
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -167,29 +166,3 @@ class TestDocumentIndexOld:
|
||||
batch_retrieval=True,
|
||||
)
|
||||
assert len(inference_chunks) == 0
|
||||
|
||||
def test_index_accepts_generator(
|
||||
self,
|
||||
document_indices: list[DocumentIndex],
|
||||
tenant_context: None, # noqa: ARG002
|
||||
) -> None:
|
||||
"""index() accepts a generator (any iterable), not just a list."""
|
||||
for document_index in document_indices:
|
||||
|
||||
def chunk_gen() -> Iterator[DocMetadataAwareIndexChunk]:
|
||||
for i in range(3):
|
||||
yield make_chunk("test_doc_gen", chunk_id=i)
|
||||
|
||||
index_batch_params = IndexBatchParams(
|
||||
doc_id_to_previous_chunk_cnt={"test_doc_gen": 0},
|
||||
doc_id_to_new_chunk_cnt={"test_doc_gen": 3},
|
||||
tenant_id=get_current_tenant_id(),
|
||||
large_chunks_enabled=False,
|
||||
)
|
||||
|
||||
results = document_index.index(chunk_gen(), index_batch_params)
|
||||
|
||||
assert len(results) == 1
|
||||
record = results.pop()
|
||||
assert record.document_id == "test_doc_gen"
|
||||
assert record.already_existed is False
|
||||
|
||||
@@ -143,8 +143,8 @@ def use_mock_search_pipeline(
|
||||
db_session: Session | None = None, # noqa: ARG001
|
||||
auto_detect_filters: bool = False, # noqa: ARG001
|
||||
llm: LLM | None = None, # noqa: ARG001
|
||||
project_id: int | None = None, # noqa: ARG001
|
||||
persona_id: int | None = None, # noqa: ARG001
|
||||
project_id_filter: int | None = None, # noqa: ARG001
|
||||
persona_id_filter: int | None = None, # noqa: ARG001
|
||||
# Pre-fetched data (used by SearchTool to avoid DB access in parallel calls)
|
||||
acl_filters: list[str] | None = None, # noqa: ARG001
|
||||
embedding_model: EmbeddingModel | None = None, # noqa: ARG001
|
||||
|
||||
53
backend/tests/unit/ee/onyx/db/test_user_group_rename.py
Normal file
53
backend/tests/unit/ee/onyx/db/test_user_group_rename.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Tests for user group rename DB operation."""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from ee.onyx.db.user_group import rename_user_group
|
||||
from onyx.db.models import UserGroup
|
||||
|
||||
|
||||
class TestRenameUserGroup:
|
||||
"""Tests for rename_user_group function."""
|
||||
|
||||
@patch("ee.onyx.db.user_group.DISABLE_VECTOR_DB", False)
|
||||
@patch(
|
||||
"ee.onyx.db.user_group._mark_user_group__cc_pair_relationships_outdated__no_commit"
|
||||
)
|
||||
def test_rename_succeeds_and_triggers_sync(
|
||||
self, mock_mark_outdated: MagicMock
|
||||
) -> None:
|
||||
mock_session = MagicMock()
|
||||
mock_group = MagicMock(spec=UserGroup)
|
||||
mock_group.name = "Old Name"
|
||||
mock_group.is_up_to_date = True
|
||||
mock_session.scalar.return_value = mock_group
|
||||
|
||||
result = rename_user_group(mock_session, user_group_id=1, new_name="New Name")
|
||||
|
||||
assert result.name == "New Name"
|
||||
assert result.is_up_to_date is False
|
||||
mock_mark_outdated.assert_called_once()
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_rename_group_not_found(self) -> None:
|
||||
mock_session = MagicMock()
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
rename_user_group(mock_session, user_group_id=999, new_name="New Name")
|
||||
|
||||
mock_session.commit.assert_not_called()
|
||||
|
||||
def test_rename_group_syncing_raises(self) -> None:
|
||||
mock_session = MagicMock()
|
||||
mock_group = MagicMock(spec=UserGroup)
|
||||
mock_group.is_up_to_date = False
|
||||
mock_session.scalar.return_value = mock_group
|
||||
|
||||
with pytest.raises(ValueError, match="currently syncing"):
|
||||
rename_user_group(mock_session, user_group_id=1, new_name="New Name")
|
||||
|
||||
mock_session.commit.assert_not_called()
|
||||
@@ -0,0 +1,216 @@
|
||||
"""
|
||||
Unit tests for the check_available_tenants task.
|
||||
|
||||
Tests verify:
|
||||
- Provisioning loop calls pre_provision_tenant the correct number of times
|
||||
- Batch size is capped at _MAX_TENANTS_PER_RUN
|
||||
- A failure in one provisioning call does not stop subsequent calls
|
||||
- No provisioning happens when pool is already full
|
||||
- TARGET_AVAILABLE_TENANTS is respected
|
||||
"""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from ee.onyx.background.celery.tasks.tenant_provisioning.tasks import (
|
||||
_MAX_TENANTS_PER_RUN,
|
||||
)
|
||||
from ee.onyx.background.celery.tasks.tenant_provisioning.tasks import (
|
||||
check_available_tenants,
|
||||
)
|
||||
|
||||
# Access the underlying function directly, bypassing Celery's task wrapper
|
||||
# which injects `self` as the first argument when bind=True.
|
||||
_check_available_tenants = check_available_tenants.run
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def _enable_multi_tenant(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.MULTI_TENANT",
|
||||
True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_redis(monkeypatch: pytest.MonkeyPatch) -> MagicMock:
|
||||
mock_lock = MagicMock()
|
||||
mock_lock.acquire.return_value = True
|
||||
|
||||
mock_client = MagicMock()
|
||||
mock_client.lock.return_value = mock_lock
|
||||
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.get_redis_client",
|
||||
lambda tenant_id: mock_client, # noqa: ARG005
|
||||
)
|
||||
return mock_client
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_pre_provision(monkeypatch: pytest.MonkeyPatch) -> MagicMock:
|
||||
mock = MagicMock(return_value=True)
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.pre_provision_tenant",
|
||||
mock,
|
||||
)
|
||||
return mock
|
||||
|
||||
|
||||
def _mock_available_count(monkeypatch: pytest.MonkeyPatch, count: int) -> None:
|
||||
"""Set up the DB session mock to return a specific available tenant count."""
|
||||
mock_session = MagicMock()
|
||||
mock_session.__enter__ = MagicMock(return_value=mock_session)
|
||||
mock_session.__exit__ = MagicMock(return_value=False)
|
||||
mock_session.query.return_value.count.return_value = count
|
||||
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.get_session_with_shared_schema",
|
||||
lambda: mock_session,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_enable_multi_tenant", "mock_redis")
|
||||
class TestCheckAvailableTenants:
|
||||
def test_provisions_all_needed_tenants(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""When pool has 2 and target is 5, should provision 3."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
5,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 2)
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 3
|
||||
|
||||
def test_batch_capped_at_max_per_run(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""When pool needs more than _MAX_TENANTS_PER_RUN, cap the batch."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
20,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 0)
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == _MAX_TENANTS_PER_RUN
|
||||
|
||||
def test_no_provisioning_when_pool_full(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""When pool already meets target, should not provision anything."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
5,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 5)
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 0
|
||||
|
||||
def test_no_provisioning_when_pool_exceeds_target(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""When pool exceeds target, should not provision anything."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
5,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 8)
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 0
|
||||
|
||||
def test_failure_does_not_stop_remaining(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""If one provisioning fails, the rest should still be attempted."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
5,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 0)
|
||||
|
||||
# Fail on calls 2 and 4 (1-indexed)
|
||||
call_count = 0
|
||||
|
||||
def side_effect() -> bool:
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
if call_count in (2, 4):
|
||||
raise RuntimeError("provisioning failed")
|
||||
return True
|
||||
|
||||
mock_pre_provision.side_effect = side_effect
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
# All 5 should be attempted despite 2 failures
|
||||
assert mock_pre_provision.call_count == 5
|
||||
|
||||
def test_skips_when_not_multi_tenant(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""Should not provision when multi-tenancy is disabled."""
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.MULTI_TENANT",
|
||||
False,
|
||||
)
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 0
|
||||
|
||||
def test_skips_when_lock_not_acquired(
|
||||
self,
|
||||
mock_redis: MagicMock,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""Should skip when another instance holds the lock."""
|
||||
mock_redis.lock.return_value.acquire.return_value = False
|
||||
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 0
|
||||
|
||||
def test_lock_release_failure_does_not_raise(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
mock_redis: MagicMock,
|
||||
mock_pre_provision: MagicMock,
|
||||
) -> None:
|
||||
"""LockNotOwnedError on release should be caught, not propagated."""
|
||||
from redis.exceptions import LockNotOwnedError
|
||||
|
||||
monkeypatch.setattr(
|
||||
"ee.onyx.background.celery.tasks.tenant_provisioning.tasks.TARGET_AVAILABLE_TENANTS",
|
||||
5,
|
||||
)
|
||||
_mock_available_count(monkeypatch, 4)
|
||||
|
||||
mock_redis.lock.return_value.release.side_effect = LockNotOwnedError("expired")
|
||||
|
||||
# Should not raise
|
||||
_check_available_tenants()
|
||||
|
||||
assert mock_pre_provision.call_count == 1
|
||||
@@ -1,4 +1,12 @@
|
||||
import pytest
|
||||
|
||||
from onyx.chat.process_message import _resolve_query_processing_hook_result
|
||||
from onyx.chat.process_message import remove_answer_citations
|
||||
from onyx.error_handling.error_codes import OnyxErrorCode
|
||||
from onyx.error_handling.exceptions import OnyxError
|
||||
from onyx.hooks.executor import HookSkipped
|
||||
from onyx.hooks.executor import HookSoftFailed
|
||||
from onyx.hooks.points.query_processing import QueryProcessingResponse
|
||||
|
||||
|
||||
def test_remove_answer_citations_strips_http_markdown_citation() -> None:
|
||||
@@ -32,3 +40,81 @@ def test_remove_answer_citations_preserves_non_citation_markdown_links() -> None
|
||||
remove_answer_citations(answer)
|
||||
== "See [reference](https://example.com/Function_(mathematics)) for context."
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Query Processing hook response handling (_resolve_query_processing_hook_result)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_hook_skipped_leaves_message_text_unchanged() -> None:
|
||||
result = _resolve_query_processing_hook_result(HookSkipped(), "original query")
|
||||
assert result == "original query"
|
||||
|
||||
|
||||
def test_hook_soft_failed_leaves_message_text_unchanged() -> None:
|
||||
result = _resolve_query_processing_hook_result(HookSoftFailed(), "original query")
|
||||
assert result == "original query"
|
||||
|
||||
|
||||
def test_null_query_raises_query_rejected() -> None:
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(query=None), "original query"
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.QUERY_REJECTED
|
||||
|
||||
|
||||
def test_empty_string_query_raises_query_rejected() -> None:
|
||||
"""Empty string is falsy — must be treated as rejection, same as None."""
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(query=""), "original query"
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.QUERY_REJECTED
|
||||
|
||||
|
||||
def test_whitespace_only_query_raises_query_rejected() -> None:
|
||||
"""Whitespace-only string is truthy but meaningless — must be treated as rejection."""
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(query=" "), "original query"
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.QUERY_REJECTED
|
||||
|
||||
|
||||
def test_absent_query_field_raises_query_rejected() -> None:
|
||||
"""query defaults to None when not provided."""
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(), "original query"
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.QUERY_REJECTED
|
||||
|
||||
|
||||
def test_rejection_message_surfaced_in_error_when_provided() -> None:
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(
|
||||
query=None, rejection_message="Queries about X are not allowed."
|
||||
),
|
||||
"original query",
|
||||
)
|
||||
assert "Queries about X are not allowed." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_fallback_rejection_message_when_none() -> None:
|
||||
"""No rejection_message → generic fallback used in OnyxError detail."""
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
_resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(query=None, rejection_message=None),
|
||||
"original query",
|
||||
)
|
||||
assert "No rejection reason was provided." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_nonempty_query_rewrites_message_text() -> None:
|
||||
result = _resolve_query_processing_hook_result(
|
||||
QueryProcessingResponse(query="rewritten query"), "original query"
|
||||
)
|
||||
assert result == "rewritten query"
|
||||
|
||||
@@ -1,226 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import patch
|
||||
|
||||
from onyx.access.models import DocumentAccess
|
||||
from onyx.configs.constants import DocumentSource
|
||||
from onyx.connectors.models import Document
|
||||
from onyx.connectors.models import TextSection
|
||||
from onyx.document_index.interfaces_new import IndexingMetadata
|
||||
from onyx.document_index.interfaces_new import TenantState
|
||||
from onyx.document_index.opensearch.opensearch_document_index import (
|
||||
OpenSearchDocumentIndex,
|
||||
)
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
|
||||
|
||||
def _make_chunk(
|
||||
doc_id: str,
|
||||
chunk_id: int,
|
||||
) -> DocMetadataAwareIndexChunk:
|
||||
"""Creates a minimal DocMetadataAwareIndexChunk for testing."""
|
||||
doc = Document(
|
||||
id=doc_id,
|
||||
sections=[TextSection(text="test", link="http://test.com")],
|
||||
source=DocumentSource.FILE,
|
||||
semantic_identifier="test_doc",
|
||||
metadata={},
|
||||
)
|
||||
access = DocumentAccess.build(
|
||||
user_emails=[],
|
||||
user_groups=[],
|
||||
external_user_emails=[],
|
||||
external_user_group_ids=[],
|
||||
is_public=True,
|
||||
)
|
||||
return DocMetadataAwareIndexChunk(
|
||||
chunk_id=chunk_id,
|
||||
blurb="test",
|
||||
content="test content",
|
||||
source_links={0: "http://test.com"},
|
||||
image_file_id=None,
|
||||
section_continuation=False,
|
||||
source_document=doc,
|
||||
title_prefix="",
|
||||
metadata_suffix_semantic="",
|
||||
metadata_suffix_keyword="",
|
||||
mini_chunk_texts=None,
|
||||
large_chunk_id=None,
|
||||
doc_summary="",
|
||||
chunk_context="",
|
||||
contextual_rag_reserved_tokens=0,
|
||||
embeddings={"full_embedding": [0.1] * 10, "mini_chunk_embeddings": []},
|
||||
title_embedding=[0.1] * 10,
|
||||
tenant_id="test_tenant",
|
||||
access=access,
|
||||
document_sets=set(),
|
||||
user_project=[],
|
||||
personas=[],
|
||||
boost=0,
|
||||
aggregated_chunk_boost_factor=1.0,
|
||||
ancestor_hierarchy_node_ids=[],
|
||||
)
|
||||
|
||||
|
||||
def _make_index() -> OpenSearchDocumentIndex:
|
||||
"""Creates an OpenSearchDocumentIndex with a mocked client."""
|
||||
mock_client = MagicMock()
|
||||
mock_client.bulk_index_documents = MagicMock()
|
||||
|
||||
tenant_state = TenantState(tenant_id="test_tenant", multitenant=False)
|
||||
|
||||
index = OpenSearchDocumentIndex.__new__(OpenSearchDocumentIndex)
|
||||
index._index_name = "test_index"
|
||||
index._client = mock_client
|
||||
index._tenant_state = tenant_state
|
||||
|
||||
return index
|
||||
|
||||
|
||||
def _make_metadata(doc_id: str, chunk_count: int) -> IndexingMetadata:
|
||||
return IndexingMetadata(
|
||||
doc_id_to_chunk_cnt_diff={
|
||||
doc_id: IndexingMetadata.ChunkCounts(
|
||||
old_chunk_cnt=0,
|
||||
new_chunk_cnt=chunk_count,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_single_doc_under_batch_limit_flushes_once() -> None:
|
||||
"""A document with fewer chunks than MAX_CHUNKS_PER_DOC_BATCH should flush once."""
|
||||
index = _make_index()
|
||||
doc_id = "doc_1"
|
||||
num_chunks = 50
|
||||
chunks = [_make_chunk(doc_id, i) for i in range(num_chunks)]
|
||||
metadata = _make_metadata(doc_id, num_chunks)
|
||||
|
||||
with patch.object(index, "delete", return_value=0):
|
||||
index.index(chunks, metadata)
|
||||
|
||||
assert index._client.bulk_index_documents.call_count == 1
|
||||
batch_arg = index._client.bulk_index_documents.call_args_list[0]
|
||||
assert len(batch_arg.kwargs["documents"]) == num_chunks
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_single_doc_over_batch_limit_flushes_multiple_times() -> None:
|
||||
"""A document with more chunks than MAX_CHUNKS_PER_DOC_BATCH should flush multiple times."""
|
||||
index = _make_index()
|
||||
doc_id = "doc_1"
|
||||
num_chunks = 250
|
||||
chunks = [_make_chunk(doc_id, i) for i in range(num_chunks)]
|
||||
metadata = _make_metadata(doc_id, num_chunks)
|
||||
|
||||
with patch.object(index, "delete", return_value=0):
|
||||
index.index(chunks, metadata)
|
||||
|
||||
# 250 chunks / 100 per batch = 3 flushes (100 + 100 + 50)
|
||||
assert index._client.bulk_index_documents.call_count == 3
|
||||
batch_sizes = [
|
||||
len(call.kwargs["documents"])
|
||||
for call in index._client.bulk_index_documents.call_args_list
|
||||
]
|
||||
assert batch_sizes == [100, 100, 50]
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_single_doc_exactly_at_batch_limit() -> None:
|
||||
"""A document with exactly MAX_CHUNKS_PER_DOC_BATCH chunks should flush once
|
||||
(the flush happens on the next chunk, not at the boundary)."""
|
||||
index = _make_index()
|
||||
doc_id = "doc_1"
|
||||
num_chunks = 100
|
||||
chunks = [_make_chunk(doc_id, i) for i in range(num_chunks)]
|
||||
metadata = _make_metadata(doc_id, num_chunks)
|
||||
|
||||
with patch.object(index, "delete", return_value=0):
|
||||
index.index(chunks, metadata)
|
||||
|
||||
# 100 chunks hit the >= check on chunk 101 which doesn't exist,
|
||||
# so final flush handles all 100
|
||||
# Actually: the elif fires when len(current_chunks) >= 100, which happens
|
||||
# when current_chunks has 100 items and the 101st chunk arrives.
|
||||
# With exactly 100 chunks, the 100th chunk makes len == 99, then appended -> 100.
|
||||
# No 101st chunk arrives, so the final flush handles all 100.
|
||||
assert index._client.bulk_index_documents.call_count == 1
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_single_doc_one_over_batch_limit() -> None:
|
||||
"""101 chunks for one doc: first 100 flushed when the 101st arrives, then
|
||||
the 101st is flushed at the end."""
|
||||
index = _make_index()
|
||||
doc_id = "doc_1"
|
||||
num_chunks = 101
|
||||
chunks = [_make_chunk(doc_id, i) for i in range(num_chunks)]
|
||||
metadata = _make_metadata(doc_id, num_chunks)
|
||||
|
||||
with patch.object(index, "delete", return_value=0):
|
||||
index.index(chunks, metadata)
|
||||
|
||||
assert index._client.bulk_index_documents.call_count == 2
|
||||
batch_sizes = [
|
||||
len(call.kwargs["documents"])
|
||||
for call in index._client.bulk_index_documents.call_args_list
|
||||
]
|
||||
assert batch_sizes == [100, 1]
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_multiple_docs_each_under_limit_flush_per_doc() -> None:
|
||||
"""Multiple documents each under the batch limit should flush once per document."""
|
||||
index = _make_index()
|
||||
chunks = []
|
||||
for doc_idx in range(3):
|
||||
doc_id = f"doc_{doc_idx}"
|
||||
for chunk_idx in range(50):
|
||||
chunks.append(_make_chunk(doc_id, chunk_idx))
|
||||
|
||||
metadata = IndexingMetadata(
|
||||
doc_id_to_chunk_cnt_diff={
|
||||
f"doc_{i}": IndexingMetadata.ChunkCounts(old_chunk_cnt=0, new_chunk_cnt=50)
|
||||
for i in range(3)
|
||||
},
|
||||
)
|
||||
|
||||
with patch.object(index, "delete", return_value=0):
|
||||
index.index(chunks, metadata)
|
||||
|
||||
# 3 documents = 3 flushes (one per doc boundary + final)
|
||||
assert index._client.bulk_index_documents.call_count == 3
|
||||
|
||||
|
||||
@patch(
|
||||
"onyx.document_index.opensearch.opensearch_document_index.MAX_CHUNKS_PER_DOC_BATCH",
|
||||
100,
|
||||
)
|
||||
def test_delete_called_once_per_document() -> None:
|
||||
"""Even with multiple flushes for a single document, delete should only be
|
||||
called once per document."""
|
||||
index = _make_index()
|
||||
doc_id = "doc_1"
|
||||
num_chunks = 250
|
||||
chunks = [_make_chunk(doc_id, i) for i in range(num_chunks)]
|
||||
metadata = _make_metadata(doc_id, num_chunks)
|
||||
|
||||
with patch.object(index, "delete", return_value=0) as mock_delete:
|
||||
index.index(chunks, metadata)
|
||||
|
||||
mock_delete.assert_called_once_with(doc_id, None)
|
||||
@@ -1,152 +0,0 @@
|
||||
"""Unit tests for VespaDocumentIndex.index().
|
||||
|
||||
These tests mock all external I/O (HTTP calls, thread pools) and verify
|
||||
the streaming logic, ID cleaning/mapping, and DocumentInsertionRecord
|
||||
construction.
|
||||
"""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import patch
|
||||
|
||||
from onyx.access.models import DocumentAccess
|
||||
from onyx.configs.constants import DocumentSource
|
||||
from onyx.connectors.models import Document
|
||||
from onyx.connectors.models import TextSection
|
||||
from onyx.document_index.interfaces import EnrichedDocumentIndexingInfo
|
||||
from onyx.document_index.interfaces_new import IndexingMetadata
|
||||
from onyx.document_index.interfaces_new import TenantState
|
||||
from onyx.document_index.vespa.vespa_document_index import VespaDocumentIndex
|
||||
from onyx.indexing.models import ChunkEmbedding
|
||||
from onyx.indexing.models import DocMetadataAwareIndexChunk
|
||||
from onyx.indexing.models import IndexChunk
|
||||
|
||||
|
||||
def _make_chunk(
|
||||
doc_id: str,
|
||||
chunk_id: int = 0,
|
||||
content: str = "test content",
|
||||
) -> DocMetadataAwareIndexChunk:
|
||||
doc = Document(
|
||||
id=doc_id,
|
||||
semantic_identifier="test_doc",
|
||||
sections=[TextSection(text=content, link=None)],
|
||||
source=DocumentSource.NOT_APPLICABLE,
|
||||
metadata={},
|
||||
)
|
||||
index_chunk = IndexChunk(
|
||||
chunk_id=chunk_id,
|
||||
blurb=content[:50],
|
||||
content=content,
|
||||
source_links=None,
|
||||
image_file_id=None,
|
||||
section_continuation=False,
|
||||
source_document=doc,
|
||||
title_prefix="",
|
||||
metadata_suffix_semantic="",
|
||||
metadata_suffix_keyword="",
|
||||
contextual_rag_reserved_tokens=0,
|
||||
doc_summary="",
|
||||
chunk_context="",
|
||||
mini_chunk_texts=None,
|
||||
large_chunk_id=None,
|
||||
embeddings=ChunkEmbedding(
|
||||
full_embedding=[0.1] * 10,
|
||||
mini_chunk_embeddings=[],
|
||||
),
|
||||
title_embedding=None,
|
||||
)
|
||||
access = DocumentAccess.build(
|
||||
user_emails=[],
|
||||
user_groups=[],
|
||||
external_user_emails=[],
|
||||
external_user_group_ids=[],
|
||||
is_public=True,
|
||||
)
|
||||
return DocMetadataAwareIndexChunk.from_index_chunk(
|
||||
index_chunk=index_chunk,
|
||||
access=access,
|
||||
document_sets=set(),
|
||||
user_project=[],
|
||||
personas=[],
|
||||
boost=0,
|
||||
aggregated_chunk_boost_factor=1.0,
|
||||
tenant_id="test_tenant",
|
||||
)
|
||||
|
||||
|
||||
def _make_indexing_metadata(
|
||||
doc_ids: list[str],
|
||||
old_counts: list[int],
|
||||
new_counts: list[int],
|
||||
) -> IndexingMetadata:
|
||||
return IndexingMetadata(
|
||||
doc_id_to_chunk_cnt_diff={
|
||||
doc_id: IndexingMetadata.ChunkCounts(
|
||||
old_chunk_cnt=old,
|
||||
new_chunk_cnt=new,
|
||||
)
|
||||
for doc_id, old, new in zip(doc_ids, old_counts, new_counts)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _stub_enrich(
|
||||
doc_id: str,
|
||||
old_chunk_cnt: int,
|
||||
) -> EnrichedDocumentIndexingInfo:
|
||||
"""Build an EnrichedDocumentIndexingInfo that says 'no chunks to delete'
|
||||
when old_chunk_cnt == 0, or 'has existing chunks' otherwise."""
|
||||
return EnrichedDocumentIndexingInfo(
|
||||
doc_id=doc_id,
|
||||
chunk_start_index=0,
|
||||
old_version=False,
|
||||
chunk_end_index=old_chunk_cnt,
|
||||
)
|
||||
|
||||
|
||||
@patch("onyx.document_index.vespa.vespa_document_index.batch_index_vespa_chunks")
|
||||
@patch("onyx.document_index.vespa.vespa_document_index.delete_vespa_chunks")
|
||||
@patch(
|
||||
"onyx.document_index.vespa.vespa_document_index.get_document_chunk_ids",
|
||||
return_value=[],
|
||||
)
|
||||
@patch("onyx.document_index.vespa.vespa_document_index._enrich_basic_chunk_info")
|
||||
@patch(
|
||||
"onyx.document_index.vespa.vespa_document_index.BATCH_SIZE",
|
||||
3,
|
||||
)
|
||||
def test_index_respects_batch_size(
|
||||
mock_enrich: MagicMock,
|
||||
mock_get_chunk_ids: MagicMock, # noqa: ARG001
|
||||
mock_delete: MagicMock, # noqa: ARG001
|
||||
mock_batch_index: MagicMock,
|
||||
) -> None:
|
||||
"""When chunks exceed BATCH_SIZE, batch_index_vespa_chunks is called
|
||||
multiple times with correctly sized batches."""
|
||||
mock_enrich.return_value = _stub_enrich("doc1", old_chunk_cnt=0)
|
||||
|
||||
index = VespaDocumentIndex(
|
||||
index_name="test_index",
|
||||
tenant_state=TenantState(tenant_id="test_tenant", multitenant=False),
|
||||
large_chunks_enabled=False,
|
||||
httpx_client=MagicMock(),
|
||||
)
|
||||
|
||||
chunks = [_make_chunk("doc1", chunk_id=i) for i in range(7)]
|
||||
metadata = _make_indexing_metadata(["doc1"], old_counts=[0], new_counts=[7])
|
||||
|
||||
results = index.index(chunks=chunks, indexing_metadata=metadata)
|
||||
|
||||
assert len(results) == 1
|
||||
|
||||
# With BATCH_SIZE=3 and 7 chunks: batches of 3, 3, 1
|
||||
assert mock_batch_index.call_count == 3
|
||||
batch_sizes = [len(c.kwargs["chunks"]) for c in mock_batch_index.call_args_list]
|
||||
assert batch_sizes == [3, 3, 1]
|
||||
|
||||
# Verify all chunks are accounted for and in order
|
||||
all_indexed = [
|
||||
chunk for c in mock_batch_index.call_args_list for chunk in c.kwargs["chunks"]
|
||||
]
|
||||
assert len(all_indexed) == 7
|
||||
assert [c.chunk_id for c in all_indexed] == list(range(7))
|
||||
@@ -7,6 +7,7 @@ from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from pydantic import BaseModel
|
||||
|
||||
from onyx.db.enums import HookFailStrategy
|
||||
from onyx.db.enums import HookPoint
|
||||
@@ -15,13 +16,15 @@ from onyx.error_handling.exceptions import OnyxError
|
||||
from onyx.hooks.executor import execute_hook
|
||||
from onyx.hooks.executor import HookSkipped
|
||||
from onyx.hooks.executor import HookSoftFailed
|
||||
from onyx.hooks.points.query_processing import QueryProcessingResponse
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_PAYLOAD: dict[str, Any] = {"query": "test", "user_email": "u@example.com"}
|
||||
_RESPONSE_PAYLOAD: dict[str, Any] = {"rewritten_query": "better test"}
|
||||
# A valid QueryProcessingResponse payload — used by success-path tests.
|
||||
_RESPONSE_PAYLOAD: dict[str, Any] = {"query": "better test"}
|
||||
|
||||
|
||||
def _make_hook(
|
||||
@@ -33,6 +36,7 @@ def _make_hook(
|
||||
fail_strategy: HookFailStrategy = HookFailStrategy.SOFT,
|
||||
hook_id: int = 1,
|
||||
is_reachable: bool | None = None,
|
||||
hook_point: HookPoint = HookPoint.QUERY_PROCESSING,
|
||||
) -> MagicMock:
|
||||
hook = MagicMock()
|
||||
hook.is_active = is_active
|
||||
@@ -42,6 +46,7 @@ def _make_hook(
|
||||
hook.id = hook_id
|
||||
hook.fail_strategy = fail_strategy
|
||||
hook.is_reachable = is_reachable
|
||||
hook.hook_point = hook_point
|
||||
return hook
|
||||
|
||||
|
||||
@@ -140,6 +145,7 @@ def test_early_exit_returns_skipped_with_no_db_writes(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert isinstance(result, HookSkipped)
|
||||
@@ -152,7 +158,9 @@ def test_early_exit_returns_skipped_with_no_db_writes(
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_success_returns_payload_and_sets_reachable(db_session: MagicMock) -> None:
|
||||
def test_success_returns_validated_model_and_sets_reachable(
|
||||
db_session: MagicMock,
|
||||
) -> None:
|
||||
hook = _make_hook()
|
||||
|
||||
with (
|
||||
@@ -171,9 +179,11 @@ def test_success_returns_payload_and_sets_reachable(db_session: MagicMock) -> No
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert result == _RESPONSE_PAYLOAD
|
||||
assert isinstance(result, QueryProcessingResponse)
|
||||
assert result.query == _RESPONSE_PAYLOAD["query"]
|
||||
_, update_kwargs = mock_update.call_args
|
||||
assert update_kwargs["is_reachable"] is True
|
||||
mock_log.assert_not_called()
|
||||
@@ -200,9 +210,11 @@ def test_success_skips_reachable_write_when_already_true(db_session: MagicMock)
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert result == _RESPONSE_PAYLOAD
|
||||
assert isinstance(result, QueryProcessingResponse)
|
||||
assert result.query == _RESPONSE_PAYLOAD["query"]
|
||||
mock_update.assert_not_called()
|
||||
|
||||
|
||||
@@ -230,6 +242,7 @@ def test_non_dict_json_response_is_a_failure(db_session: MagicMock) -> None:
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert isinstance(result, HookSoftFailed)
|
||||
@@ -265,6 +278,7 @@ def test_json_decode_failure_is_a_failure(db_session: MagicMock) -> None:
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert isinstance(result, HookSoftFailed)
|
||||
@@ -388,6 +402,7 @@ def test_http_failure_paths(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.HOOK_EXECUTION_FAILED
|
||||
else:
|
||||
@@ -395,6 +410,7 @@ def test_http_failure_paths(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
assert isinstance(result, expected_type)
|
||||
|
||||
@@ -442,6 +458,7 @@ def test_authorization_header(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
_, call_kwargs = mock_client.post.call_args
|
||||
@@ -457,16 +474,16 @@ def test_authorization_header(
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"http_exception,expected_result",
|
||||
"http_exception,expect_onyx_error",
|
||||
[
|
||||
pytest.param(None, _RESPONSE_PAYLOAD, id="success_path"),
|
||||
pytest.param(httpx.ConnectError("refused"), OnyxError, id="hard_fail_path"),
|
||||
pytest.param(None, False, id="success_path"),
|
||||
pytest.param(httpx.ConnectError("refused"), True, id="hard_fail_path"),
|
||||
],
|
||||
)
|
||||
def test_persist_session_failure_is_swallowed(
|
||||
db_session: MagicMock,
|
||||
http_exception: Exception | None,
|
||||
expected_result: Any,
|
||||
expect_onyx_error: bool,
|
||||
) -> None:
|
||||
"""DB session failure in _persist_result must not mask the real return value or OnyxError."""
|
||||
hook = _make_hook(fail_strategy=HookFailStrategy.HARD)
|
||||
@@ -489,12 +506,13 @@ def test_persist_session_failure_is_swallowed(
|
||||
side_effect=http_exception,
|
||||
)
|
||||
|
||||
if expected_result is OnyxError:
|
||||
if expect_onyx_error:
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.HOOK_EXECUTION_FAILED
|
||||
else:
|
||||
@@ -502,8 +520,131 @@ def test_persist_session_failure_is_swallowed(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
assert result == expected_result
|
||||
assert isinstance(result, QueryProcessingResponse)
|
||||
assert result.query == _RESPONSE_PAYLOAD["query"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Response model validation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class _StrictResponse(BaseModel):
|
||||
"""Strict model used to reliably trigger a ValidationError in tests."""
|
||||
|
||||
required_field: str # no default → missing key raises ValidationError
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"fail_strategy,expected_type",
|
||||
[
|
||||
pytest.param(
|
||||
HookFailStrategy.SOFT, HookSoftFailed, id="validation_failure_soft"
|
||||
),
|
||||
pytest.param(HookFailStrategy.HARD, OnyxError, id="validation_failure_hard"),
|
||||
],
|
||||
)
|
||||
def test_response_validation_failure_respects_fail_strategy(
|
||||
db_session: MagicMock,
|
||||
fail_strategy: HookFailStrategy,
|
||||
expected_type: type,
|
||||
) -> None:
|
||||
"""A response that fails response_model validation is treated like any other
|
||||
hook failure: logged, is_reachable left unchanged, fail_strategy respected."""
|
||||
hook = _make_hook(fail_strategy=fail_strategy)
|
||||
|
||||
with (
|
||||
patch("onyx.hooks.executor.HOOKS_AVAILABLE", True),
|
||||
patch(
|
||||
"onyx.hooks.executor.get_non_deleted_hook_by_hook_point",
|
||||
return_value=hook,
|
||||
),
|
||||
patch("onyx.hooks.executor.get_session_with_current_tenant"),
|
||||
patch("onyx.hooks.executor.update_hook__no_commit") as mock_update,
|
||||
patch("onyx.hooks.executor.create_hook_execution_log__no_commit") as mock_log,
|
||||
patch("httpx.Client") as mock_client_cls,
|
||||
):
|
||||
# Response payload is missing required_field → ValidationError
|
||||
_setup_client(mock_client_cls, response=_make_response(json_return={}))
|
||||
|
||||
if expected_type is OnyxError:
|
||||
with pytest.raises(OnyxError) as exc_info:
|
||||
execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=_StrictResponse,
|
||||
)
|
||||
assert exc_info.value.error_code is OnyxErrorCode.HOOK_EXECUTION_FAILED
|
||||
else:
|
||||
result = execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=_StrictResponse,
|
||||
)
|
||||
assert isinstance(result, HookSoftFailed)
|
||||
|
||||
# is_reachable must not be updated — server responded correctly
|
||||
mock_update.assert_not_called()
|
||||
# failure must be logged
|
||||
mock_log.assert_called_once()
|
||||
_, log_kwargs = mock_log.call_args
|
||||
assert log_kwargs["is_success"] is False
|
||||
assert "validation" in (log_kwargs["error_message"] or "").lower()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Outer soft-fail guard in execute_hook
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"fail_strategy,expected_type",
|
||||
[
|
||||
pytest.param(HookFailStrategy.SOFT, HookSoftFailed, id="unexpected_exc_soft"),
|
||||
pytest.param(HookFailStrategy.HARD, ValueError, id="unexpected_exc_hard"),
|
||||
],
|
||||
)
|
||||
def test_unexpected_exception_in_inner_respects_fail_strategy(
|
||||
db_session: MagicMock,
|
||||
fail_strategy: HookFailStrategy,
|
||||
expected_type: type,
|
||||
) -> None:
|
||||
"""An unexpected exception raised by _execute_hook_inner (not an OnyxError from
|
||||
HARD fail — e.g. a bug or an assertion error) must be swallowed and return
|
||||
HookSoftFailed for SOFT strategy, or re-raised for HARD strategy."""
|
||||
hook = _make_hook(fail_strategy=fail_strategy)
|
||||
|
||||
with (
|
||||
patch("onyx.hooks.executor.HOOKS_AVAILABLE", True),
|
||||
patch(
|
||||
"onyx.hooks.executor.get_non_deleted_hook_by_hook_point",
|
||||
return_value=hook,
|
||||
),
|
||||
patch(
|
||||
"onyx.hooks.executor._execute_hook_inner",
|
||||
side_effect=ValueError("unexpected bug"),
|
||||
),
|
||||
):
|
||||
if expected_type is HookSoftFailed:
|
||||
result = execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
assert isinstance(result, HookSoftFailed)
|
||||
else:
|
||||
with pytest.raises(ValueError, match="unexpected bug"):
|
||||
execute_hook(
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
|
||||
def test_is_reachable_failure_does_not_prevent_log(db_session: MagicMock) -> None:
|
||||
@@ -535,6 +676,7 @@ def test_is_reachable_failure_does_not_prevent_log(db_session: MagicMock) -> Non
|
||||
db_session=db_session,
|
||||
hook_point=HookPoint.QUERY_PROCESSING,
|
||||
payload=_PAYLOAD,
|
||||
response_type=QueryProcessingResponse,
|
||||
)
|
||||
|
||||
assert isinstance(result, HookSoftFailed)
|
||||
|
||||
@@ -116,7 +116,7 @@ def _run_adapter_build(
|
||||
project_ids_map: dict[str, list[int]],
|
||||
persona_ids_map: dict[str, list[int]],
|
||||
) -> list[DocMetadataAwareIndexChunk]:
|
||||
"""Helper that runs UserFileIndexingAdapter.prepare_enrichment + enrich_chunk
|
||||
"""Helper that runs UserFileIndexingAdapter.build_metadata_aware_chunks
|
||||
with all external dependencies mocked."""
|
||||
from onyx.indexing.adapters.user_file_indexing_adapter import (
|
||||
UserFileIndexingAdapter,
|
||||
@@ -155,16 +155,18 @@ def _run_adapter_build(
|
||||
side_effect=Exception("no LLM in tests"),
|
||||
),
|
||||
):
|
||||
enricher = adapter.prepare_enrichment(
|
||||
context=context,
|
||||
result = adapter.build_metadata_aware_chunks(
|
||||
chunks_with_embeddings=[chunk],
|
||||
chunk_content_scores=[1.0],
|
||||
tenant_id="test_tenant",
|
||||
chunks=[chunk],
|
||||
context=context,
|
||||
)
|
||||
return [enricher.enrich_chunk(chunk, 1.0)]
|
||||
|
||||
return result.chunks
|
||||
|
||||
|
||||
def test_prepare_enrichment_includes_persona_ids() -> None:
|
||||
"""UserFileIndexingAdapter.prepare_enrichment writes persona IDs
|
||||
def test_build_metadata_aware_chunks_includes_persona_ids() -> None:
|
||||
"""UserFileIndexingAdapter.build_metadata_aware_chunks writes persona IDs
|
||||
fetched from the DB into each chunk's metadata."""
|
||||
file_id = str(uuid4())
|
||||
persona_ids = [5, 12]
|
||||
@@ -181,7 +183,7 @@ def test_prepare_enrichment_includes_persona_ids() -> None:
|
||||
assert chunks[0].user_project == project_ids
|
||||
|
||||
|
||||
def test_prepare_enrichment_missing_file_defaults_to_empty() -> None:
|
||||
def test_build_metadata_aware_chunks_missing_file_defaults_to_empty() -> None:
|
||||
"""When a file has no persona or project associations in the DB, the
|
||||
adapter should default to empty lists (not KeyError or None)."""
|
||||
file_id = str(uuid4())
|
||||
|
||||
@@ -23,6 +23,12 @@ upstream web_server {
|
||||
# Conditionally include MCP upstream configuration
|
||||
include /etc/nginx/conf.d/mcp_upstream.conf.inc;
|
||||
|
||||
# WebSocket support: only set Connection "upgrade" for actual upgrade requests
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80 default_server;
|
||||
|
||||
@@ -46,8 +52,10 @@ server {
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
# need to use 1.1 to support chunked transfers
|
||||
# need to use 1.1 to support chunked transfers and WebSocket
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
|
||||
# timeout settings
|
||||
|
||||
@@ -23,6 +23,12 @@ upstream web_server {
|
||||
# Conditionally include MCP upstream configuration
|
||||
include /etc/nginx/conf.d/mcp_upstream.conf.inc;
|
||||
|
||||
# WebSocket support: only set Connection "upgrade" for actual upgrade requests
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80 default_server;
|
||||
|
||||
@@ -47,8 +53,10 @@ server {
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
# need to use 1.1 to support chunked transfers
|
||||
# need to use 1.1 to support chunked transfers and WebSocket
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
|
||||
# we don't want nginx trying to do something clever with
|
||||
@@ -92,6 +100,8 @@ server {
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
# we don't want nginx trying to do something clever with
|
||||
# redirects, we set the Host: header above already.
|
||||
|
||||
@@ -23,6 +23,12 @@ upstream web_server {
|
||||
# Conditionally include MCP upstream configuration
|
||||
include /etc/nginx/conf.d/mcp_upstream.conf.inc;
|
||||
|
||||
# WebSocket support: only set Connection "upgrade" for actual upgrade requests
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80 default_server;
|
||||
|
||||
@@ -47,8 +53,10 @@ server {
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
# need to use 1.1 to support chunked transfers
|
||||
# need to use 1.1 to support chunked transfers and WebSocket
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
|
||||
# timeout settings
|
||||
@@ -106,6 +114,8 @@ server {
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
|
||||
# timeout settings
|
||||
|
||||
@@ -28,6 +28,12 @@ data:
|
||||
}
|
||||
{{- end }}
|
||||
|
||||
# WebSocket support: only set Connection "upgrade" for actual upgrade requests
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server.conf: |
|
||||
server {
|
||||
listen 1024;
|
||||
@@ -65,6 +71,8 @@ data:
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header Host $host;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_buffering off;
|
||||
proxy_redirect off;
|
||||
# timeout settings
|
||||
|
||||
@@ -10,7 +10,7 @@ data:
|
||||
#!/usr/bin/env sh
|
||||
set -eu
|
||||
|
||||
HOST="${POSTGRES_HOST:-localhost}"
|
||||
HOST="${PGINTO_HOST:-${POSTGRES_HOST:-localhost}}"
|
||||
PORT="${POSTGRES_PORT:-5432}"
|
||||
USER="${POSTGRES_USER:-postgres}"
|
||||
DB="${POSTGRES_DB:-postgres}"
|
||||
|
||||
@@ -282,7 +282,7 @@ nginx:
|
||||
# The ingress-nginx subchart doesn't auto-detect our custom ConfigMap changes.
|
||||
# Workaround: Helm upgrade will restart if the following annotation value changes.
|
||||
podAnnotations:
|
||||
onyx.app/nginx-config-version: "1"
|
||||
onyx.app/nginx-config-version: "2"
|
||||
|
||||
# Propagate DOMAIN into nginx so server_name continues to use the same env var
|
||||
extraEnvs:
|
||||
|
||||
@@ -83,6 +83,14 @@
|
||||
"scope": [],
|
||||
"rule": "Code changes must consider both regular Onyx deployments and Onyx lite deployments. Lite deployments disable the vector DB, Redis, model servers, and background workers by default, use PostgreSQL-backed cache/auth/file storage, and rely on the API server to handle background work. Do not assume those services are available unless the code path is explicitly limited to full deployments."
|
||||
},
|
||||
{
|
||||
"scope": ["web/**"],
|
||||
"rule": "In Onyx's Next.js app, the `app/ee/admin/` directory is a filesystem convention for Enterprise Edition route overrides — it does NOT add an `/ee/` prefix to the URL. Both `app/admin/groups/page.tsx` and `app/ee/admin/groups/page.tsx` serve the same URL `/admin/groups`. Hardcoded `/admin/...` paths in router.push() calls are correct and do NOT break EE deployments. Do not flag hardcoded admin paths as bugs."
|
||||
},
|
||||
{
|
||||
"scope": ["web/**"],
|
||||
"rule": "In Onyx, each API key creates a unique user row in the database with a unique `user_id` (UUID). There is a 1:1 mapping between API keys and their backing user records. Multiple API keys do NOT share the same `user_id`. Do not flag potential duplicate row IDs when using `user_id` from API key descriptors."
|
||||
},
|
||||
{
|
||||
"scope": ["backend/**/*.py"],
|
||||
"rule": "Never raise HTTPException directly in business code. Use `raise OnyxError(OnyxErrorCode.XXX, \"message\")` from `onyx.error_handling.exceptions`. A global FastAPI exception handler converts OnyxError into structured JSON responses with {\"error_code\": \"...\", \"detail\": \"...\"}. Error codes are defined in `onyx.error_handling.error_codes.OnyxErrorCode`. For upstream errors with dynamic HTTP status codes, use `status_code_override`: `raise OnyxError(OnyxErrorCode.BAD_GATEWAY, detail, status_code_override=upstream_status)`."
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
import "@opal/components/tooltip.css";
|
||||
import { Interactive, type InteractiveStatelessProps } from "@opal/core";
|
||||
import {
|
||||
Disabled,
|
||||
Interactive,
|
||||
type InteractiveStatelessProps,
|
||||
} from "@opal/core";
|
||||
import type { ContainerSizeVariants, ExtremaSizeVariants } from "@opal/types";
|
||||
import type { TooltipSide } from "@opal/components";
|
||||
import type { IconFunctionComponent } from "@opal/types";
|
||||
@@ -32,9 +36,6 @@ type ButtonProps = InteractiveStatelessProps &
|
||||
*/
|
||||
size?: ContainerSizeVariants;
|
||||
|
||||
/** HTML button type. When provided, Container renders a `<button>` element. */
|
||||
type?: "submit" | "button" | "reset";
|
||||
|
||||
/** Tooltip text shown on hover. */
|
||||
tooltip?: string;
|
||||
|
||||
@@ -43,6 +44,9 @@ type ButtonProps = InteractiveStatelessProps &
|
||||
|
||||
/** Which side the tooltip appears on. */
|
||||
tooltipSide?: TooltipSide;
|
||||
|
||||
/** Wraps the button in a Disabled context. `false` overrides parent contexts. */
|
||||
disabled?: boolean;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -59,6 +63,7 @@ function Button({
|
||||
tooltip,
|
||||
tooltipSide = "top",
|
||||
responsiveHideText = false,
|
||||
disabled,
|
||||
...interactiveProps
|
||||
}: ButtonProps) {
|
||||
const isLarge = size === "lg";
|
||||
@@ -76,7 +81,7 @@ function Button({
|
||||
) : null;
|
||||
|
||||
const button = (
|
||||
<Interactive.Stateless {...interactiveProps}>
|
||||
<Interactive.Stateless type={type} {...interactiveProps}>
|
||||
<Interactive.Container
|
||||
type={type}
|
||||
border={interactiveProps.prominence === "secondary"}
|
||||
@@ -102,9 +107,7 @@ function Button({
|
||||
</Interactive.Stateless>
|
||||
);
|
||||
|
||||
if (!tooltip) return button;
|
||||
|
||||
return (
|
||||
const result = tooltip ? (
|
||||
<TooltipPrimitive.Root>
|
||||
<TooltipPrimitive.Trigger asChild>{button}</TooltipPrimitive.Trigger>
|
||||
<TooltipPrimitive.Portal>
|
||||
@@ -117,7 +120,15 @@ function Button({
|
||||
</TooltipPrimitive.Content>
|
||||
</TooltipPrimitive.Portal>
|
||||
</TooltipPrimitive.Root>
|
||||
) : (
|
||||
button
|
||||
);
|
||||
|
||||
if (disabled != null) {
|
||||
return <Disabled disabled={disabled}>{result}</Disabled>;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export { Button, type ButtonProps };
|
||||
|
||||
8
web/lib/opal/src/components/buttons/chevron.css
Normal file
8
web/lib/opal/src/components/buttons/chevron.css
Normal file
@@ -0,0 +1,8 @@
|
||||
.opal-button-chevron {
|
||||
transition: rotate 200ms ease;
|
||||
}
|
||||
|
||||
.interactive[data-interaction="hover"] .opal-button-chevron,
|
||||
.interactive[data-interaction="active"] .opal-button-chevron {
|
||||
rotate: -180deg;
|
||||
}
|
||||
22
web/lib/opal/src/components/buttons/chevron.tsx
Normal file
22
web/lib/opal/src/components/buttons/chevron.tsx
Normal file
@@ -0,0 +1,22 @@
|
||||
import "@opal/components/buttons/chevron.css";
|
||||
import type { IconProps } from "@opal/types";
|
||||
import { SvgChevronDownSmall } from "@opal/icons";
|
||||
import { cn } from "@opal/utils";
|
||||
|
||||
/**
|
||||
* Chevron icon that rotates 180° when its parent `.interactive` enters
|
||||
* hover / active state. Shared by OpenButton, FilterButton, and any
|
||||
* future button that needs an animated dropdown indicator.
|
||||
*
|
||||
* Stable component identity — never causes React to remount the SVG.
|
||||
*/
|
||||
function ChevronIcon({ className, ...props }: IconProps) {
|
||||
return (
|
||||
<SvgChevronDownSmall
|
||||
className={cn(className, "opal-button-chevron")}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export { ChevronIcon };
|
||||
@@ -0,0 +1,107 @@
|
||||
import type { Meta, StoryObj } from "@storybook/react";
|
||||
import { FilterButton } from "@opal/components";
|
||||
import { Disabled as DisabledProvider } from "@opal/core";
|
||||
import { SvgUser, SvgActions, SvgTag } from "@opal/icons";
|
||||
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
|
||||
|
||||
const meta: Meta<typeof FilterButton> = {
|
||||
title: "opal/components/FilterButton",
|
||||
component: FilterButton,
|
||||
tags: ["autodocs"],
|
||||
decorators: [
|
||||
(Story) => (
|
||||
<TooltipPrimitive.Provider>
|
||||
<Story />
|
||||
</TooltipPrimitive.Provider>
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<typeof FilterButton>;
|
||||
|
||||
export const Empty: Story = {
|
||||
args: {
|
||||
icon: SvgUser,
|
||||
children: "Everyone",
|
||||
},
|
||||
};
|
||||
|
||||
export const Active: Story = {
|
||||
args: {
|
||||
icon: SvgUser,
|
||||
active: true,
|
||||
children: "By alice@example.com",
|
||||
onClear: () => console.log("clear"),
|
||||
},
|
||||
};
|
||||
|
||||
export const Open: Story = {
|
||||
args: {
|
||||
icon: SvgActions,
|
||||
interaction: "hover",
|
||||
children: "All Actions",
|
||||
},
|
||||
};
|
||||
|
||||
export const ActiveOpen: Story = {
|
||||
args: {
|
||||
icon: SvgActions,
|
||||
active: true,
|
||||
interaction: "hover",
|
||||
children: "2 selected",
|
||||
onClear: () => console.log("clear"),
|
||||
},
|
||||
};
|
||||
|
||||
export const Disabled: Story = {
|
||||
args: {
|
||||
icon: SvgTag,
|
||||
children: "All Tags",
|
||||
},
|
||||
decorators: [
|
||||
(Story) => (
|
||||
<DisabledProvider disabled>
|
||||
<Story />
|
||||
</DisabledProvider>
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export const DisabledActive: Story = {
|
||||
args: {
|
||||
icon: SvgTag,
|
||||
active: true,
|
||||
children: "2 tags",
|
||||
onClear: () => console.log("clear"),
|
||||
},
|
||||
decorators: [
|
||||
(Story) => (
|
||||
<DisabledProvider disabled>
|
||||
<Story />
|
||||
</DisabledProvider>
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export const StateComparison: Story = {
|
||||
render: () => (
|
||||
<div style={{ display: "flex", gap: 12, alignItems: "center" }}>
|
||||
<FilterButton icon={SvgUser} onClear={() => undefined}>
|
||||
Everyone
|
||||
</FilterButton>
|
||||
<FilterButton icon={SvgUser} active onClear={() => console.log("clear")}>
|
||||
By alice@example.com
|
||||
</FilterButton>
|
||||
</div>
|
||||
),
|
||||
};
|
||||
|
||||
export const WithTooltip: Story = {
|
||||
args: {
|
||||
icon: SvgUser,
|
||||
children: "Everyone",
|
||||
tooltip: "Filter by creator",
|
||||
tooltipSide: "bottom",
|
||||
},
|
||||
};
|
||||
70
web/lib/opal/src/components/buttons/filter-button/README.md
Normal file
70
web/lib/opal/src/components/buttons/filter-button/README.md
Normal file
@@ -0,0 +1,70 @@
|
||||
# FilterButton
|
||||
|
||||
**Import:** `import { FilterButton, type FilterButtonProps } from "@opal/components";`
|
||||
|
||||
A stateful filter trigger with a built-in chevron (when empty) and a clear button (when selected). Hardcodes `variant="select-filter"` and delegates to `Interactive.Stateful`, adding automatic open-state detection from Radix `data-state`. Designed to sit inside a `Popover.Trigger` for filter dropdowns.
|
||||
|
||||
## Relationship to OpenButton
|
||||
|
||||
FilterButton shares a similar call stack to `OpenButton`:
|
||||
|
||||
```
|
||||
Interactive.Stateful → Interactive.Container → content row (icon + label + trailing indicator)
|
||||
```
|
||||
|
||||
FilterButton is a **narrower, filter-specific** variant:
|
||||
|
||||
- It hardcodes `variant="select-filter"` (OpenButton uses `"select-heavy"`)
|
||||
- It exposes `active?: boolean` instead of the raw `state` prop (maps to `"selected"` / `"empty"` internally)
|
||||
- When active, the chevron is hidden via `visibility` and an absolutely-positioned clear `Button` with `prominence="tertiary"` overlays it — placed as a sibling outside the `<button>` to avoid nesting buttons
|
||||
- It uses the shared `ChevronIcon` from `buttons/chevron` (same as OpenButton)
|
||||
- It does not support `foldable`, `size`, or `width` — it is always `"lg"`
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
div.relative <- bounding wrapper
|
||||
Interactive.Stateful <- variant="select-filter", interaction, state
|
||||
└─ Interactive.Container (button) <- height="lg", default rounding/padding
|
||||
└─ div.interactive-foreground
|
||||
├─ div > Icon (interactive-foreground-icon)
|
||||
├─ <span> label text
|
||||
└─ ChevronIcon (when empty)
|
||||
OR spacer div (when selected — reserves chevron space)
|
||||
div.absolute <- clear Button overlay (when selected)
|
||||
└─ Button (SvgX, size="2xs", prominence="tertiary")
|
||||
```
|
||||
|
||||
- **Open-state detection** reads `data-state="open"` injected by Radix triggers (e.g. `Popover.Trigger`), falling back to the explicit `interaction` prop.
|
||||
- **Chevron rotation** uses the shared `ChevronIcon` component and `buttons/chevron.css`, which rotates 180deg when `data-interaction="hover"`.
|
||||
- **Clear button** is absolutely positioned outside the `<button>` element tree to avoid invalid nested `<button>` elements. An invisible spacer inside the button reserves the same space so layout doesn't shift between states.
|
||||
|
||||
## Props
|
||||
|
||||
| Prop | Type | Default | Description |
|
||||
|------|------|---------|-------------|
|
||||
| `icon` | `IconFunctionComponent` | **required** | Left icon component |
|
||||
| `children` | `string` | **required** | Label text between icon and trailing indicator |
|
||||
| `active` | `boolean` | `false` | Whether the filter has an active selection |
|
||||
| `onClear` | `() => void` | **required** | Called when the clear (X) button is clicked |
|
||||
| `interaction` | `"rest" \| "hover" \| "active"` | auto | JS-controlled interaction override. Falls back to Radix `data-state="open"`. |
|
||||
| `tooltip` | `string` | — | Tooltip text shown on hover |
|
||||
| `tooltipSide` | `TooltipSide` | `"top"` | Which side the tooltip appears on |
|
||||
|
||||
## Usage
|
||||
|
||||
```tsx
|
||||
import { FilterButton } from "@opal/components";
|
||||
import { SvgUser } from "@opal/icons";
|
||||
|
||||
// Inside a Popover (auto-detects open state)
|
||||
<Popover.Trigger asChild>
|
||||
<FilterButton
|
||||
icon={SvgUser}
|
||||
active={hasSelection}
|
||||
onClear={() => clearSelection()}
|
||||
>
|
||||
{hasSelection ? selectionLabel : "Everyone"}
|
||||
</FilterButton>
|
||||
</Popover.Trigger>
|
||||
```
|
||||
120
web/lib/opal/src/components/buttons/filter-button/components.tsx
Normal file
120
web/lib/opal/src/components/buttons/filter-button/components.tsx
Normal file
@@ -0,0 +1,120 @@
|
||||
import {
|
||||
Interactive,
|
||||
type InteractiveStatefulInteraction,
|
||||
type InteractiveStatefulProps,
|
||||
} from "@opal/core";
|
||||
import type { TooltipSide } from "@opal/components";
|
||||
import type { IconFunctionComponent } from "@opal/types";
|
||||
import { SvgX } from "@opal/icons";
|
||||
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
|
||||
import { iconWrapper } from "@opal/components/buttons/icon-wrapper";
|
||||
import { ChevronIcon } from "@opal/components/buttons/chevron";
|
||||
import { Button } from "@opal/components/buttons/button/components";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
interface FilterButtonProps
|
||||
extends Omit<InteractiveStatefulProps, "variant" | "state"> {
|
||||
/** Left icon — always visible. */
|
||||
icon: IconFunctionComponent;
|
||||
|
||||
/** Label text between icon and trailing indicator. */
|
||||
children: string;
|
||||
|
||||
/** Whether the filter has an active selection. @default false */
|
||||
active?: boolean;
|
||||
|
||||
/** Called when the clear (X) button is clicked in active state. */
|
||||
onClear: () => void;
|
||||
|
||||
/** Tooltip text shown on hover. */
|
||||
tooltip?: string;
|
||||
|
||||
/** Which side the tooltip appears on. */
|
||||
tooltipSide?: TooltipSide;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// FilterButton
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function FilterButton({
|
||||
icon: Icon,
|
||||
children,
|
||||
onClear,
|
||||
tooltip,
|
||||
tooltipSide = "top",
|
||||
active = false,
|
||||
interaction,
|
||||
...statefulProps
|
||||
}: FilterButtonProps) {
|
||||
// Derive open state: explicit prop > Radix data-state (injected via Slot chain)
|
||||
const dataState = (statefulProps as Record<string, unknown>)["data-state"] as
|
||||
| string
|
||||
| undefined;
|
||||
const resolvedInteraction: InteractiveStatefulInteraction =
|
||||
interaction ?? (dataState === "open" ? "hover" : "rest");
|
||||
|
||||
const button = (
|
||||
<div className="relative">
|
||||
<Interactive.Stateful
|
||||
{...statefulProps}
|
||||
variant="select-filter"
|
||||
interaction={resolvedInteraction}
|
||||
state={active ? "selected" : "empty"}
|
||||
>
|
||||
<Interactive.Container type="button">
|
||||
<div className="interactive-foreground flex flex-row items-center gap-1">
|
||||
{iconWrapper(Icon, "lg", true)}
|
||||
<span className="whitespace-nowrap font-main-ui-action">
|
||||
{children}
|
||||
</span>
|
||||
<div style={{ visibility: active ? "hidden" : "visible" }}>
|
||||
{iconWrapper(ChevronIcon, "lg", true)}
|
||||
</div>
|
||||
</div>
|
||||
</Interactive.Container>
|
||||
</Interactive.Stateful>
|
||||
|
||||
{active && (
|
||||
<div className="absolute right-2 top-1/2 -translate-y-1/2">
|
||||
{/* Force hover state so the X stays visually prominent against
|
||||
the inverted selected background — without this it renders
|
||||
dimmed and looks disabled. */}
|
||||
<Button
|
||||
icon={SvgX}
|
||||
size="2xs"
|
||||
prominence="tertiary"
|
||||
tooltip="Clear filter"
|
||||
interaction="hover"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onClear();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
|
||||
if (!tooltip) return button;
|
||||
|
||||
return (
|
||||
<TooltipPrimitive.Root>
|
||||
<TooltipPrimitive.Trigger asChild>{button}</TooltipPrimitive.Trigger>
|
||||
<TooltipPrimitive.Portal>
|
||||
<TooltipPrimitive.Content
|
||||
className="opal-tooltip"
|
||||
side={tooltipSide}
|
||||
sideOffset={4}
|
||||
>
|
||||
{tooltip}
|
||||
</TooltipPrimitive.Content>
|
||||
</TooltipPrimitive.Portal>
|
||||
</TooltipPrimitive.Root>
|
||||
);
|
||||
}
|
||||
|
||||
export { FilterButton, type FilterButtonProps };
|
||||
@@ -1,8 +1,5 @@
|
||||
import "@opal/components/tooltip.css";
|
||||
import {
|
||||
Interactive,
|
||||
type InteractiveStatefulState,
|
||||
type InteractiveStatefulInteraction,
|
||||
type InteractiveStatefulProps,
|
||||
InteractiveContainerRoundingVariant,
|
||||
} from "@opal/core";
|
||||
@@ -22,40 +19,26 @@ type ContentPassthroughProps = DistributiveOmit<
|
||||
"paddingVariant" | "widthVariant" | "ref" | "withInteractive"
|
||||
>;
|
||||
|
||||
type LineItemButtonOwnProps = {
|
||||
type LineItemButtonOwnProps = Pick<
|
||||
InteractiveStatefulProps,
|
||||
| "state"
|
||||
| "interaction"
|
||||
| "onClick"
|
||||
| "href"
|
||||
| "target"
|
||||
| "group"
|
||||
| "ref"
|
||||
| "type"
|
||||
> & {
|
||||
/** Interactive select variant. @default "select-light" */
|
||||
selectVariant?: "select-light" | "select-heavy";
|
||||
|
||||
/** Value state. @default "empty" */
|
||||
state?: InteractiveStatefulState;
|
||||
|
||||
/** JS-controllable interaction state override. @default "rest" */
|
||||
interaction?: InteractiveStatefulInteraction;
|
||||
|
||||
/** Click handler. */
|
||||
onClick?: InteractiveStatefulProps["onClick"];
|
||||
|
||||
/** When provided, renders an anchor instead of a div. */
|
||||
href?: string;
|
||||
|
||||
/** Anchor target (e.g. "_blank"). */
|
||||
target?: string;
|
||||
|
||||
/** Interactive group key. */
|
||||
group?: string;
|
||||
|
||||
/** Forwarded ref. */
|
||||
ref?: React.Ref<HTMLElement>;
|
||||
|
||||
/** Corner rounding preset (height is always content-driven). @default "default" */
|
||||
roundingVariant?: InteractiveContainerRoundingVariant;
|
||||
|
||||
/** Container width. @default "full" */
|
||||
width?: ExtremaSizeVariants;
|
||||
|
||||
/** HTML button type. @default "button" */
|
||||
type?: "submit" | "button" | "reset";
|
||||
|
||||
/** Tooltip text shown on hover. */
|
||||
tooltip?: string;
|
||||
|
||||
@@ -79,11 +62,11 @@ function LineItemButton({
|
||||
target,
|
||||
group,
|
||||
ref,
|
||||
type = "button",
|
||||
|
||||
// Sizing
|
||||
roundingVariant = "default",
|
||||
width = "full",
|
||||
type = "button",
|
||||
tooltip,
|
||||
tooltipSide = "top",
|
||||
|
||||
|
||||
@@ -40,13 +40,6 @@ export const Open: Story = {
|
||||
},
|
||||
};
|
||||
|
||||
export const Disabled: Story = {
|
||||
args: {
|
||||
disabled: true,
|
||||
children: "Disabled",
|
||||
},
|
||||
};
|
||||
|
||||
export const Foldable: Story = {
|
||||
args: {
|
||||
foldable: true,
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import "@opal/components/buttons/open-button/styles.css";
|
||||
import "@opal/components/tooltip.css";
|
||||
import {
|
||||
Interactive,
|
||||
useDisabled,
|
||||
@@ -9,24 +7,11 @@ import {
|
||||
import type { ContainerSizeVariants, ExtremaSizeVariants } from "@opal/types";
|
||||
import type { InteractiveContainerRoundingVariant } from "@opal/core";
|
||||
import type { TooltipSide } from "@opal/components";
|
||||
import type { IconFunctionComponent, IconProps } from "@opal/types";
|
||||
import { SvgChevronDownSmall } from "@opal/icons";
|
||||
import type { IconFunctionComponent } from "@opal/types";
|
||||
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
|
||||
import { cn } from "@opal/utils";
|
||||
import { iconWrapper } from "@opal/components/buttons/icon-wrapper";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Chevron (stable identity — never causes React to remount the SVG)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function ChevronIcon({ className, ...props }: IconProps) {
|
||||
return (
|
||||
<SvgChevronDownSmall
|
||||
className={cn(className, "opal-open-button-chevron")}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
}
|
||||
import { ChevronIcon } from "@opal/components/buttons/chevron";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
.opal-open-button-chevron {
|
||||
transition: rotate 200ms ease;
|
||||
}
|
||||
|
||||
.interactive[data-interaction="hover"] .opal-open-button-chevron,
|
||||
.interactive[data-interaction="active"] .opal-open-button-chevron {
|
||||
rotate: -180deg;
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
import "@opal/components/buttons/select-button/styles.css";
|
||||
import "@opal/components/tooltip.css";
|
||||
import {
|
||||
Interactive,
|
||||
useDisabled,
|
||||
@@ -50,9 +49,6 @@ type SelectButtonProps = InteractiveStatefulProps &
|
||||
*/
|
||||
size?: ContainerSizeVariants;
|
||||
|
||||
/** HTML button type. Container renders a `<button>` element. */
|
||||
type?: "submit" | "button" | "reset";
|
||||
|
||||
/** Tooltip text shown on hover. */
|
||||
tooltip?: string;
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import "@opal/components/tooltip.css";
|
||||
|
||||
/* Shared types */
|
||||
export type TooltipSide = "top" | "bottom" | "left" | "right";
|
||||
|
||||
@@ -19,6 +21,12 @@ export {
|
||||
type OpenButtonProps,
|
||||
} from "@opal/components/buttons/open-button/components";
|
||||
|
||||
/* FilterButton */
|
||||
export {
|
||||
FilterButton,
|
||||
type FilterButtonProps,
|
||||
} from "@opal/components/buttons/filter-button/components";
|
||||
|
||||
/* LineItemButton */
|
||||
export {
|
||||
LineItemButton,
|
||||
|
||||
@@ -32,7 +32,13 @@ function ColumnVisibilityPopover<TData extends RowData>({
|
||||
// User-defined columns only (exclude internal qualifier/actions)
|
||||
const dataColumns = table
|
||||
.getAllLeafColumns()
|
||||
.filter((col) => !col.id.startsWith("__") && col.id !== "qualifier");
|
||||
.filter(
|
||||
(col) =>
|
||||
!col.id.startsWith("__") &&
|
||||
col.id !== "qualifier" &&
|
||||
typeof col.columnDef.header === "string" &&
|
||||
col.columnDef.header.trim() !== ""
|
||||
);
|
||||
|
||||
return (
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
|
||||
@@ -88,9 +88,12 @@ function HoverableRoot({
|
||||
ref,
|
||||
onMouseEnter: consumerMouseEnter,
|
||||
onMouseLeave: consumerMouseLeave,
|
||||
onFocusCapture: consumerFocusCapture,
|
||||
onBlurCapture: consumerBlurCapture,
|
||||
...props
|
||||
}: HoverableRootProps) {
|
||||
const [hovered, setHovered] = useState(false);
|
||||
const [focused, setFocused] = useState(false);
|
||||
|
||||
const onMouseEnter = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement>) => {
|
||||
@@ -108,16 +111,40 @@ function HoverableRoot({
|
||||
[consumerMouseLeave]
|
||||
);
|
||||
|
||||
const onFocusCapture = useCallback(
|
||||
(e: React.FocusEvent<HTMLDivElement>) => {
|
||||
setFocused(true);
|
||||
consumerFocusCapture?.(e);
|
||||
},
|
||||
[consumerFocusCapture]
|
||||
);
|
||||
|
||||
const onBlurCapture = useCallback(
|
||||
(e: React.FocusEvent<HTMLDivElement>) => {
|
||||
if (
|
||||
!(e.relatedTarget instanceof Node) ||
|
||||
!e.currentTarget.contains(e.relatedTarget)
|
||||
) {
|
||||
setFocused(false);
|
||||
}
|
||||
consumerBlurCapture?.(e);
|
||||
},
|
||||
[consumerBlurCapture]
|
||||
);
|
||||
|
||||
const active = hovered || focused;
|
||||
const GroupContext = getOrCreateContext(group);
|
||||
|
||||
return (
|
||||
<GroupContext.Provider value={hovered}>
|
||||
<GroupContext.Provider value={active}>
|
||||
<div
|
||||
{...props}
|
||||
ref={ref}
|
||||
className={cn(widthVariants[widthVariant])}
|
||||
onMouseEnter={onMouseEnter}
|
||||
onMouseLeave={onMouseLeave}
|
||||
onFocusCapture={onFocusCapture}
|
||||
onBlurCapture={onBlurCapture}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
|
||||
@@ -16,3 +16,15 @@
|
||||
.hoverable-item[data-hoverable-variant="opacity-on-hover"][data-hoverable-local="true"]:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* Focus — item (or a focusable descendant) receives keyboard focus */
|
||||
.hoverable-item[data-hoverable-variant="opacity-on-hover"]:has(:focus-visible) {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* Focus ring on keyboard focus */
|
||||
.hoverable-item:focus-visible {
|
||||
outline: 2px solid var(--border-04);
|
||||
outline-offset: 2px;
|
||||
border-radius: 0.25rem;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { Route } from "next";
|
||||
import "@opal/core/interactive/shared.css";
|
||||
import React from "react";
|
||||
import { cn } from "@opal/utils";
|
||||
import type { WithoutStyles } from "@opal/types";
|
||||
import type { ButtonType, WithoutStyles } from "@opal/types";
|
||||
import {
|
||||
containerSizeVariants,
|
||||
type ContainerSizeVariants,
|
||||
@@ -52,7 +52,7 @@ interface InteractiveContainerProps
|
||||
*
|
||||
* Mutually exclusive with `href`.
|
||||
*/
|
||||
type?: "submit" | "button" | "reset";
|
||||
type?: ButtonType;
|
||||
|
||||
/**
|
||||
* When `true`, applies a 1px border using the theme's border color.
|
||||
|
||||
@@ -8,7 +8,7 @@ Stateful interactive surface primitive for elements that maintain a value state
|
||||
|
||||
| Prop | Type | Default | Description |
|
||||
|------|------|---------|-------------|
|
||||
| `variant` | `"select-light" \| "select-heavy" \| "sidebar"` | `"select-heavy"` | Color variant |
|
||||
| `variant` | `"select-light" \| "select-heavy" \| "select-tinted" \| "select-filter" \| "sidebar"` | `"select-heavy"` | Color variant |
|
||||
| `state` | `"empty" \| "filled" \| "selected"` | `"empty"` | Current value state |
|
||||
| `interaction` | `"rest" \| "hover" \| "active"` | `"rest"` | JS-controlled interaction override |
|
||||
| `group` | `string` | — | Tailwind group class for `group-hover:*` |
|
||||
|
||||
@@ -4,7 +4,7 @@ import React from "react";
|
||||
import { Slot } from "@radix-ui/react-slot";
|
||||
import { cn } from "@opal/utils";
|
||||
import { useDisabled } from "@opal/core/disabled/components";
|
||||
import type { WithoutStyles } from "@opal/types";
|
||||
import type { ButtonType, WithoutStyles } from "@opal/types";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
@@ -14,6 +14,7 @@ type InteractiveStatefulVariant =
|
||||
| "select-light"
|
||||
| "select-heavy"
|
||||
| "select-tinted"
|
||||
| "select-filter"
|
||||
| "sidebar";
|
||||
type InteractiveStatefulState = "empty" | "filled" | "selected";
|
||||
type InteractiveStatefulInteraction = "rest" | "hover" | "active";
|
||||
@@ -30,6 +31,8 @@ interface InteractiveStatefulProps
|
||||
*
|
||||
* - `"select-light"` — transparent selected background (for inline toggles)
|
||||
* - `"select-heavy"` — tinted selected background (for list rows, model pickers)
|
||||
* - `"select-tinted"` — like select-heavy but with a tinted rest background
|
||||
* - `"select-filter"` — like select-tinted for empty/filled; selected state uses inverted tint backgrounds and inverted text (for filter buttons)
|
||||
* - `"sidebar"` — for sidebar navigation items
|
||||
*
|
||||
* @default "select-heavy"
|
||||
@@ -63,6 +66,13 @@ interface InteractiveStatefulProps
|
||||
*/
|
||||
group?: string;
|
||||
|
||||
/**
|
||||
* HTML button type. When set to `"submit"`, `"button"`, or `"reset"`, the
|
||||
* element is treated as inherently interactive for cursor styling purposes
|
||||
* even without an explicit `onClick` or `href`.
|
||||
*/
|
||||
type?: ButtonType;
|
||||
|
||||
/**
|
||||
* URL to navigate to when clicked. Passed through Slot to the child.
|
||||
*/
|
||||
@@ -94,6 +104,7 @@ function InteractiveStateful({
|
||||
state = "empty",
|
||||
interaction = "rest",
|
||||
group,
|
||||
type,
|
||||
href,
|
||||
target,
|
||||
...props
|
||||
@@ -104,7 +115,7 @@ function InteractiveStateful({
|
||||
// so Radix Slot-injected handlers don't bypass this guard.
|
||||
const classes = cn(
|
||||
"interactive",
|
||||
!props.onClick && !href && "!cursor-default !select-auto",
|
||||
!props.onClick && !href && !type && "!cursor-default !select-auto",
|
||||
group
|
||||
);
|
||||
|
||||
|
||||
@@ -308,6 +308,89 @@
|
||||
--interactive-foreground-icon: var(--action-link-03);
|
||||
}
|
||||
|
||||
/* ===========================================================================
|
||||
Select-Filter — empty/filled identical to Select-Tinted;
|
||||
selected uses inverted tint backgrounds and inverted text
|
||||
=========================================================================== */
|
||||
|
||||
/* ---------------------------------------------------------------------------
|
||||
Select-Filter — Empty & Filled (identical colors)
|
||||
--------------------------------------------------------------------------- */
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
) {
|
||||
@apply bg-background-tint-01;
|
||||
--interactive-foreground: var(--text-02);
|
||||
--interactive-foreground-icon: var(--text-02);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
):hover:not([data-disabled]),
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
)[data-interaction="hover"]:not([data-disabled]) {
|
||||
@apply bg-background-tint-02;
|
||||
--interactive-foreground: var(--text-04);
|
||||
--interactive-foreground-icon: var(--text-04);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
):active:not([data-disabled]),
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
)[data-interaction="active"]:not([data-disabled]) {
|
||||
@apply bg-background-neutral-00;
|
||||
--interactive-foreground: var(--text-05);
|
||||
--interactive-foreground-icon: var(--text-05);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"]:is(
|
||||
[data-interactive-state="empty"],
|
||||
[data-interactive-state="filled"]
|
||||
)[data-disabled] {
|
||||
@apply bg-transparent;
|
||||
--interactive-foreground: var(--text-01);
|
||||
--interactive-foreground-icon: var(--text-01);
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------------
|
||||
Select-Filter — Selected
|
||||
--------------------------------------------------------------------------- */
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"] {
|
||||
@apply bg-background-tint-inverted-03;
|
||||
--interactive-foreground: var(--text-inverted-05);
|
||||
--interactive-foreground-icon: var(--text-inverted-05);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"]:hover:not(
|
||||
[data-disabled]
|
||||
),
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"][data-interaction="hover"]:not(
|
||||
[data-disabled]
|
||||
) {
|
||||
@apply bg-background-tint-inverted-04;
|
||||
--interactive-foreground: var(--text-inverted-05);
|
||||
--interactive-foreground-icon: var(--text-inverted-05);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"]:active:not(
|
||||
[data-disabled]
|
||||
),
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"][data-interaction="active"]:not(
|
||||
[data-disabled]
|
||||
) {
|
||||
@apply bg-background-tint-inverted-04;
|
||||
--interactive-foreground: var(--text-inverted-04);
|
||||
--interactive-foreground-icon: var(--text-inverted-04);
|
||||
}
|
||||
.interactive[data-interactive-variant="select-filter"][data-interactive-state="selected"][data-disabled] {
|
||||
@apply bg-background-neutral-04;
|
||||
--interactive-foreground: var(--text-inverted-04);
|
||||
--interactive-foreground-icon: var(--text-inverted-02);
|
||||
}
|
||||
|
||||
/* ===========================================================================
|
||||
Sidebar
|
||||
=========================================================================== */
|
||||
|
||||
@@ -4,7 +4,7 @@ import React from "react";
|
||||
import { Slot } from "@radix-ui/react-slot";
|
||||
import { cn } from "@opal/utils";
|
||||
import { useDisabled } from "@opal/core/disabled/components";
|
||||
import type { WithoutStyles } from "@opal/types";
|
||||
import type { ButtonType, WithoutStyles } from "@opal/types";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
@@ -53,6 +53,13 @@ interface InteractiveStatelessProps
|
||||
*/
|
||||
group?: string;
|
||||
|
||||
/**
|
||||
* HTML button type. When set to `"submit"`, `"button"`, or `"reset"`, the
|
||||
* element is treated as inherently interactive for cursor styling purposes
|
||||
* even without an explicit `onClick` or `href`.
|
||||
*/
|
||||
type?: ButtonType;
|
||||
|
||||
/**
|
||||
* URL to navigate to when clicked. Passed through Slot to the child.
|
||||
*/
|
||||
@@ -85,6 +92,7 @@ function InteractiveStateless({
|
||||
prominence = "primary",
|
||||
interaction = "rest",
|
||||
group,
|
||||
type,
|
||||
href,
|
||||
target,
|
||||
...props
|
||||
@@ -95,7 +103,7 @@ function InteractiveStateless({
|
||||
// so Radix Slot-injected handlers don't bypass this guard.
|
||||
const classes = cn(
|
||||
"interactive",
|
||||
!props.onClick && !href && "!cursor-default !select-auto",
|
||||
!props.onClick && !href && !type && "!cursor-default !select-auto",
|
||||
group
|
||||
);
|
||||
|
||||
|
||||
20
web/lib/opal/src/icons/eye-off.tsx
Normal file
20
web/lib/opal/src/icons/eye-off.tsx
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { IconProps } from "@opal/types";
|
||||
const SvgEyeOff = ({ size, ...props }: IconProps) => (
|
||||
<svg
|
||||
width={size}
|
||||
height={size}
|
||||
viewBox="0 0 16 16"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
stroke="currentColor"
|
||||
{...props}
|
||||
>
|
||||
<path
|
||||
d="M11.78 11.78C10.6922 12.6092 9.36761 13.0685 8 13.0909C3.54545 13.0909 1 8 1 8C1.79157 6.52484 2.88945 5.23602 4.22 4.22M11.78 11.78L9.34909 9.34909M11.78 11.78L15 15M4.22 4.22L1 1M4.22 4.22L6.65091 6.65091M6.66364 3.06182C7.10167 2.95929 7.55013 2.90803 8 2.90909C12.4545 2.90909 15 8 15 8C14.6137 8.72266 14.153 9.40301 13.6255 10.03M9.34909 9.34909L6.65091 6.65091M9.34909 9.34909C8.99954 9.72422 8.49873 9.94737 7.98606 9.95641C6.922 9.97519 6.02481 9.078 6.04358 8.01394C6.05263 7.50127 6.27578 7.00046 6.65091 6.65091"
|
||||
strokeWidth={1.5}
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
export default SvgEyeOff;
|
||||
@@ -68,6 +68,7 @@ export { default as SvgExpand } from "@opal/icons/expand";
|
||||
export { default as SvgExternalLink } from "@opal/icons/external-link";
|
||||
export { default as SvgEye } from "@opal/icons/eye";
|
||||
export { default as SvgEyeClosed } from "@opal/icons/eye-closed";
|
||||
export { default as SvgEyeOff } from "@opal/icons/eye-off";
|
||||
export { default as SvgFiles } from "@opal/icons/files";
|
||||
export { default as SvgFileBraces } from "@opal/icons/file-braces";
|
||||
export { default as SvgFileChartPie } from "@opal/icons/file-chart-pie";
|
||||
|
||||
@@ -32,6 +32,8 @@ interface ContentMdPresetConfig {
|
||||
optionalFont: string;
|
||||
/** Aux icon size = lineHeight − 2 × p-0.5. */
|
||||
auxIconSize: string;
|
||||
/** Left indent for the description so it aligns with the title (past the icon). */
|
||||
descriptionIndent: string;
|
||||
}
|
||||
|
||||
interface ContentMdProps {
|
||||
@@ -85,6 +87,7 @@ const CONTENT_MD_PRESETS: Record<ContentMdSizePreset, ContentMdPresetConfig> = {
|
||||
editButtonPadding: "p-0",
|
||||
optionalFont: "font-main-content-muted",
|
||||
auxIconSize: "1.25rem",
|
||||
descriptionIndent: "1.625rem",
|
||||
},
|
||||
"main-ui": {
|
||||
iconSize: "1rem",
|
||||
@@ -97,6 +100,7 @@ const CONTENT_MD_PRESETS: Record<ContentMdSizePreset, ContentMdPresetConfig> = {
|
||||
editButtonPadding: "p-0",
|
||||
optionalFont: "font-main-ui-muted",
|
||||
auxIconSize: "1rem",
|
||||
descriptionIndent: "1.375rem",
|
||||
},
|
||||
secondary: {
|
||||
iconSize: "0.75rem",
|
||||
@@ -109,6 +113,7 @@ const CONTENT_MD_PRESETS: Record<ContentMdSizePreset, ContentMdPresetConfig> = {
|
||||
editButtonPadding: "p-0",
|
||||
optionalFont: "font-secondary-action",
|
||||
auxIconSize: "0.75rem",
|
||||
descriptionIndent: "1.125rem",
|
||||
},
|
||||
};
|
||||
|
||||
@@ -163,22 +168,25 @@ function ContentMd({
|
||||
data-interactive={withInteractive || undefined}
|
||||
style={{ gap: config.gap }}
|
||||
>
|
||||
{Icon && (
|
||||
<div
|
||||
className={cn(
|
||||
"opal-content-md-icon-container shrink-0",
|
||||
config.iconContainerPadding
|
||||
)}
|
||||
style={{ minHeight: config.lineHeight }}
|
||||
>
|
||||
<Icon
|
||||
className={cn("opal-content-md-icon", config.iconColorClass)}
|
||||
style={{ width: config.iconSize, height: config.iconSize }}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className="opal-content-md-header"
|
||||
data-editing={editing || undefined}
|
||||
>
|
||||
{Icon && (
|
||||
<div
|
||||
className={cn(
|
||||
"opal-content-md-icon-container shrink-0",
|
||||
config.iconContainerPadding
|
||||
)}
|
||||
style={{ minHeight: config.lineHeight }}
|
||||
>
|
||||
<Icon
|
||||
className={cn("opal-content-md-icon", config.iconColorClass)}
|
||||
style={{ width: config.iconSize, height: config.iconSize }}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="opal-content-md-body">
|
||||
<div className="opal-content-md-title-row">
|
||||
{editing ? (
|
||||
<div className="opal-content-md-input-sizer">
|
||||
@@ -274,13 +282,16 @@ function ContentMd({
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{description && (
|
||||
<div className="opal-content-md-description font-secondary-body text-text-03">
|
||||
{description}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{description && (
|
||||
<div
|
||||
className="opal-content-md-description font-secondary-body text-text-03"
|
||||
style={Icon ? { paddingLeft: config.descriptionIndent } : undefined}
|
||||
>
|
||||
{description}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -224,7 +224,16 @@
|
||||
--------------------------------------------------------------------------- */
|
||||
|
||||
.opal-content-md {
|
||||
@apply flex flex-row items-start;
|
||||
@apply flex flex-col items-start;
|
||||
}
|
||||
|
||||
.opal-content-md-header {
|
||||
@apply flex flex-row items-center w-full;
|
||||
}
|
||||
|
||||
.opal-content-md-header[data-editing] {
|
||||
@apply rounded-08;
|
||||
box-shadow: inset 0 0 0 1px var(--border-02);
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------------
|
||||
@@ -237,15 +246,6 @@
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------------
|
||||
Body column
|
||||
--------------------------------------------------------------------------- */
|
||||
|
||||
.opal-content-md-body {
|
||||
@apply flex flex-1 flex-col items-start;
|
||||
min-width: 0.0625rem;
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------------
|
||||
Title row — title (or input) + edit button
|
||||
--------------------------------------------------------------------------- */
|
||||
@@ -267,6 +267,7 @@
|
||||
.opal-content-md-input-sizer {
|
||||
display: inline-grid;
|
||||
align-items: stretch;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.opal-content-md-input-sizer > * {
|
||||
|
||||
@@ -86,6 +86,15 @@ export interface IconProps extends SVGProps<SVGSVGElement> {
|
||||
/** Strips `className` and `style` from a props type to enforce design-system styling. */
|
||||
export type WithoutStyles<T> = Omit<T, "className" | "style">;
|
||||
|
||||
/**
|
||||
* HTML button `type` attribute values.
|
||||
*
|
||||
* Used by interactive primitives and button-like components to indicate that
|
||||
* the element is inherently interactive for cursor-styling purposes, even
|
||||
* without an explicit `onClick` or `href`.
|
||||
*/
|
||||
export type ButtonType = "submit" | "button" | "reset";
|
||||
|
||||
/** Like `Omit` but distributes over union types, preserving discriminated unions. */
|
||||
export type DistributiveOmit<T, K extends keyof any> = T extends any
|
||||
? Omit<T, K>
|
||||
|
||||
@@ -1,320 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import { Persona } from "./interfaces";
|
||||
import { useRouter } from "next/navigation";
|
||||
import Checkbox from "@/refresh-components/inputs/Checkbox";
|
||||
import { toast } from "@/hooks/useToast";
|
||||
import { useState, useMemo, useEffect } from "react";
|
||||
import { UniqueIdentifier } from "@dnd-kit/core";
|
||||
import { DraggableTable } from "@/components/table/DraggableTable";
|
||||
import {
|
||||
deletePersona,
|
||||
personaComparator,
|
||||
togglePersonaFeatured,
|
||||
togglePersonaVisibility,
|
||||
} from "./lib";
|
||||
import { FiEdit2 } from "react-icons/fi";
|
||||
import { useUser } from "@/providers/UserProvider";
|
||||
import { Button } from "@opal/components";
|
||||
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
|
||||
import { SvgAlertCircle, SvgTrash } from "@opal/icons";
|
||||
import type { Route } from "next";
|
||||
|
||||
function PersonaTypeDisplay({ persona }: { persona: Persona }) {
|
||||
if (persona.builtin_persona) {
|
||||
return <Text as="p">Built-In</Text>;
|
||||
}
|
||||
|
||||
if (persona.is_featured) {
|
||||
return <Text as="p">Featured</Text>;
|
||||
}
|
||||
|
||||
if (persona.is_public) {
|
||||
return <Text as="p">Public</Text>;
|
||||
}
|
||||
|
||||
if (persona.groups.length > 0 || persona.users.length > 0) {
|
||||
return <Text as="p">Shared</Text>;
|
||||
}
|
||||
|
||||
return (
|
||||
<Text as="p">Personal {persona.owner && <>({persona.owner.email})</>}</Text>
|
||||
);
|
||||
}
|
||||
|
||||
export function PersonasTable({
|
||||
personas,
|
||||
refreshPersonas,
|
||||
currentPage,
|
||||
pageSize,
|
||||
}: {
|
||||
personas: Persona[];
|
||||
refreshPersonas: () => void;
|
||||
currentPage: number;
|
||||
pageSize: number;
|
||||
}) {
|
||||
const router = useRouter();
|
||||
const { refreshUser, isAdmin } = useUser();
|
||||
|
||||
const editablePersonas = useMemo(() => {
|
||||
return personas.filter((p) => !p.builtin_persona);
|
||||
}, [personas]);
|
||||
|
||||
const editablePersonaIds = useMemo(() => {
|
||||
return new Set(editablePersonas.map((p) => p.id.toString()));
|
||||
}, [editablePersonas]);
|
||||
|
||||
const [finalPersonas, setFinalPersonas] = useState<Persona[]>([]);
|
||||
const [deleteModalOpen, setDeleteModalOpen] = useState(false);
|
||||
const [personaToDelete, setPersonaToDelete] = useState<Persona | null>(null);
|
||||
const [defaultModalOpen, setDefaultModalOpen] = useState(false);
|
||||
const [personaToToggleDefault, setPersonaToToggleDefault] =
|
||||
useState<Persona | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const editable = editablePersonas.sort(personaComparator);
|
||||
const nonEditable = personas
|
||||
.filter((p) => !editablePersonaIds.has(p.id.toString()))
|
||||
.sort(personaComparator);
|
||||
setFinalPersonas([...editable, ...nonEditable]);
|
||||
}, [editablePersonas, personas, editablePersonaIds]);
|
||||
|
||||
const updatePersonaOrder = async (orderedPersonaIds: UniqueIdentifier[]) => {
|
||||
const reorderedPersonas = orderedPersonaIds.map(
|
||||
(id) => personas.find((persona) => persona.id.toString() === id)!
|
||||
);
|
||||
|
||||
setFinalPersonas(reorderedPersonas);
|
||||
|
||||
// Calculate display_priority based on current page.
|
||||
// Page 1 (items 0-9): priorities 0-9
|
||||
// Page 2 (items 10-19): priorities 10-19, etc.
|
||||
const pageStartIndex = (currentPage - 1) * pageSize;
|
||||
const displayPriorityMap = new Map<UniqueIdentifier, number>();
|
||||
orderedPersonaIds.forEach((personaId, ind) => {
|
||||
displayPriorityMap.set(personaId, pageStartIndex + ind);
|
||||
});
|
||||
|
||||
const response = await fetch("/api/admin/agents/display-priorities", {
|
||||
method: "PATCH",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
display_priority_map: Object.fromEntries(displayPriorityMap),
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
toast.error(`Failed to update persona order - ${await response.text()}`);
|
||||
setFinalPersonas(personas);
|
||||
await refreshPersonas();
|
||||
return;
|
||||
}
|
||||
|
||||
await refreshPersonas();
|
||||
await refreshUser();
|
||||
};
|
||||
|
||||
const openDeleteModal = (persona: Persona) => {
|
||||
setPersonaToDelete(persona);
|
||||
setDeleteModalOpen(true);
|
||||
};
|
||||
|
||||
const closeDeleteModal = () => {
|
||||
setDeleteModalOpen(false);
|
||||
setPersonaToDelete(null);
|
||||
};
|
||||
|
||||
const handleDeletePersona = async () => {
|
||||
if (personaToDelete) {
|
||||
const response = await deletePersona(personaToDelete.id);
|
||||
if (response.ok) {
|
||||
refreshPersonas();
|
||||
closeDeleteModal();
|
||||
} else {
|
||||
toast.error(`Failed to delete persona - ${await response.text()}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const openDefaultModal = (persona: Persona) => {
|
||||
setPersonaToToggleDefault(persona);
|
||||
setDefaultModalOpen(true);
|
||||
};
|
||||
|
||||
const closeDefaultModal = () => {
|
||||
setDefaultModalOpen(false);
|
||||
setPersonaToToggleDefault(null);
|
||||
};
|
||||
|
||||
const handleToggleDefault = async () => {
|
||||
if (personaToToggleDefault) {
|
||||
const response = await togglePersonaFeatured(
|
||||
personaToToggleDefault.id,
|
||||
personaToToggleDefault.is_featured
|
||||
);
|
||||
if (response.ok) {
|
||||
refreshPersonas();
|
||||
closeDefaultModal();
|
||||
} else {
|
||||
toast.error(`Failed to update persona - ${await response.text()}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
{deleteModalOpen && personaToDelete && (
|
||||
<ConfirmationModalLayout
|
||||
icon={SvgAlertCircle}
|
||||
title="Delete Agent"
|
||||
onClose={closeDeleteModal}
|
||||
submit={<Button onClick={handleDeletePersona}>Delete</Button>}
|
||||
>
|
||||
{`Are you sure you want to delete ${personaToDelete.name}?`}
|
||||
</ConfirmationModalLayout>
|
||||
)}
|
||||
{defaultModalOpen &&
|
||||
personaToToggleDefault &&
|
||||
(() => {
|
||||
const isDefault = personaToToggleDefault.is_featured;
|
||||
|
||||
const title = isDefault
|
||||
? "Remove Featured Agent"
|
||||
: "Set Featured Agent";
|
||||
const buttonText = isDefault ? "Remove Feature" : "Set as Featured";
|
||||
const text = isDefault
|
||||
? `Are you sure you want to remove the featured status of ${personaToToggleDefault.name}?`
|
||||
: `Are you sure you want to set the featured status of ${personaToToggleDefault.name}?`;
|
||||
const additionalText = isDefault
|
||||
? `Removing "${personaToToggleDefault.name}" as a featured agent will not affect its visibility or accessibility.`
|
||||
: `Setting "${personaToToggleDefault.name}" as a featured agent will make it public and visible to all users. This action cannot be undone.`;
|
||||
|
||||
return (
|
||||
<ConfirmationModalLayout
|
||||
icon={SvgAlertCircle}
|
||||
title={title}
|
||||
onClose={closeDefaultModal}
|
||||
submit={
|
||||
<Button onClick={handleToggleDefault}>{buttonText}</Button>
|
||||
}
|
||||
>
|
||||
<div className="flex flex-col gap-2">
|
||||
<Text as="p">{text}</Text>
|
||||
<Text as="p" text03>
|
||||
{additionalText}
|
||||
</Text>
|
||||
</div>
|
||||
</ConfirmationModalLayout>
|
||||
);
|
||||
})()}
|
||||
|
||||
<DraggableTable
|
||||
headers={[
|
||||
"Name",
|
||||
"Description",
|
||||
"Type",
|
||||
"Featured Agent",
|
||||
"Is Visible",
|
||||
"Delete",
|
||||
]}
|
||||
isAdmin={isAdmin}
|
||||
rows={finalPersonas.map((persona) => {
|
||||
const isEditable = editablePersonas.includes(persona);
|
||||
return {
|
||||
id: persona.id.toString(),
|
||||
cells: [
|
||||
<div key="name" className="flex">
|
||||
{!persona.builtin_persona && (
|
||||
<FiEdit2
|
||||
className="mr-1 my-auto cursor-pointer"
|
||||
onClick={() =>
|
||||
router.push(
|
||||
`/app/agents/edit/${
|
||||
persona.id
|
||||
}?u=${Date.now()}&admin=true` as Route
|
||||
)
|
||||
}
|
||||
/>
|
||||
)}
|
||||
<p className="text font-medium whitespace-normal break-none">
|
||||
{persona.name}
|
||||
</p>
|
||||
</div>,
|
||||
<p
|
||||
key="description"
|
||||
className="whitespace-normal break-all max-w-2xl"
|
||||
>
|
||||
{persona.description}
|
||||
</p>,
|
||||
<PersonaTypeDisplay key={persona.id} persona={persona} />,
|
||||
<div
|
||||
key="featured"
|
||||
onClick={() => {
|
||||
openDefaultModal(persona);
|
||||
}}
|
||||
className={`
|
||||
px-1 py-0.5 rounded flex hover:bg-accent-background-hovered cursor-pointer select-none w-fit items-center gap-2
|
||||
`}
|
||||
>
|
||||
<div className="my-auto flex-none w-22">
|
||||
{!persona.is_featured ? (
|
||||
<div className="text-error">Not Featured</div>
|
||||
) : (
|
||||
"Featured"
|
||||
)}
|
||||
</div>
|
||||
<Checkbox checked={persona.is_featured} />
|
||||
</div>,
|
||||
<div
|
||||
key="is_visible"
|
||||
onClick={async () => {
|
||||
const response = await togglePersonaVisibility(
|
||||
persona.id,
|
||||
persona.is_listed
|
||||
);
|
||||
if (response.ok) {
|
||||
refreshPersonas();
|
||||
} else {
|
||||
toast.error(
|
||||
`Failed to update persona - ${await response.text()}`
|
||||
);
|
||||
}
|
||||
}}
|
||||
className={`
|
||||
px-1 py-0.5 rounded flex hover:bg-accent-background-hovered cursor-pointer select-none w-fit items-center gap-2
|
||||
`}
|
||||
>
|
||||
<div className="my-auto w-fit">
|
||||
{!persona.is_listed ? (
|
||||
<div className="text-error">Hidden</div>
|
||||
) : (
|
||||
"Visible"
|
||||
)}
|
||||
</div>
|
||||
<Checkbox checked={persona.is_listed} />
|
||||
</div>,
|
||||
<div key="edit" className="flex">
|
||||
<div className="mr-auto my-auto">
|
||||
{!persona.builtin_persona && isEditable ? (
|
||||
<Button
|
||||
icon={SvgTrash}
|
||||
prominence="tertiary"
|
||||
onClick={() => openDeleteModal(persona)}
|
||||
/>
|
||||
) : (
|
||||
<Text as="p">-</Text>
|
||||
)}
|
||||
</div>
|
||||
</div>,
|
||||
],
|
||||
staticModifiers: [[1, "lg:w-[250px] xl:w-[400px] 2xl:w-[550px]"]],
|
||||
};
|
||||
})}
|
||||
setRows={updatePersonaOrder}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,160 +1 @@
|
||||
"use client";
|
||||
|
||||
import { PersonasTable } from "./PersonaTable";
|
||||
import Text from "@/components/ui/text";
|
||||
import Title from "@/components/ui/title";
|
||||
import Separator from "@/refresh-components/Separator";
|
||||
import { SubLabel } from "@/components/Field";
|
||||
import * as SettingsLayouts from "@/layouts/settings-layouts";
|
||||
import CreateButton from "@/refresh-components/buttons/CreateButton";
|
||||
import { useAdminPersonas } from "@/hooks/useAdminPersonas";
|
||||
import { Persona } from "./interfaces";
|
||||
import { ThreeDotsLoader } from "@/components/Loading";
|
||||
import { ErrorCallout } from "@/components/ErrorCallout";
|
||||
import { ADMIN_ROUTES } from "@/lib/admin-routes";
|
||||
import { useState, useEffect } from "react";
|
||||
import { Pagination } from "@opal/components";
|
||||
|
||||
const route = ADMIN_ROUTES.AGENTS;
|
||||
const PAGE_SIZE = 20;
|
||||
|
||||
function MainContent({
|
||||
personas,
|
||||
totalItems,
|
||||
currentPage,
|
||||
onPageChange,
|
||||
refreshPersonas,
|
||||
}: {
|
||||
personas: Persona[];
|
||||
totalItems: number;
|
||||
currentPage: number;
|
||||
onPageChange: (page: number) => void;
|
||||
refreshPersonas: () => void;
|
||||
}) {
|
||||
// Filter out default/unified assistants.
|
||||
// NOTE: The backend should already exclude them if includeDefault = false is
|
||||
// provided. That change was made with the introduction of pagination; we keep
|
||||
// this filter here for now for backwards compatibility.
|
||||
const customPersonas = personas.filter((persona) => !persona.builtin_persona);
|
||||
const totalPages = Math.ceil(totalItems / PAGE_SIZE);
|
||||
|
||||
// Clamp currentPage when totalItems shrinks (e.g., deleting the last item on a page)
|
||||
useEffect(() => {
|
||||
if (currentPage > totalPages && totalPages > 0) {
|
||||
onPageChange(totalPages);
|
||||
}
|
||||
}, [currentPage, totalPages, onPageChange]);
|
||||
|
||||
return (
|
||||
<div>
|
||||
<Text className="mb-2">
|
||||
Agents are a way to build custom search/question-answering experiences
|
||||
for different use cases.
|
||||
</Text>
|
||||
<Text className="mt-2">They allow you to customize:</Text>
|
||||
<div className="text-sm">
|
||||
<ul className="list-disc mt-2 ml-4">
|
||||
<li>
|
||||
The prompt used by your LLM of choice to respond to the user query
|
||||
</li>
|
||||
<li>The documents that are used as context</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Separator />
|
||||
|
||||
<Title>Create an Agent</Title>
|
||||
<CreateButton href="/app/agents/create?admin=true">
|
||||
New Agent
|
||||
</CreateButton>
|
||||
|
||||
<Separator />
|
||||
|
||||
<Title>Existing Agents</Title>
|
||||
{totalItems > 0 ? (
|
||||
<>
|
||||
<SubLabel>
|
||||
Agents will be displayed as options on the Chat / Search
|
||||
interfaces in the order they are displayed below. Agents marked as
|
||||
hidden will not be displayed. Editable agents are shown at the
|
||||
top.
|
||||
</SubLabel>
|
||||
<PersonasTable
|
||||
personas={customPersonas}
|
||||
refreshPersonas={refreshPersonas}
|
||||
currentPage={currentPage}
|
||||
pageSize={PAGE_SIZE}
|
||||
/>
|
||||
{totalPages > 1 && (
|
||||
<Pagination
|
||||
currentPage={currentPage}
|
||||
totalPages={totalPages}
|
||||
onChange={onPageChange}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<div className="mt-6 p-8 border border-border rounded-lg bg-background-weak text-center">
|
||||
<Text className="text-lg font-medium mb-2">
|
||||
No custom agents yet
|
||||
</Text>
|
||||
<Text className="text-subtle mb-3">
|
||||
Create your first agent to:
|
||||
</Text>
|
||||
<ul className="text-subtle text-sm list-disc text-left inline-block mb-3">
|
||||
<li>Build department-specific knowledge bases</li>
|
||||
<li>Create specialized research agents</li>
|
||||
<li>Set up compliance and policy advisors</li>
|
||||
</ul>
|
||||
<Text className="text-subtle text-sm mb-4">
|
||||
...and so much more!
|
||||
</Text>
|
||||
<CreateButton href="/app/agents/create?admin=true">
|
||||
Create Your First Agent
|
||||
</CreateButton>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default function Page() {
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
const { personas, totalItems, isLoading, error, refresh } = useAdminPersonas({
|
||||
pageNum: currentPage - 1, // Backend uses 0-indexed pages
|
||||
pageSize: PAGE_SIZE,
|
||||
});
|
||||
|
||||
return (
|
||||
<SettingsLayouts.Root>
|
||||
<SettingsLayouts.Header icon={route.icon} title={route.title} separator />
|
||||
|
||||
<SettingsLayouts.Body>
|
||||
{isLoading && <ThreeDotsLoader />}
|
||||
|
||||
{error && (
|
||||
<ErrorCallout
|
||||
errorTitle="Failed to load agents"
|
||||
errorMsg={
|
||||
error?.info?.message ||
|
||||
error?.info?.detail ||
|
||||
"An unknown error occurred"
|
||||
}
|
||||
/>
|
||||
)}
|
||||
|
||||
{!isLoading && !error && (
|
||||
<MainContent
|
||||
personas={personas}
|
||||
totalItems={totalItems}
|
||||
currentPage={currentPage}
|
||||
onPageChange={setCurrentPage}
|
||||
refreshPersonas={refresh}
|
||||
/>
|
||||
)}
|
||||
</SettingsLayouts.Body>
|
||||
</SettingsLayouts.Root>
|
||||
);
|
||||
}
|
||||
export { default } from "@/refresh-pages/admin/AgentsPage";
|
||||
|
||||
1
web/src/app/admin/groups/create/page.tsx
Normal file
1
web/src/app/admin/groups/create/page.tsx
Normal file
@@ -0,0 +1 @@
|
||||
export { default } from "@/refresh-pages/admin/GroupsPage/CreateGroupPage";
|
||||
@@ -28,7 +28,12 @@ export default function Layout({ children }: LayoutProps) {
|
||||
<SettingsLayouts.Header icon={SvgSliders} title="Settings" separator />
|
||||
|
||||
<SettingsLayouts.Body>
|
||||
<Section flexDirection="row" alignItems="start" gap={1.5}>
|
||||
<Section
|
||||
flexDirection="row"
|
||||
justifyContent="start"
|
||||
alignItems="start"
|
||||
gap={1.5}
|
||||
>
|
||||
{/* Left: Tab Navigation */}
|
||||
<div
|
||||
data-testid="settings-left-tab-navigation"
|
||||
|
||||
@@ -7,8 +7,11 @@ import { processRawChatHistory } from "@/app/app/services/lib";
|
||||
import { getLatestMessageChain } from "@/app/app/services/messageTree";
|
||||
import HumanMessage from "@/app/app/message/HumanMessage";
|
||||
import AgentMessage from "@/app/app/message/messageComponents/AgentMessage";
|
||||
import { Callout } from "@/components/ui/callout";
|
||||
import OnyxInitializingLoader from "@/components/OnyxInitializingLoader";
|
||||
import { Section } from "@/layouts/general-layouts";
|
||||
import { IllustrationContent } from "@opal/layouts";
|
||||
import SvgNotFound from "@opal/illustrations/not-found";
|
||||
import { Button } from "@opal/components";
|
||||
import { Persona } from "@/app/admin/agents/interfaces";
|
||||
import { MinimalOnyxDocument } from "@/lib/search/interfaces";
|
||||
import PreviewModal from "@/sections/modals/PreviewModal";
|
||||
@@ -33,12 +36,17 @@ export default function SharedChatDisplay({
|
||||
|
||||
if (!chatSession) {
|
||||
return (
|
||||
<div className="min-h-full w-full">
|
||||
<div className="mx-auto w-fit pt-8">
|
||||
<Callout type="danger" title="Shared Chat Not Found">
|
||||
Did not find a shared chat with the specified ID.
|
||||
</Callout>
|
||||
</div>
|
||||
<div className="h-full w-full flex flex-col items-center justify-center">
|
||||
<Section flexDirection="column" alignItems="center" gap={1}>
|
||||
<IllustrationContent
|
||||
illustration={SvgNotFound}
|
||||
title="Shared chat not found"
|
||||
description="Did not find a shared chat with the specified ID."
|
||||
/>
|
||||
<Button href="/app" prominence="secondary">
|
||||
Start a new chat
|
||||
</Button>
|
||||
</Section>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -51,12 +59,17 @@ export default function SharedChatDisplay({
|
||||
|
||||
if (firstMessage === undefined) {
|
||||
return (
|
||||
<div className="min-h-full w-full">
|
||||
<div className="mx-auto w-fit pt-8">
|
||||
<Callout type="danger" title="Shared Chat Not Found">
|
||||
No messages found in shared chat.
|
||||
</Callout>
|
||||
</div>
|
||||
<div className="h-full w-full flex flex-col items-center justify-center">
|
||||
<Section flexDirection="column" alignItems="center" gap={1}>
|
||||
<IllustrationContent
|
||||
illustration={SvgNotFound}
|
||||
title="Shared chat not found"
|
||||
description="No messages found in shared chat."
|
||||
/>
|
||||
<Button href="/app" prominence="secondary">
|
||||
Start a new chat
|
||||
</Button>
|
||||
</Section>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
1
web/src/app/ee/admin/groups/create/page.tsx
Normal file
1
web/src/app/ee/admin/groups/create/page.tsx
Normal file
@@ -0,0 +1 @@
|
||||
export { default } from "@/refresh-pages/admin/GroupsPage/CreateGroupPage";
|
||||
@@ -17,6 +17,7 @@ import StatsOverlayLoader from "@/components/dev/StatsOverlayLoader";
|
||||
import AppHealthBanner from "@/sections/AppHealthBanner";
|
||||
import CustomAnalyticsScript from "@/providers/CustomAnalyticsScript";
|
||||
import ProductGatingWrapper from "@/providers/ProductGatingWrapper";
|
||||
import SWRConfigProvider from "@/providers/SWRConfigProvider";
|
||||
|
||||
const hankenGrotesk = Hanken_Grotesk({
|
||||
subsets: ["latin"],
|
||||
@@ -79,21 +80,23 @@ export default function RootLayout({
|
||||
<div className="text-text min-h-screen bg-background">
|
||||
<TooltipProvider>
|
||||
<PHProvider>
|
||||
<AppHealthBanner />
|
||||
<AppProvider>
|
||||
<DynamicMetadata />
|
||||
<CustomAnalyticsScript />
|
||||
<Suspense fallback={null}>
|
||||
<PostHogPageView />
|
||||
</Suspense>
|
||||
<div id={MODAL_ROOT_ID} className="h-screen w-screen">
|
||||
<ProductGatingWrapper>{children}</ProductGatingWrapper>
|
||||
</div>
|
||||
{process.env.NEXT_PUBLIC_POSTHOG_KEY && <WebVitals />}
|
||||
{process.env.NEXT_PUBLIC_ENABLE_STATS === "true" && (
|
||||
<StatsOverlayLoader />
|
||||
)}
|
||||
</AppProvider>
|
||||
<SWRConfigProvider>
|
||||
<AppHealthBanner />
|
||||
<AppProvider>
|
||||
<DynamicMetadata />
|
||||
<CustomAnalyticsScript />
|
||||
<Suspense fallback={null}>
|
||||
<PostHogPageView />
|
||||
</Suspense>
|
||||
<div id={MODAL_ROOT_ID} className="h-screen w-screen">
|
||||
<ProductGatingWrapper>{children}</ProductGatingWrapper>
|
||||
</div>
|
||||
{process.env.NEXT_PUBLIC_POSTHOG_KEY && <WebVitals />}
|
||||
{process.env.NEXT_PUBLIC_ENABLE_STATS === "true" && (
|
||||
<StatsOverlayLoader />
|
||||
)}
|
||||
</AppProvider>
|
||||
</SWRConfigProvider>
|
||||
</PHProvider>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
|
||||
@@ -21,7 +21,7 @@ import Text from "@/refresh-components/texts/Text";
|
||||
import { Section } from "@/layouts/general-layouts";
|
||||
import Popover, { PopoverMenu } from "@/refresh-components/Popover";
|
||||
import { SvgCheck, SvgClock, SvgTag } from "@opal/icons";
|
||||
import FilterButton from "@/refresh-components/buttons/FilterButton";
|
||||
import { FilterButton } from "@opal/components";
|
||||
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
|
||||
import useFilter from "@/hooks/useFilter";
|
||||
import { LineItemButton } from "@opal/components";
|
||||
@@ -217,7 +217,7 @@ export default function SearchUI({ onDocumentClick }: SearchResultsProps) {
|
||||
<Popover open={timeFilterOpen} onOpenChange={setTimeFilterOpen}>
|
||||
<Popover.Trigger asChild>
|
||||
<FilterButton
|
||||
leftIcon={SvgClock}
|
||||
icon={SvgClock}
|
||||
active={!!timeFilter}
|
||||
onClear={() => {
|
||||
setTimeFilter(null);
|
||||
@@ -253,7 +253,7 @@ export default function SearchUI({ onDocumentClick }: SearchResultsProps) {
|
||||
<Popover open={tagFilterOpen} onOpenChange={setTagFilterOpen}>
|
||||
<Popover.Trigger asChild>
|
||||
<FilterButton
|
||||
leftIcon={SvgTag}
|
||||
icon={SvgTag}
|
||||
active={selectedTags.length > 0}
|
||||
onClear={() => {
|
||||
setSelectedTags([]);
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
import useSWR from "swr";
|
||||
import { errorHandlingFetcher } from "@/lib/fetcher";
|
||||
import { AuthType, NEXT_PUBLIC_CLOUD_ENABLED } from "@/lib/constants";
|
||||
|
||||
interface AuthTypeAPIResponse {
|
||||
auth_type: string;
|
||||
requires_verification: boolean;
|
||||
anonymous_user_enabled: boolean | null;
|
||||
password_min_length: number;
|
||||
has_users: boolean;
|
||||
oauth_enabled: boolean;
|
||||
}
|
||||
|
||||
export interface AuthTypeMetadata {
|
||||
authType: AuthType;
|
||||
autoRedirect: boolean;
|
||||
@@ -22,6 +30,24 @@ const DEFAULT_AUTH_TYPE_METADATA: AuthTypeMetadata = {
|
||||
oauthEnabled: false,
|
||||
};
|
||||
|
||||
async function fetchAuthTypeMetadata(url: string): Promise<AuthTypeMetadata> {
|
||||
const res = await fetch(url);
|
||||
if (!res.ok) throw new Error("Failed to fetch auth type metadata");
|
||||
const data: AuthTypeAPIResponse = await res.json();
|
||||
const authType = NEXT_PUBLIC_CLOUD_ENABLED
|
||||
? AuthType.CLOUD
|
||||
: (data.auth_type as AuthType);
|
||||
return {
|
||||
authType,
|
||||
autoRedirect: authType === AuthType.OIDC || authType === AuthType.SAML,
|
||||
requiresVerification: data.requires_verification,
|
||||
anonymousUserEnabled: data.anonymous_user_enabled,
|
||||
passwordMinLength: data.password_min_length,
|
||||
hasUsers: data.has_users,
|
||||
oauthEnabled: data.oauth_enabled,
|
||||
};
|
||||
}
|
||||
|
||||
export function useAuthTypeMetadata(): {
|
||||
authTypeMetadata: AuthTypeMetadata;
|
||||
isLoading: boolean;
|
||||
@@ -29,7 +55,7 @@ export function useAuthTypeMetadata(): {
|
||||
} {
|
||||
const { data, error, isLoading } = useSWR<AuthTypeMetadata>(
|
||||
"/api/auth/type",
|
||||
errorHandlingFetcher,
|
||||
fetchAuthTypeMetadata,
|
||||
{
|
||||
revalidateOnFocus: false,
|
||||
revalidateOnReconnect: false,
|
||||
@@ -37,14 +63,6 @@ export function useAuthTypeMetadata(): {
|
||||
}
|
||||
);
|
||||
|
||||
if (NEXT_PUBLIC_CLOUD_ENABLED && data) {
|
||||
return {
|
||||
authTypeMetadata: { ...data, authType: AuthType.CLOUD },
|
||||
isLoading,
|
||||
error,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
authTypeMetadata: data ?? DEFAULT_AUTH_TYPE_METADATA,
|
||||
isLoading,
|
||||
|
||||
@@ -61,6 +61,11 @@ interface UseChatSessionControllerProps {
|
||||
}) => Promise<void>;
|
||||
}
|
||||
|
||||
export type SessionFetchError = {
|
||||
type: "not_found" | "access_denied" | "unknown";
|
||||
detail: string;
|
||||
} | null;
|
||||
|
||||
export default function useChatSessionController({
|
||||
existingChatSessionId,
|
||||
searchParams,
|
||||
@@ -80,6 +85,8 @@ export default function useChatSessionController({
|
||||
const [currentSessionFileTokenCount, setCurrentSessionFileTokenCount] =
|
||||
useState<number>(0);
|
||||
const [projectFiles, setProjectFiles] = useState<ProjectFile[]>([]);
|
||||
const [sessionFetchError, setSessionFetchError] =
|
||||
useState<SessionFetchError>(null);
|
||||
// Store actions
|
||||
const updateSessionAndMessageTree = useChatSessionStore(
|
||||
(state) => state.updateSessionAndMessageTree
|
||||
@@ -151,6 +158,8 @@ export default function useChatSessionController({
|
||||
}
|
||||
|
||||
async function initialSessionFetch() {
|
||||
setSessionFetchError(null);
|
||||
|
||||
if (existingChatSessionId === null) {
|
||||
// Clear the current session in the store to show intro messages
|
||||
setCurrentSession(null);
|
||||
@@ -178,9 +187,42 @@ export default function useChatSessionController({
|
||||
setCurrentSession(existingChatSessionId);
|
||||
setIsFetchingChatMessages(existingChatSessionId, true);
|
||||
|
||||
const response = await fetch(
|
||||
`/api/chat/get-chat-session/${existingChatSessionId}`
|
||||
);
|
||||
let response: Response;
|
||||
try {
|
||||
response = await fetch(
|
||||
`/api/chat/get-chat-session/${existingChatSessionId}`
|
||||
);
|
||||
} catch (error) {
|
||||
setIsFetchingChatMessages(existingChatSessionId, false);
|
||||
console.error("Failed to fetch chat session", {
|
||||
chatSessionId: existingChatSessionId,
|
||||
error,
|
||||
});
|
||||
setSessionFetchError({
|
||||
type: "unknown",
|
||||
detail: "Failed to load chat session. Please check your connection.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
setIsFetchingChatMessages(existingChatSessionId, false);
|
||||
let detail = "An unexpected error occurred.";
|
||||
try {
|
||||
const errorBody = await response.json();
|
||||
detail = errorBody.detail || detail;
|
||||
} catch {
|
||||
// ignore parse errors
|
||||
}
|
||||
const type =
|
||||
response.status === 404
|
||||
? "not_found"
|
||||
: response.status === 403
|
||||
? "access_denied"
|
||||
: "unknown";
|
||||
setSessionFetchError({ type, detail });
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await response.json();
|
||||
const chatSession = session as BackendChatSession;
|
||||
@@ -356,5 +398,6 @@ export default function useChatSessionController({
|
||||
currentSessionFileTokenCount,
|
||||
onMessageSelection,
|
||||
projectFiles,
|
||||
sessionFetchError,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -36,7 +36,11 @@ export function useMemoryManager({
|
||||
|
||||
setLocalMemories((prev) => {
|
||||
const emptyNewItems = prev.filter((m) => m.isNew && !m.content.trim());
|
||||
return [...emptyNewItems, ...existingMemories];
|
||||
const availableSlots = MAX_MEMORY_COUNT - existingMemories.length;
|
||||
return [
|
||||
...emptyNewItems.slice(0, Math.max(0, availableSlots)),
|
||||
...existingMemories,
|
||||
];
|
||||
});
|
||||
initialMemoriesRef.current = memories;
|
||||
}, [memories]);
|
||||
|
||||
@@ -123,6 +123,9 @@ export interface LLMProviderFormProps {
|
||||
open?: boolean;
|
||||
onOpenChange?: (open: boolean) => void;
|
||||
|
||||
/** The current default model name for this provider (from the global default). */
|
||||
defaultModelName?: string;
|
||||
|
||||
// Onboarding-specific (only when variant === "onboarding")
|
||||
onboardingState?: OnboardingState;
|
||||
onboardingActions?: OnboardingActions;
|
||||
|
||||
@@ -19,6 +19,29 @@ const DEFAULT_AUTH_ERROR_MSG =
|
||||
|
||||
const DEFAULT_ERROR_MSG = "An error occurred while fetching the data.";
|
||||
|
||||
/**
|
||||
* SWR `onErrorRetry` callback that suppresses automatic retries for
|
||||
* authentication errors (401/403). Pass this to any SWR hook whose endpoint
|
||||
* requires auth so that unauthenticated pages don't spam the backend.
|
||||
*/
|
||||
export const skipRetryOnAuthError: NonNullable<
|
||||
import("swr").SWRConfiguration["onErrorRetry"]
|
||||
> = (error, _key, _config, revalidate, { retryCount }) => {
|
||||
if (
|
||||
error instanceof FetchError &&
|
||||
(error.status === 401 || error.status === 403)
|
||||
)
|
||||
return;
|
||||
// For non-auth errors, retry with exponential backoff
|
||||
if (
|
||||
_config.errorRetryCount !== undefined &&
|
||||
retryCount >= _config.errorRetryCount
|
||||
)
|
||||
return;
|
||||
const delay = Math.min(2000 * 2 ** retryCount, 30000);
|
||||
setTimeout(() => revalidate({ retryCount }), delay);
|
||||
};
|
||||
|
||||
export const errorHandlingFetcher = async <T>(url: string): Promise<T> => {
|
||||
const res = await fetch(url);
|
||||
|
||||
|
||||
16
web/src/providers/SWRConfigProvider.tsx
Normal file
16
web/src/providers/SWRConfigProvider.tsx
Normal file
@@ -0,0 +1,16 @@
|
||||
"use client";
|
||||
|
||||
import { SWRConfig } from "swr";
|
||||
import { skipRetryOnAuthError } from "@/lib/fetcher";
|
||||
|
||||
export default function SWRConfigProvider({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
}) {
|
||||
return (
|
||||
<SWRConfig value={{ onErrorRetry: skipRetryOnAuthError }}>
|
||||
{children}
|
||||
</SWRConfig>
|
||||
);
|
||||
}
|
||||
@@ -49,7 +49,9 @@ export function SettingsProvider({
|
||||
const [isMobile, setIsMobile] = useState<boolean | undefined>();
|
||||
const settingsLoading = coreSettingsLoading || enterpriseSettingsLoading;
|
||||
const vectorDbEnabled =
|
||||
!coreSettingsLoading && settings.vector_db_enabled !== false;
|
||||
!coreSettingsLoading &&
|
||||
!settingsError &&
|
||||
settings.vector_db_enabled !== false;
|
||||
const { ccPairs } = useCCPairs(vectorDbEnabled);
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@@ -120,6 +120,10 @@ export interface ModalContentProps
|
||||
> {
|
||||
width?: keyof typeof widthClasses;
|
||||
height?: keyof typeof heightClasses;
|
||||
/** Vertical placement of the modal. `"center"` (default) centers in the
|
||||
* viewport/container. `"top"` pins the modal near the top of the viewport,
|
||||
* matching the position used by CommandMenu. */
|
||||
position?: "center" | "top";
|
||||
preventAccidentalClose?: boolean;
|
||||
skipOverlay?: boolean;
|
||||
background?: "default" | "gray";
|
||||
@@ -136,6 +140,7 @@ const ModalContent = React.forwardRef<
|
||||
children,
|
||||
width = "md",
|
||||
height = "fit",
|
||||
position = "center",
|
||||
preventAccidentalClose = true,
|
||||
skipOverlay = false,
|
||||
background = "default",
|
||||
@@ -267,27 +272,39 @@ const ModalContent = React.forwardRef<
|
||||
|
||||
const { centerX, centerY, hasContainerCenter } = useContainerCenter();
|
||||
|
||||
const isTop = position === "top";
|
||||
|
||||
const animationClasses = cn(
|
||||
"data-[state=open]:fade-in-0 data-[state=closed]:fade-out-0",
|
||||
"data-[state=open]:zoom-in-95 data-[state=closed]:zoom-out-95",
|
||||
"data-[state=open]:slide-in-from-top-1/2 data-[state=closed]:slide-out-to-top-1/2",
|
||||
!isTop &&
|
||||
"data-[state=open]:slide-in-from-top-1/2 data-[state=closed]:slide-out-to-top-1/2",
|
||||
"duration-200"
|
||||
);
|
||||
|
||||
const containerStyle: React.CSSProperties | undefined = hasContainerCenter
|
||||
? ({
|
||||
left: centerX,
|
||||
top: centerY,
|
||||
"--tw-enter-translate-x": "-50%",
|
||||
"--tw-exit-translate-x": "-50%",
|
||||
"--tw-enter-translate-y": "-50%",
|
||||
"--tw-exit-translate-y": "-50%",
|
||||
} as React.CSSProperties)
|
||||
: undefined;
|
||||
const containerStyle: React.CSSProperties | undefined =
|
||||
hasContainerCenter && !isTop
|
||||
? ({
|
||||
left: centerX,
|
||||
top: centerY,
|
||||
"--tw-enter-translate-x": "-50%",
|
||||
"--tw-exit-translate-x": "-50%",
|
||||
"--tw-enter-translate-y": "-50%",
|
||||
"--tw-exit-translate-y": "-50%",
|
||||
} as React.CSSProperties)
|
||||
: hasContainerCenter && isTop
|
||||
? ({
|
||||
left: centerX,
|
||||
"--tw-enter-translate-x": "-50%",
|
||||
"--tw-exit-translate-x": "-50%",
|
||||
} as React.CSSProperties)
|
||||
: undefined;
|
||||
|
||||
const positionClasses = cn(
|
||||
"fixed -translate-x-1/2 -translate-y-1/2",
|
||||
!hasContainerCenter && "left-1/2 top-1/2"
|
||||
"fixed -translate-x-1/2",
|
||||
isTop
|
||||
? cn("top-[72px]", !hasContainerCenter && "left-1/2")
|
||||
: cn("-translate-y-1/2", !hasContainerCenter && "left-1/2 top-1/2")
|
||||
);
|
||||
|
||||
const dialogEventHandlers = {
|
||||
|
||||
@@ -142,6 +142,7 @@ function PopoverContent({
|
||||
collisionPadding={8}
|
||||
className={cn(
|
||||
"bg-background-neutral-00 p-1 z-popover rounded-12 border shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
|
||||
"flex flex-col",
|
||||
"max-h-[var(--radix-popover-content-available-height)]",
|
||||
"overflow-hidden",
|
||||
widthClasses[width]
|
||||
@@ -226,7 +227,7 @@ export function PopoverMenu({
|
||||
});
|
||||
|
||||
return (
|
||||
<Section alignItems="stretch">
|
||||
<Section alignItems="stretch" height="auto" className="flex-1 min-h-0">
|
||||
<ShadowDiv
|
||||
scrollContainerRef={scrollContainerRef}
|
||||
className="flex flex-col gap-1 max-h-[20rem] w-full"
|
||||
|
||||
@@ -105,7 +105,7 @@ export default function ShadowDiv({
|
||||
}, [containerRef, checkScroll]);
|
||||
|
||||
return (
|
||||
<div className="relative min-h-0">
|
||||
<div className="relative min-h-0 flex flex-col">
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn("overflow-y-auto", className)}
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
import type { Meta, StoryObj } from "@storybook/react";
|
||||
import FilterButton from "./FilterButton";
|
||||
import { SvgFilter, SvgCalendar, SvgUser } from "@opal/icons";
|
||||
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
|
||||
|
||||
const meta: Meta<typeof FilterButton> = {
|
||||
title: "refresh-components/buttons/FilterButton",
|
||||
component: FilterButton,
|
||||
tags: ["autodocs"],
|
||||
decorators: [
|
||||
(Story) => (
|
||||
<TooltipPrimitive.Provider>
|
||||
<Story />
|
||||
</TooltipPrimitive.Provider>
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<typeof FilterButton>;
|
||||
|
||||
export const Default: Story = {
|
||||
args: {
|
||||
leftIcon: SvgFilter,
|
||||
children: "Filter",
|
||||
},
|
||||
};
|
||||
|
||||
export const Active: Story = {
|
||||
args: {
|
||||
leftIcon: SvgFilter,
|
||||
active: true,
|
||||
children: "Source: Google Drive",
|
||||
onClear: () => {},
|
||||
},
|
||||
};
|
||||
|
||||
export const Transient: Story = {
|
||||
args: {
|
||||
leftIcon: SvgCalendar,
|
||||
transient: true,
|
||||
children: "Date Range",
|
||||
},
|
||||
};
|
||||
|
||||
export const WithoutLabel: Story = {
|
||||
args: {
|
||||
leftIcon: SvgFilter,
|
||||
},
|
||||
};
|
||||
|
||||
export const AllStates: Story = {
|
||||
render: () => (
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
gap: 12,
|
||||
alignItems: "center",
|
||||
flexWrap: "wrap",
|
||||
}}
|
||||
>
|
||||
<FilterButton leftIcon={SvgFilter}>Inactive</FilterButton>
|
||||
<FilterButton leftIcon={SvgFilter} transient>
|
||||
Transient
|
||||
</FilterButton>
|
||||
<FilterButton leftIcon={SvgCalendar} active onClear={() => {}}>
|
||||
Active Filter
|
||||
</FilterButton>
|
||||
<FilterButton leftIcon={SvgUser} active onClear={() => {}}>
|
||||
Author: John Doe
|
||||
</FilterButton>
|
||||
</div>
|
||||
),
|
||||
};
|
||||
@@ -1,113 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import React, { useState } from "react";
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import { cn, noProp } from "@/lib/utils";
|
||||
import type { IconProps } from "@opal/types";
|
||||
import IconButton from "./IconButton";
|
||||
import { SvgChevronDownSmall, SvgX } from "@opal/icons";
|
||||
const buttonClasses = (transient?: boolean) =>
|
||||
({
|
||||
active: [
|
||||
"bg-background-tint-inverted-03",
|
||||
"hover:bg-background-tint-inverted-04",
|
||||
transient && "bg-background-tint-inverted-04",
|
||||
"active:bg-background-tint-inverted-02",
|
||||
],
|
||||
inactive: [
|
||||
"bg-background-tint-01",
|
||||
"hover:bg-background-tint-02",
|
||||
transient && "bg-background-tint-02",
|
||||
"active:bg-background-tint-00",
|
||||
],
|
||||
}) as const;
|
||||
|
||||
const textClasses = (transient?: boolean) => ({
|
||||
active: ["text-text-inverted-05"],
|
||||
inactive: [
|
||||
"text-text-03",
|
||||
"group-hover/FilterButton:text-text-04",
|
||||
transient && "text-text-04",
|
||||
"group-active/FilterButton:text-text-05",
|
||||
],
|
||||
});
|
||||
|
||||
const iconClasses = (transient?: boolean) =>
|
||||
({
|
||||
active: ["stroke-text-inverted-05"],
|
||||
inactive: [
|
||||
"stroke-text-03",
|
||||
"group-hover/FilterButton:stroke-text-04",
|
||||
transient && "stroke-text-04",
|
||||
"group-active/FilterButton:stroke-text-05",
|
||||
],
|
||||
}) as const;
|
||||
|
||||
export interface FilterButtonProps
|
||||
extends React.ButtonHTMLAttributes<HTMLButtonElement> {
|
||||
// Button states:
|
||||
active?: boolean;
|
||||
transient?: boolean;
|
||||
|
||||
leftIcon: React.FunctionComponent<IconProps>;
|
||||
onClear?: () => void;
|
||||
|
||||
children?: string;
|
||||
}
|
||||
|
||||
export default function FilterButton({
|
||||
active,
|
||||
transient,
|
||||
|
||||
leftIcon: LeftIcon,
|
||||
|
||||
onClick,
|
||||
onClear,
|
||||
children,
|
||||
className,
|
||||
...props
|
||||
}: FilterButtonProps) {
|
||||
const state = active ? "active" : "inactive";
|
||||
|
||||
return (
|
||||
<button
|
||||
className={cn(
|
||||
"p-2 h-fit rounded-12 group/FilterButton flex flex-row items-center justify-center gap-1 w-fit",
|
||||
buttonClasses(transient)[state],
|
||||
className
|
||||
)}
|
||||
onClick={onClick}
|
||||
{...props}
|
||||
>
|
||||
<div className="pr-0.5">
|
||||
<LeftIcon
|
||||
className={cn("w-[1rem] h-[1rem]", iconClasses(transient)[state])}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Text as="p" nowrap className={cn(textClasses(transient)[state])}>
|
||||
{children}
|
||||
</Text>
|
||||
<div className="pl-0">
|
||||
{active ? (
|
||||
<IconButton
|
||||
icon={SvgX}
|
||||
onClick={noProp(onClear)}
|
||||
secondary
|
||||
className="!p-0 !rounded-04"
|
||||
/>
|
||||
) : (
|
||||
<div className="w-[1rem] h-[1rem]">
|
||||
<SvgChevronDownSmall
|
||||
className={cn(
|
||||
"w-[1rem] h-[1rem] transition-transform duration-200 ease-in-out",
|
||||
iconClasses(transient)[state],
|
||||
transient && "-rotate-180"
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
);
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
"use client";
|
||||
|
||||
import React from "react";
|
||||
import type { IconFunctionComponent, IconProps } from "@opal/types";
|
||||
import type { ButtonType, IconFunctionComponent, IconProps } from "@opal/types";
|
||||
import type { Route } from "next";
|
||||
import { Interactive } from "@opal/core";
|
||||
import { ContentAction } from "@opal/layouts";
|
||||
@@ -18,6 +18,7 @@ export interface SidebarTabProps {
|
||||
// Button properties:
|
||||
onClick?: React.MouseEventHandler<HTMLElement>;
|
||||
href?: string;
|
||||
type?: ButtonType;
|
||||
icon?: React.FunctionComponent<IconProps>;
|
||||
children?: React.ReactNode;
|
||||
rightChildren?: React.ReactNode;
|
||||
@@ -31,6 +32,7 @@ export default function SidebarTab({
|
||||
|
||||
onClick,
|
||||
href,
|
||||
type,
|
||||
icon,
|
||||
rightChildren,
|
||||
children,
|
||||
@@ -58,12 +60,14 @@ export default function SidebarTab({
|
||||
variant="sidebar"
|
||||
state={selected ? "selected" : "empty"}
|
||||
onClick={onClick}
|
||||
type="button"
|
||||
group="group/SidebarTab"
|
||||
>
|
||||
<Interactive.Container
|
||||
roundingVariant="compact"
|
||||
heightVariant="lg"
|
||||
widthVariant="full"
|
||||
type={type}
|
||||
>
|
||||
{href && (
|
||||
<Link
|
||||
|
||||
@@ -101,7 +101,19 @@ function MemoryItem({
|
||||
setIsFocused(false);
|
||||
void onBlur(originalIndex);
|
||||
}}
|
||||
rows={3}
|
||||
onKeyDown={(e) => {
|
||||
if (
|
||||
e.key === "Enter" &&
|
||||
!e.shiftKey &&
|
||||
!e.nativeEvent.isComposing
|
||||
) {
|
||||
e.preventDefault();
|
||||
textareaRef.current?.blur();
|
||||
}
|
||||
}}
|
||||
rows={1}
|
||||
autoResize
|
||||
maxRows={3}
|
||||
maxLength={MAX_MEMORY_LENGTH}
|
||||
resizable={false}
|
||||
className="bg-background-tint-01 hover:bg-background-tint-00 focus-within:bg-background-tint-00"
|
||||
@@ -149,6 +161,7 @@ interface MemoriesModalProps {
|
||||
initialTargetMemoryId?: number | null;
|
||||
initialTargetIndex?: number | null;
|
||||
highlightOnOpen?: boolean;
|
||||
focusNewLine?: boolean;
|
||||
}
|
||||
|
||||
export default function MemoriesModal({
|
||||
@@ -158,6 +171,7 @@ export default function MemoriesModal({
|
||||
initialTargetMemoryId,
|
||||
initialTargetIndex,
|
||||
highlightOnOpen = false,
|
||||
focusNewLine = false,
|
||||
}: MemoriesModalProps) {
|
||||
const close = useModalClose(onClose);
|
||||
const [focusMemoryId, setFocusMemoryId] = useState<number | null>(null);
|
||||
@@ -231,6 +245,19 @@ export default function MemoriesModal({
|
||||
onNotify: (message, type) => toast[type](message),
|
||||
});
|
||||
|
||||
// Always start with an empty card; optionally focus it (View/Add button)
|
||||
const hasAddedEmptyRef = useRef(false);
|
||||
useEffect(() => {
|
||||
if (hasAddedEmptyRef.current) return;
|
||||
hasAddedEmptyRef.current = true;
|
||||
|
||||
const id = handleAddMemory();
|
||||
if (id !== null && focusNewLine) {
|
||||
setFocusMemoryId(id);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const onAddLine = () => {
|
||||
const id = handleAddMemory();
|
||||
if (id !== null) {
|
||||
@@ -240,7 +267,7 @@ export default function MemoriesModal({
|
||||
|
||||
return (
|
||||
<Modal open onOpenChange={(open) => !open && close?.()}>
|
||||
<Modal.Content width="sm" height="lg">
|
||||
<Modal.Content width="sm" height="lg" position="top">
|
||||
<Modal.Header
|
||||
icon={SvgAddLines}
|
||||
title="Memory"
|
||||
|
||||
@@ -11,7 +11,7 @@ import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
|
||||
import * as SettingsLayouts from "@/layouts/settings-layouts";
|
||||
import TextSeparator from "@/refresh-components/TextSeparator";
|
||||
import Tabs from "@/refresh-components/Tabs";
|
||||
import FilterButton from "@/refresh-components/buttons/FilterButton";
|
||||
import { FilterButton } from "@opal/components";
|
||||
import Popover, { PopoverMenu } from "@/refresh-components/Popover";
|
||||
import LineItem from "@/refresh-components/buttons/LineItem";
|
||||
import { Button } from "@opal/components";
|
||||
@@ -465,9 +465,8 @@ export default function AgentsNavigationPage() {
|
||||
>
|
||||
<Popover.Trigger asChild>
|
||||
<FilterButton
|
||||
leftIcon={SvgUser}
|
||||
icon={SvgUser}
|
||||
active={selectedCreatorIds.size > 0}
|
||||
transient={creatorFilterOpen}
|
||||
onClear={() => setSelectedCreatorIds(new Set())}
|
||||
>
|
||||
{creatorFilterButtonText}
|
||||
@@ -537,8 +536,7 @@ export default function AgentsNavigationPage() {
|
||||
>
|
||||
<Popover.Trigger asChild>
|
||||
<FilterButton
|
||||
leftIcon={SvgActions}
|
||||
transient={actionsFilterOpen}
|
||||
icon={SvgActions}
|
||||
active={
|
||||
selectedActionIds.size > 0 || selectedMcpServerIds.size > 0
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import { personaIncludesRetrieval } from "@/app/app/services/lib";
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { toast, useToastFromQuery } from "@/hooks/useToast";
|
||||
import { SEARCH_PARAM_NAMES } from "@/app/app/services/searchParams";
|
||||
import { Section } from "@/layouts/general-layouts";
|
||||
import { useFederatedConnectors, useFilters, useLlmManager } from "@/lib/hooks";
|
||||
import { useForcedTools } from "@/lib/hooks/useForcedTools";
|
||||
import OnyxInitializingLoader from "@/components/OnyxInitializingLoader";
|
||||
@@ -62,6 +63,9 @@ import { useShowOnboarding } from "@/hooks/useShowOnboarding";
|
||||
import * as AppLayouts from "@/layouts/app-layouts";
|
||||
import { SvgChevronDown, SvgFileText } from "@opal/icons";
|
||||
import { Button } from "@opal/components";
|
||||
import { IllustrationContent } from "@opal/layouts";
|
||||
import SvgNotFound from "@opal/illustrations/not-found";
|
||||
import SvgNoAccess from "@opal/illustrations/no-access";
|
||||
import Spacer from "@/refresh-components/Spacer";
|
||||
import useAppFocus from "@/hooks/useAppFocus";
|
||||
import { useQueryController } from "@/providers/QueryControllerProvider";
|
||||
@@ -381,23 +385,26 @@ export default function AppPage({ firstMessage }: ChatPageProps) {
|
||||
setSelectedAgentFromId,
|
||||
});
|
||||
|
||||
const { onMessageSelection, currentSessionFileTokenCount } =
|
||||
useChatSessionController({
|
||||
existingChatSessionId: currentChatSessionId,
|
||||
searchParams,
|
||||
filterManager,
|
||||
firstMessage,
|
||||
setSelectedAgentFromId,
|
||||
setSelectedDocuments,
|
||||
setCurrentMessageFiles,
|
||||
chatSessionIdRef,
|
||||
loadedIdSessionRef,
|
||||
chatInputBarRef,
|
||||
isInitialLoad,
|
||||
submitOnLoadPerformed,
|
||||
refreshChatSessions,
|
||||
onSubmit,
|
||||
});
|
||||
const {
|
||||
onMessageSelection,
|
||||
currentSessionFileTokenCount,
|
||||
sessionFetchError,
|
||||
} = useChatSessionController({
|
||||
existingChatSessionId: currentChatSessionId,
|
||||
searchParams,
|
||||
filterManager,
|
||||
firstMessage,
|
||||
setSelectedAgentFromId,
|
||||
setSelectedDocuments,
|
||||
setCurrentMessageFiles,
|
||||
chatSessionIdRef,
|
||||
loadedIdSessionRef,
|
||||
chatInputBarRef,
|
||||
isInitialLoad,
|
||||
submitOnLoadPerformed,
|
||||
refreshChatSessions,
|
||||
onSubmit,
|
||||
});
|
||||
|
||||
useSendMessageToParent();
|
||||
|
||||
@@ -679,7 +686,10 @@ export default function AppPage({ firstMessage }: ChatPageProps) {
|
||||
{/* ChatUI */}
|
||||
<Fade
|
||||
show={
|
||||
appFocus.isChat() && !!currentChatSessionId && !!liveAgent
|
||||
appFocus.isChat() &&
|
||||
!!currentChatSessionId &&
|
||||
!!liveAgent &&
|
||||
!sessionFetchError
|
||||
}
|
||||
className="h-full w-full flex flex-col items-center"
|
||||
>
|
||||
@@ -708,6 +718,45 @@ export default function AppPage({ firstMessage }: ChatPageProps) {
|
||||
</ChatScrollContainer>
|
||||
</Fade>
|
||||
|
||||
{/* Session fetch error (404 / 403) */}
|
||||
<Fade
|
||||
show={appFocus.isChat() && sessionFetchError !== null}
|
||||
className="h-full w-full flex flex-col items-center justify-center"
|
||||
>
|
||||
{sessionFetchError && (
|
||||
<Section
|
||||
flexDirection="column"
|
||||
alignItems="center"
|
||||
gap={1}
|
||||
>
|
||||
<IllustrationContent
|
||||
illustration={
|
||||
sessionFetchError.type === "access_denied"
|
||||
? SvgNoAccess
|
||||
: SvgNotFound
|
||||
}
|
||||
title={
|
||||
sessionFetchError.type === "not_found"
|
||||
? "Chat not found"
|
||||
: sessionFetchError.type === "access_denied"
|
||||
? "Access denied"
|
||||
: "Something went wrong"
|
||||
}
|
||||
description={
|
||||
sessionFetchError.type === "not_found"
|
||||
? "This chat session doesn't exist or has been deleted."
|
||||
: sessionFetchError.type === "access_denied"
|
||||
? "You don't have permission to view this chat session."
|
||||
: sessionFetchError.detail
|
||||
}
|
||||
/>
|
||||
<Button href="/app" prominence="secondary">
|
||||
Start a new chat
|
||||
</Button>
|
||||
</Section>
|
||||
)}
|
||||
</Fade>
|
||||
|
||||
{/* ProjectUI */}
|
||||
{appFocus.isProject() && (
|
||||
<div className="w-full max-h-[50vh] overflow-y-auto overscroll-y-none">
|
||||
@@ -736,7 +785,12 @@ export default function AppPage({ firstMessage }: ChatPageProps) {
|
||||
</div>
|
||||
|
||||
{/* ── Middle-center: AppInputBar ── */}
|
||||
<div className="row-start-2 flex flex-col items-center px-4">
|
||||
<div
|
||||
className={cn(
|
||||
"row-start-2 flex flex-col items-center px-4",
|
||||
sessionFetchError && "hidden"
|
||||
)}
|
||||
>
|
||||
<div className="relative w-full max-w-[var(--app-page-main-content-width)] flex flex-col">
|
||||
{/* Scroll to bottom button - positioned absolutely above AppInputBar */}
|
||||
{appFocus.isChat() && showScrollButton && (
|
||||
|
||||
32
web/src/refresh-pages/admin/AgentsPage.tsx
Normal file
32
web/src/refresh-pages/admin/AgentsPage.tsx
Normal file
@@ -0,0 +1,32 @@
|
||||
"use client";
|
||||
|
||||
import { SvgOnyxOctagon, SvgPlus } from "@opal/icons";
|
||||
import { Button } from "@opal/components";
|
||||
import * as SettingsLayouts from "@/layouts/settings-layouts";
|
||||
import Link from "next/link";
|
||||
|
||||
import AgentsTable from "./AgentsPage/AgentsTable";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Page
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export default function AgentsPage() {
|
||||
return (
|
||||
<SettingsLayouts.Root>
|
||||
<SettingsLayouts.Header
|
||||
title="Agents"
|
||||
description="Customize AI behavior and knowledge with agents. Manage agents in your organization."
|
||||
icon={SvgOnyxOctagon}
|
||||
rightChildren={
|
||||
<Link href="/app/agents/create?admin=true">
|
||||
<Button icon={SvgPlus}>New Agent</Button>
|
||||
</Link>
|
||||
}
|
||||
/>
|
||||
<SettingsLayouts.Body>
|
||||
<AgentsTable />
|
||||
</SettingsLayouts.Body>
|
||||
</SettingsLayouts.Root>
|
||||
);
|
||||
}
|
||||
378
web/src/refresh-pages/admin/AgentsPage/AgentRowActions.tsx
Normal file
378
web/src/refresh-pages/admin/AgentsPage/AgentRowActions.tsx
Normal file
@@ -0,0 +1,378 @@
|
||||
"use client";
|
||||
|
||||
import { useCallback, useState } from "react";
|
||||
import { Button } from "@opal/components";
|
||||
// TODO(@raunakab): migrate to Opal LineItemButton once it supports danger variant
|
||||
import LineItem from "@/refresh-components/buttons/LineItem";
|
||||
import { Disabled } from "@opal/core";
|
||||
import { cn } from "@opal/utils";
|
||||
import {
|
||||
SvgMoreHorizontal,
|
||||
SvgEdit,
|
||||
SvgEye,
|
||||
SvgEyeOff,
|
||||
SvgStar,
|
||||
SvgStarOff,
|
||||
SvgShare,
|
||||
SvgBarChart,
|
||||
SvgTrash,
|
||||
} from "@opal/icons";
|
||||
import Popover, { PopoverMenu } from "@/refresh-components/Popover";
|
||||
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import { toast } from "@/hooks/useToast";
|
||||
import { useRouter } from "next/navigation";
|
||||
import {
|
||||
deleteAgent,
|
||||
toggleAgentFeatured,
|
||||
toggleAgentListed,
|
||||
} from "@/refresh-pages/admin/AgentsPage/svc";
|
||||
import type { AgentRow } from "@/refresh-pages/admin/AgentsPage/interfaces";
|
||||
import type { Route } from "next";
|
||||
import ShareAgentModal from "@/sections/modals/ShareAgentModal";
|
||||
import { useCreateModal } from "@/refresh-components/contexts/ModalContext";
|
||||
import { useAgent } from "@/hooks/useAgents";
|
||||
import {
|
||||
updateAgentSharedStatus,
|
||||
updateAgentFeaturedStatus,
|
||||
} from "@/lib/agents";
|
||||
import { usePaidEnterpriseFeaturesEnabled } from "@/components/settings/usePaidEnterpriseFeaturesEnabled";
|
||||
import { useUser } from "@/providers/UserProvider";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
interface AgentRowActionsProps {
|
||||
agent: AgentRow;
|
||||
onMutate: () => void;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Per-row action buttons for the admin agents table: an edit shortcut, a
 * feature/unfeature (or re-list) button, and an overflow menu with
 * list/unlist, share, stats (enterprise only), and delete.
 *
 * All mutations funnel through `handleAction`, which reports success/failure
 * via toasts and invokes `onMutate` so the parent table can refresh its data.
 */
export default function AgentRowActions({
  agent,
  onMutate,
}: AgentRowActionsProps) {
  const router = useRouter();
  const { isAdmin, isCurator } = useUser();
  const isPaidEnterpriseFeaturesEnabled = usePaidEnterpriseFeaturesEnabled();
  // Only admins and curators may change the featured flag.
  const canUpdateFeaturedStatus = isAdmin || isCurator;
  // Full agent record (users/groups/labels) backing the share modal.
  const { agent: fullAgent, refresh: refreshAgent } = useAgent(agent.id);
  const shareModal = useCreateModal();

  const [popoverOpen, setPopoverOpen] = useState(false);
  // Disables confirmation-modal buttons while a mutation is in flight.
  const [isSubmitting, setIsSubmitting] = useState(false);
  const [deleteOpen, setDeleteOpen] = useState(false);
  const [featuredOpen, setFeaturedOpen] = useState(false);
  const [unlistOpen, setUnlistOpen] = useState(false);

  /**
   * Runs a mutation, then notifies the parent (`onMutate`) and closes the
   * originating modal on success; shows an error toast on failure.
   */
  async function handleAction(action: () => Promise<void>, close: () => void) {
    setIsSubmitting(true);
    try {
      await action();
      onMutate();
      toast.success(`${agent.name} updated successfully.`);
      close();
    } catch (err) {
      toast.error(err instanceof Error ? err.message : "An error occurred");
    } finally {
      setIsSubmitting(false);
    }
  }

  // Persists sharing settings from the share modal. Featured status is
  // updated via a separate call, and only for admins/curators.
  const handleShare = useCallback(
    async (
      userIds: string[],
      groupIds: number[],
      isPublic: boolean,
      isFeatured: boolean,
      labelIds: number[]
    ) => {
      const shareError = await updateAgentSharedStatus(
        agent.id,
        userIds,
        groupIds,
        isPublic,
        isPaidEnterpriseFeaturesEnabled,
        labelIds
      );

      if (shareError) {
        toast.error(`Failed to share agent: ${shareError}`);
        return;
      }

      if (canUpdateFeaturedStatus) {
        const featuredError = await updateAgentFeaturedStatus(
          agent.id,
          isFeatured
        );
        if (featuredError) {
          toast.error(`Failed to update featured status: ${featuredError}`);
          // Re-sync local state with the server after the partial failure.
          refreshAgent();
          return;
        }
      }

      refreshAgent();
      onMutate();
      shareModal.toggle(false);
    },
    // NOTE(review): `shareModal` is used above (`shareModal.toggle`) but is
    // omitted from this dependency array — confirm the modal handle returned
    // by useCreateModal is referentially stable.
    [
      agent.id,
      isPaidEnterpriseFeaturesEnabled,
      canUpdateFeaturedStatus,
      refreshAgent,
      onMutate,
    ]
  );

  return (
    <>
      <shareModal.Provider>
        <ShareAgentModal
          agentId={agent.id}
          userIds={fullAgent?.users?.map((u) => u.id) ?? []}
          groupIds={fullAgent?.groups ?? []}
          isPublic={fullAgent?.is_public ?? false}
          isFeatured={fullAgent?.is_featured ?? false}
          labelIds={fullAgent?.labels?.map((l) => l.id) ?? []}
          onShare={handleShare}
        />
      </shareModal.Provider>

      <div className="flex items-center gap-0.5">
        {/* TODO(@raunakab): abstract a more standardized way of doing this
            opacity-on-hover animation. Making Hoverable more extensible
            (e.g. supporting table row groups) would let us use it here
            instead of raw Tailwind group-hover. */}
        {/* Edit shortcut — hidden for built-in agents. */}
        {!agent.builtin_persona && (
          <div className="opacity-0 group-hover/row:opacity-100 transition-opacity">
            <Button
              prominence="tertiary"
              icon={SvgEdit}
              tooltip="Edit Agent"
              onClick={() =>
                router.push(
                  `/app/agents/edit/${
                    agent.id
                  }?u=${Date.now()}&admin=true` as Route
                )
              }
            />
          </div>
        )}
        {/* Unlisted agents get a one-click re-list button; listed agents get
            the feature toggle (which opens a confirmation modal). */}
        {!agent.is_listed ? (
          <Button
            prominence="tertiary"
            icon={SvgEyeOff}
            tooltip="Re-list Agent"
            onClick={() =>
              handleAction(
                () => toggleAgentListed(agent.id, agent.is_listed),
                () => {}
              )
            }
          />
        ) : (
          <div
            className={cn(
              !agent.is_featured &&
                "opacity-0 group-hover/row:opacity-100 transition-opacity"
            )}
          >
            <Button
              prominence="tertiary"
              icon={SvgStar}
              interaction={featuredOpen ? "hover" : "rest"}
              tooltip={
                agent.is_featured ? "Remove Featured" : "Set as Featured"
              }
              onClick={() => {
                setPopoverOpen(false);
                setFeaturedOpen(true);
              }}
            />
          </div>
        )}

        {/* Overflow menu */}
        <Popover open={popoverOpen} onOpenChange={setPopoverOpen}>
          <div
            className={cn(
              !popoverOpen &&
                "opacity-0 group-hover/row:opacity-100 transition-opacity"
            )}
          >
            <Popover.Trigger asChild>
              <Button prominence="tertiary" icon={SvgMoreHorizontal} />
            </Popover.Trigger>
          </div>
          <Popover.Content align="end" width="sm">
            <PopoverMenu>
              {[
                <LineItem
                  key="visibility"
                  icon={agent.is_listed ? SvgEyeOff : SvgEye}
                  onClick={() => {
                    setPopoverOpen(false);
                    // Unlisting asks for confirmation; re-listing is immediate.
                    if (agent.is_listed) {
                      setUnlistOpen(true);
                    } else {
                      handleAction(
                        () => toggleAgentListed(agent.id, agent.is_listed),
                        () => {}
                      );
                    }
                  }}
                >
                  {agent.is_listed ? "Unlist Agent" : "List Agent"}
                </LineItem>,
                <LineItem
                  key="share"
                  icon={SvgShare}
                  onClick={() => {
                    setPopoverOpen(false);
                    shareModal.toggle(true);
                  }}
                >
                  Share
                </LineItem>,
                isPaidEnterpriseFeaturesEnabled ? (
                  <LineItem
                    key="stats"
                    icon={SvgBarChart}
                    onClick={() => {
                      setPopoverOpen(false);
                      router.push(`/ee/agents/stats/${agent.id}` as Route);
                    }}
                  >
                    Stats
                  </LineItem>
                ) : undefined,
                // NOTE(review): this entry always renders nothing
                // (null/undefined either way) — looks like a leftover;
                // confirm whether a separator item was intended here.
                !agent.builtin_persona ? null : undefined,
                !agent.builtin_persona ? (
                  <LineItem
                    key="delete"
                    icon={SvgTrash}
                    danger
                    onClick={() => {
                      setPopoverOpen(false);
                      setDeleteOpen(true);
                    }}
                  >
                    Delete
                  </LineItem>
                ) : undefined,
              ]}
            </PopoverMenu>
          </Popover.Content>
        </Popover>
      </div>

      {/* Delete confirmation modal. */}
      {deleteOpen && (
        <ConfirmationModalLayout
          icon={SvgTrash}
          title="Delete Agent"
          onClose={isSubmitting ? undefined : () => setDeleteOpen(false)}
          submit={
            <Disabled disabled={isSubmitting}>
              <Button
                variant="danger"
                onClick={() => {
                  handleAction(
                    () => deleteAgent(agent.id),
                    () => setDeleteOpen(false)
                  );
                }}
              >
                Delete
              </Button>
            </Disabled>
          }
        >
          <Text as="p" text03>
            Are you sure you want to delete{" "}
            <Text as="span" text05>
              {agent.name}
            </Text>
            ? This action cannot be undone.
          </Text>
        </ConfirmationModalLayout>
      )}

      {/* Feature/unfeature confirmation modal. */}
      {featuredOpen && (
        <ConfirmationModalLayout
          icon={agent.is_featured ? SvgStarOff : SvgStar}
          title={
            agent.is_featured
              ? `Remove ${agent.name} from Featured`
              : `Feature ${agent.name}`
          }
          onClose={isSubmitting ? undefined : () => setFeaturedOpen(false)}
          submit={
            <Disabled disabled={isSubmitting}>
              <Button
                onClick={() => {
                  handleAction(
                    () => toggleAgentFeatured(agent.id, agent.is_featured),
                    () => setFeaturedOpen(false)
                  );
                }}
              >
                {agent.is_featured ? "Unfeature" : "Feature"}
              </Button>
            </Disabled>
          }
        >
          <div className="flex flex-col gap-2">
            <Text as="p" text03>
              {agent.is_featured
                ? `This will remove ${agent.name} from the featured section on top of the explore agents list. New users will no longer see it pinned to their sidebar, but existing pins are unaffected.`
                : "Featured agents appear at the top of the explore agents list and are automatically pinned to the sidebar for new users with access. Use this to highlight recommended agents across your organization."}
            </Text>
            <Text as="p" text03>
              This does not change who can access this agent.
            </Text>
          </div>
        </ConfirmationModalLayout>
      )}

      {/* Unlist confirmation modal. */}
      {unlistOpen && (
        <ConfirmationModalLayout
          icon={SvgEyeOff}
          title={`Unlist ${agent.name}`}
          onClose={isSubmitting ? undefined : () => setUnlistOpen(false)}
          submit={
            <Disabled disabled={isSubmitting}>
              <Button
                onClick={() => {
                  handleAction(
                    () => toggleAgentListed(agent.id, agent.is_listed),
                    () => setUnlistOpen(false)
                  );
                }}
              >
                Unlist
              </Button>
            </Disabled>
          }
        >
          <div className="flex flex-col gap-2">
            <Text as="p" text03>
              Unlisted agents don't appear in the explore agents list but
              remain accessible via direct link, and to users who have
              previously used or pinned them.
            </Text>
            <Text as="p" text03>
              This does not change who can access this agent.
            </Text>
          </div>
        </ConfirmationModalLayout>
      )}
    </>
  );
}
|
||||
210
web/src/refresh-pages/admin/AgentsPage/AgentsTable.tsx
Normal file
210
web/src/refresh-pages/admin/AgentsPage/AgentsTable.tsx
Normal file
@@ -0,0 +1,210 @@
|
||||
"use client";
|
||||
|
||||
import { useMemo, useState } from "react";
|
||||
import { Table, createTableColumns } from "@opal/components";
|
||||
import { Content, IllustrationContent } from "@opal/layouts";
|
||||
import SvgNoResult from "@opal/illustrations/no-result";
|
||||
import SimpleLoader from "@/refresh-components/loaders/SimpleLoader";
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
|
||||
import type { MinimalUserSnapshot } from "@/lib/types";
|
||||
import AgentAvatar from "@/refresh-components/avatars/AgentAvatar";
|
||||
import type { MinimalPersonaSnapshot } from "@/app/admin/agents/interfaces";
|
||||
import { useAdminPersonas } from "@/hooks/useAdminPersonas";
|
||||
import { toast } from "@/hooks/useToast";
|
||||
import AgentRowActions from "@/refresh-pages/admin/AgentsPage/AgentRowActions";
|
||||
import { updateAgentDisplayPriorities } from "@/refresh-pages/admin/AgentsPage/svc";
|
||||
import type { AgentRow } from "@/refresh-pages/admin/AgentsPage/interfaces";
|
||||
import type { Persona } from "@/app/admin/agents/interfaces";
|
||||
import { SvgUser } from "@opal/icons";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function toAgentRow(persona: Persona): AgentRow {
|
||||
return {
|
||||
id: persona.id,
|
||||
name: persona.name,
|
||||
description: persona.description,
|
||||
is_public: persona.is_public,
|
||||
is_listed: persona.is_listed,
|
||||
is_featured: persona.is_featured,
|
||||
builtin_persona: persona.builtin_persona,
|
||||
display_priority: persona.display_priority,
|
||||
owner: persona.owner,
|
||||
groups: persona.groups,
|
||||
users: persona.users,
|
||||
uploaded_image_id: persona.uploaded_image_id,
|
||||
icon_name: persona.icon_name,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Column renderers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function renderCreatedByColumn(
|
||||
_value: MinimalUserSnapshot | null,
|
||||
row: AgentRow
|
||||
) {
|
||||
return (
|
||||
<Content
|
||||
sizePreset="main-ui"
|
||||
variant="section"
|
||||
icon={SvgUser}
|
||||
title={row.builtin_persona ? "System" : row.owner?.email ?? "\u2014"}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
function getAccessTitle(row: AgentRow): string {
|
||||
if (row.is_public) return "Public";
|
||||
if (row.groups.length > 0 || row.users.length > 0) return "Shared";
|
||||
return "Private";
|
||||
}
|
||||
|
||||
function renderAccessColumn(_isPublic: boolean, row: AgentRow) {
|
||||
return (
|
||||
<Content
|
||||
sizePreset="main-ui"
|
||||
variant="section"
|
||||
title={getAccessTitle(row)}
|
||||
description={
|
||||
!row.is_listed ? "Unlisted" : row.is_featured ? "Featured" : undefined
|
||||
}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Columns
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Column builder typed against AgentRow so column keys and cell renderers
// are checked by the compiler.
const tc = createTableColumns<AgentRow>();
|
||||
|
||||
/**
 * Builds the column definitions for the agents table.
 *
 * @param onMutate invoked by the per-row actions after a successful mutation
 *   so the parent can refresh its data.
 */
function buildColumns(onMutate: () => void) {
  return [
    // Leading avatar column.
    tc.qualifier({
      content: "icon",
      background: true,
      getContent: (row) => (props) => (
        <AgentAvatar
          agent={row as unknown as MinimalPersonaSnapshot}
          size={props.size}
        />
      ),
    }),
    tc.column("name", {
      header: "Name",
      weight: 25,
      cell: (value) => (
        <Text as="span" mainUiBody text05>
          {value}
        </Text>
      ),
    }),
    tc.column("description", {
      header: "Description",
      weight: 35,
      cell: (value) => (
        // Em dash placeholder for empty descriptions.
        <Text as="span" mainUiBody text03>
          {value || "\u2014"}
        </Text>
      ),
    }),
    tc.column("owner", {
      header: "Created By",
      weight: 20,
      cell: renderCreatedByColumn,
    }),
    tc.column("is_public", {
      header: "Access",
      weight: 12,
      cell: renderAccessColumn,
    }),
    // Trailing per-row action buttons (edit / feature / overflow menu).
    tc.actions({
      cell: (row) => <AgentRowActions agent={row} onMutate={onMutate} />,
    }),
  ];
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Number of agent rows shown per table page.
const PAGE_SIZE = 10;
|
||||
|
||||
/**
 * Admin table of agents (built-in personas excluded) with search,
 * drag-to-reorder display priorities, and per-row actions.
 */
export default function AgentsTable() {
  const [searchTerm, setSearchTerm] = useState("");

  const { personas, isLoading, error, refresh } = useAdminPersonas();

  // Columns capture `refresh` so row actions can trigger a data reload.
  const columns = useMemo(() => buildColumns(refresh), [refresh]);

  // Built-in personas are filtered out before projecting to table rows.
  const agentRows: AgentRow[] = useMemo(
    () => personas.filter((p) => !p.builtin_persona).map(toAgentRow),
    [personas]
  );

  // Persists the new ordering after a drag. Refreshes on both success and
  // failure so the UI reflects the server's actual state.
  const handleReorder = async (
    _orderedIds: string[],
    changedOrders: Record<string, number>
  ) => {
    try {
      await updateAgentDisplayPriorities(changedOrders);
      refresh();
    } catch (err) {
      toast.error(
        err instanceof Error ? err.message : "Failed to update agent order"
      );
      refresh();
    }
  };

  if (isLoading) {
    return (
      <div className="flex justify-center py-12">
        <SimpleLoader className="h-6 w-6" />
      </div>
    );
  }

  if (error) {
    console.error("Failed to load agents:", error);
    return (
      <Text as="p" secondaryBody text03>
        Failed to load agents. Please try refreshing the page.
      </Text>
    );
  }

  return (
    <div className="flex flex-col gap-3">
      <InputTypeIn
        value={searchTerm}
        onChange={(e) => setSearchTerm(e.target.value)}
        placeholder="Search agents..."
        leftSearchIcon
      />
      <Table
        data={agentRows}
        columns={columns}
        getRowId={(row) => String(row.id)}
        pageSize={PAGE_SIZE}
        searchTerm={searchTerm}
        draggable={{
          onReorder: handleReorder,
        }}
        emptyState={
          <IllustrationContent
            illustration={SvgNoResult}
            title="No agents found"
            description="No agents match the current search."
          />
        }
        footer={{}}
      />
    </div>
  );
}
|
||||
17
web/src/refresh-pages/admin/AgentsPage/interfaces.ts
Normal file
17
web/src/refresh-pages/admin/AgentsPage/interfaces.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import type { MinimalUserSnapshot } from "@/lib/types";
|
||||
|
||||
/**
 * Flattened persona data consumed by the admin agents table; produced by
 * `toAgentRow` from a full `Persona`.
 */
export interface AgentRow {
  id: number;
  name: string;
  description: string;
  // True when the agent is visible to everyone (shown as "Public").
  is_public: boolean;
  // Whether the agent appears in the explore-agents list; unlisted agents
  // remain reachable via direct link.
  is_listed: boolean;
  // Featured agents are surfaced at the top of the explore list and
  // auto-pinned for new users.
  is_featured: boolean;
  // System-provided agent; the admin UI hides edit/delete for these.
  builtin_persona: boolean;
  // Manual sort order for the table; null when unset.
  display_priority: number | null;
  // Creator of the agent; null when there is no individual owner.
  owner: MinimalUserSnapshot | null;
  // Ids of user groups the agent is shared with.
  groups: number[];
  // Individual users the agent is shared with.
  users: MinimalUserSnapshot[];
  uploaded_image_id?: string;
  icon_name?: string;
}
|
||||
69
web/src/refresh-pages/admin/AgentsPage/svc.ts
Normal file
69
web/src/refresh-pages/admin/AgentsPage/svc.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
async function parseErrorDetail(
|
||||
res: Response,
|
||||
fallback: string
|
||||
): Promise<string> {
|
||||
try {
|
||||
const body = await res.json();
|
||||
return body?.detail ?? fallback;
|
||||
} catch (err) {
|
||||
console.error("Failed to parse error response:", err);
|
||||
return fallback;
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteAgent(agentId: number): Promise<void> {
|
||||
const res = await fetch(`/api/persona/${agentId}`, {
|
||||
method: "DELETE",
|
||||
credentials: "include",
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(await parseErrorDetail(res, "Failed to delete agent"));
|
||||
}
|
||||
}
|
||||
|
||||
export async function toggleAgentFeatured(
|
||||
agentId: number,
|
||||
currentlyFeatured: boolean
|
||||
): Promise<void> {
|
||||
const res = await fetch(`/api/admin/persona/${agentId}/featured`, {
|
||||
method: "PATCH",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ is_featured: !currentlyFeatured }),
|
||||
credentials: "include",
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(
|
||||
await parseErrorDetail(res, "Failed to toggle featured status")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function toggleAgentListed(
|
||||
agentId: number,
|
||||
currentlyListed: boolean
|
||||
): Promise<void> {
|
||||
const res = await fetch(`/api/admin/persona/${agentId}/listed`, {
|
||||
method: "PATCH",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ is_listed: !currentlyListed }),
|
||||
credentials: "include",
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(await parseErrorDetail(res, "Failed to toggle visibility"));
|
||||
}
|
||||
}
|
||||
|
||||
export async function updateAgentDisplayPriorities(
|
||||
displayPriorityMap: Record<string, number>
|
||||
): Promise<void> {
|
||||
const res = await fetch("/api/admin/agents/display-priorities", {
|
||||
method: "PATCH",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ display_priority_map: displayPriorityMap }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(
|
||||
await parseErrorDetail(res, "Failed to update agent order")
|
||||
);
|
||||
}
|
||||
}
|
||||
158
web/src/refresh-pages/admin/GroupsPage/CreateGroupPage.tsx
Normal file
158
web/src/refresh-pages/admin/GroupsPage/CreateGroupPage.tsx
Normal file
@@ -0,0 +1,158 @@
|
||||
"use client";
|
||||
|
||||
import { useMemo, useState } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import useSWR from "swr";
|
||||
import { Table, Button } from "@opal/components";
|
||||
import { IllustrationContent } from "@opal/layouts";
|
||||
import { SvgUsers } from "@opal/icons";
|
||||
import SvgNoResult from "@opal/illustrations/no-result";
|
||||
import * as SettingsLayouts from "@/layouts/settings-layouts";
|
||||
import { Section } from "@/layouts/general-layouts";
|
||||
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import SimpleLoader from "@/refresh-components/loaders/SimpleLoader";
|
||||
import Separator from "@/refresh-components/Separator";
|
||||
import { toast } from "@/hooks/useToast";
|
||||
import { errorHandlingFetcher } from "@/lib/fetcher";
|
||||
import useAdminUsers from "@/hooks/useAdminUsers";
|
||||
import type { ApiKeyDescriptor, MemberRow } from "./interfaces";
|
||||
import { createGroup } from "./svc";
|
||||
import { apiKeyToMemberRow, memberTableColumns, PAGE_SIZE } from "./shared";
|
||||
|
||||
/**
 * Admin page for creating a user group: a name input plus a searchable,
 * multi-select table of active users and API-key service accounts to add as
 * initial members.
 */
function CreateGroupPage() {
  const router = useRouter();
  const [groupName, setGroupName] = useState("");
  const [selectedUserIds, setSelectedUserIds] = useState<string[]>([]);
  const [searchTerm, setSearchTerm] = useState("");
  const [isSubmitting, setIsSubmitting] = useState(false);

  const { users, isLoading: usersLoading, error: usersError } = useAdminUsers();

  // API keys are presented as selectable service-account members.
  const {
    data: apiKeys,
    isLoading: apiKeysLoading,
    error: apiKeysError,
  } = useSWR<ApiKeyDescriptor[]>("/api/admin/api-key", errorHandlingFetcher);

  const isLoading = usersLoading || apiKeysLoading;
  const error = usersError ?? apiKeysError;

  // Candidate members: active users first, then service accounts.
  const allRows: MemberRow[] = useMemo(() => {
    const activeUsers = users.filter((u) => u.is_active);
    const serviceAccountRows = (apiKeys ?? []).map(apiKeyToMemberRow);
    return [...activeUsers, ...serviceAccountRows];
  }, [users, apiKeys]);

  // Validates the name, creates the group with the selected members, then
  // navigates back to the groups list.
  async function handleCreate() {
    const trimmed = groupName.trim();
    if (!trimmed) {
      toast.error("Group name is required");
      return;
    }

    setIsSubmitting(true);
    try {
      await createGroup(trimmed, selectedUserIds);
      toast.success(`Group "${trimmed}" created`);
      router.push("/admin/groups");
    } catch (e) {
      toast.error(e instanceof Error ? e.message : "Failed to create group");
    } finally {
      setIsSubmitting(false);
    }
  }

  // Cancel / Create buttons rendered on the right side of the page header.
  const headerActions = (
    <Section flexDirection="row" gap={0.5} width="auto" height="auto">
      <Button
        prominence="tertiary"
        onClick={() => router.push("/admin/groups")}
      >
        Cancel
      </Button>
      <Button
        onClick={handleCreate}
        disabled={!groupName.trim() || isSubmitting}
      >
        Create
      </Button>
    </Section>
  );

  return (
    <SettingsLayouts.Root width="lg">
      <SettingsLayouts.Header
        icon={SvgUsers}
        title="Create Group"
        separator
        rightChildren={headerActions}
      />

      <SettingsLayouts.Body>
        {/* Group Name */}
        <Section
          gap={0.5}
          height="auto"
          alignItems="stretch"
          justifyContent="start"
        >
          <Text mainUiBody text04>
            Group Name
          </Text>
          <InputTypeIn
            placeholder="Name your group"
            value={groupName}
            onChange={(e) => setGroupName(e.target.value)}
          />
        </Section>

        <Separator noPadding />

        {/* Members table */}
        {isLoading && <SimpleLoader />}

        {error && (
          <Text as="p" secondaryBody text03>
            Failed to load users.
          </Text>
        )}

        {!isLoading && !error && (
          <Section
            gap={0.75}
            height="auto"
            alignItems="stretch"
            justifyContent="start"
          >
            <InputTypeIn
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              placeholder="Search users and accounts..."
              leftSearchIcon
            />
            <Table
              data={allRows}
              columns={memberTableColumns}
              getRowId={(row) => row.id ?? row.email}
              pageSize={PAGE_SIZE}
              searchTerm={searchTerm}
              selectionBehavior="multi-select"
              onSelectionChange={setSelectedUserIds}
              footer={{}}
              emptyState={
                <IllustrationContent
                  illustration={SvgNoResult}
                  title="No users found"
                  description="No users match your search."
                />
              }
            />
          </Section>
        )}
      </SettingsLayouts.Body>
    </SettingsLayouts.Root>
  );
}

export default CreateGroupPage;
|
||||
@@ -5,17 +5,38 @@ import { SvgChevronRight, SvgUserManage, SvgUsers } from "@opal/icons";
|
||||
import { ContentAction } from "@opal/layouts";
|
||||
import { Section } from "@/layouts/general-layouts";
|
||||
import Card from "@/refresh-components/cards/Card";
|
||||
import IconButton from "@/refresh-components/buttons/IconButton";
|
||||
import { Button } from "@opal/components";
|
||||
import Text from "@/refresh-components/texts/Text";
|
||||
import { buildGroupDescription, formatMemberCount } from "./utils";
|
||||
import {
|
||||
isBuiltInGroup,
|
||||
buildGroupDescription,
|
||||
formatMemberCount,
|
||||
} from "./utils";
|
||||
import { renameGroup, USER_GROUP_URL } from "./svc";
|
||||
import { toast } from "@/hooks/useToast";
|
||||
import { useSWRConfig } from "swr";
|
||||
|
||||
interface GroupCardProps {
|
||||
group: UserGroup;
|
||||
}
|
||||
|
||||
function GroupCard({ group }: GroupCardProps) {
|
||||
const isBasic = group.name === "Basic";
|
||||
const { mutate } = useSWRConfig();
|
||||
const builtIn = isBuiltInGroup(group);
|
||||
const isAdmin = group.name === "Admin";
|
||||
const isBasic = group.name === "Basic";
|
||||
const isSyncing = !group.is_up_to_date;
|
||||
|
||||
async function handleRename(newName: string) {
|
||||
try {
|
||||
await renameGroup(group.id, newName);
|
||||
mutate(USER_GROUP_URL);
|
||||
toast.success(`Group renamed to "${newName}"`);
|
||||
} catch (e) {
|
||||
console.error("Failed to rename group:", e);
|
||||
toast.error(e instanceof Error ? e.message : "Failed to rename group");
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Card padding={0.5}>
|
||||
@@ -26,12 +47,20 @@ function GroupCard({ group }: GroupCardProps) {
|
||||
sizePreset="main-content"
|
||||
variant="section"
|
||||
tag={isBasic ? { title: "Default" } : undefined}
|
||||
editable={!builtIn && !isSyncing}
|
||||
onTitleChange={!builtIn && !isSyncing ? handleRename : undefined}
|
||||
rightChildren={
|
||||
<Section flexDirection="row" alignItems="center">
|
||||
<Text mainUiBody text03>
|
||||
{formatMemberCount(group.users.length)}
|
||||
</Text>
|
||||
<IconButton icon={SvgChevronRight} tertiary tooltip="View group" />
|
||||
<Section flexDirection="row" alignItems="start" gap={0}>
|
||||
<div className="py-1">
|
||||
<Text mainUiBody text03>
|
||||
{formatMemberCount(group.users.length)}
|
||||
</Text>
|
||||
</div>
|
||||
<Button
|
||||
icon={SvgChevronRight}
|
||||
prominence="tertiary"
|
||||
tooltip="View group"
|
||||
/>
|
||||
</Section>
|
||||
}
|
||||
/>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user