Compare commits

..

25 Commits

Author SHA1 Message Date
SubashMohan
a8cd9036dd feat(notification): add USER_GROUP_ASSIGNMENT_FAILED type and improve notification query logic 2026-03-29 12:51:41 +05:30
SubashMohan
3712617f71 fix(tests): update documentation to clarify captcha_token exclusion in UserCreate tests 2026-03-29 12:04:03 +05:30
SubashMohan
f15cec290a feat(migration): set default value for account_type to STANDARD in user model 2026-03-29 11:58:22 +05:30
SubashMohan
0ce92aa788 fix(migration): update API key email pattern and change LIKE to ILIKE for case-insensitive matching 2026-03-29 11:52:55 +05:30
SubashMohan
bf33e88f57 feat(migration): add error handling for missing default groups in user assignment
feat(users): update account type for external permissioned users to BOT
fix(tests): update tests to reflect changes in default group assignment behavior
feat(types): introduce AccountType enum for better user role management
2026-03-29 11:38:32 +05:30
SubashMohan
9d48e03de5 fix(tests): comment out group visibility checks pending permission implementation 2026-03-28 15:37:48 +05:30
SubashMohan
1e3d55ba29 feat(api): include default user groups in getUserGroups response 2026-03-28 13:31:19 +05:30
SubashMohan
3e88f62566 feat(tests): add account_type assignment for test users in fixtures 2026-03-28 13:00:20 +05:30
SubashMohan
ba505e9dac feat(migration): exclude inactive users from group assignments in upgrade function 2026-03-28 12:34:26 +05:30
SubashMohan
706bd0d949 feat(migration): enhance downgrade logic to prevent FK violations and improve user creation tests with account type handling 2026-03-28 10:07:08 +05:30
SubashMohan
0c109789fc fix(migration): adjust downgrade logic for account_type backfill and correct revision reference 2026-03-27 21:00:40 +05:30
SubashMohan
d8427d9304 fix(migration): update permission grant source to 'SYSTEM' for consistency 2026-03-27 20:51:49 +05:30
SubashMohan
12b2b6ad0e feat(tests): add tests for account_type assignment in user creation functions 2026-03-27 20:51:49 +05:30
SubashMohan
3d9b2dfbf7 feat(api_key): update group assignment logic for LIMITED role service accounts 2026-03-27 20:51:49 +05:30
SubashMohan
b0e4d31f9c feat(tests): add SAML user conversion test for account_type and group assignment 2026-03-27 20:51:49 +05:30
SubashMohan
074dd0d339 feat(tests): add integration test for SCIM user creation with default group and account type 2026-03-27 20:51:49 +05:30
SubashMohan
dd92c043ef feat(users): enhance user management with account_type and default group assignment tests 2026-03-27 20:51:49 +05:30
SubashMohan
89f9f9c1a5 feat(users): add option to include default user groups in batch user group retrieval 2026-03-27 20:51:49 +05:30
SubashMohan
4769dc5dec fix(users): update permission handling in default group assignment logic 2026-03-27 20:51:49 +05:30
SubashMohan
57256d3c61 feat(users): add account_type to user creation and assign to default groups 2026-03-27 20:51:49 +05:30
SubashMohan
33439d7f8c fix(migrations): standardize role values in user group assignment and backfill scripts 2026-03-27 20:51:49 +05:30
SubashMohan
e0efc91f53 fix(migrations): standardize account_type values in backfill and assignment scripts 2026-03-27 20:51:49 +05:30
SubashMohan
34eed5e3fd fix: update down_revision in seed_default_groups migration 2026-03-27 20:51:49 +05:30
SubashMohan
fe50f847c8 fix: remove case-insensitive checks for group names in migration 2026-03-27 20:51:49 +05:30
SubashMohan
bc0bdde137 feat(groups): implement user group backfill and assignment logic 2026-03-27 20:51:49 +05:30
62 changed files with 1576 additions and 1219 deletions

View File

@@ -47,8 +47,7 @@ jobs:
done
- name: Publish Helm charts to gh-pages
# NOTE: HEAD of https://github.com/stefanprodan/helm-gh-pages/pull/43
uses: stefanprodan/helm-gh-pages@ad32ad3b8720abfeaac83532fd1e9bdfca5bbe27 # zizmor: ignore[impostor-commit]
uses: stefanprodan/helm-gh-pages@0ad2bb377311d61ac04ad9eb6f252fb68e207260 # ratchet:stefanprodan/helm-gh-pages@v1.7.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
charts_dir: deployment/helm/charts

View File

@@ -35,7 +35,7 @@ Onyx comes loaded with advanced features like Agents, Web Search, RAG, MCP, Deep
> [!TIP]
> Run Onyx with one command (or see deployment section below):
> ```
> curl -fsSL https://onyx.app/install_onyx.sh | bash
> curl -fsSL https://raw.githubusercontent.com/onyx-dot-app/onyx/main/deployment/docker_compose/install.sh > install.sh && chmod +x install.sh && ./install.sh
> ```
****

View File

@@ -0,0 +1,102 @@
"""backfill_account_type
Revision ID: 03d085c5c38d
Revises: 977e834c1427
Create Date: 2026-03-25 16:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "03d085c5c38d"
down_revision = "977e834c1427"
branch_labels = None
depends_on = None
# Canonical account_type values written by this backfill. Kept as plain
# strings (not application enums) so the migration stays self-contained.
_STANDARD = "STANDARD"
_BOT = "BOT"
_EXT_PERM_USER = "EXT_PERM_USER"
_SERVICE_ACCOUNT = "SERVICE_ACCOUNT"
_ANONYMOUS = "ANONYMOUS"
# Well-known anonymous user UUID
ANONYMOUS_USER_ID = "00000000-0000-0000-0000-000000000002"
# Email pattern for API key virtual users; the backslashes escape the
# underscores so ILIKE matches them literally instead of as single-char
# wildcards (Postgres LIKE/ILIKE default escape character is backslash).
API_KEY_EMAIL_PATTERN = r"API\_KEY\_\_%"
def upgrade() -> None:
    """Backfill user.account_type from role/email heuristics, then enforce NOT NULL.

    Each UPDATE is guarded by ``account_type IS NULL`` so rules are applied
    in priority order — the most specific classification wins and later
    rules never overwrite earlier ones.
    """
    conn = op.get_bind()

    # (account_type value, extra WHERE predicate, extra bind params),
    # ordered most-specific first.
    backfill_rules = [
        # API key virtual users (any role) → SERVICE_ACCOUNT
        (_SERVICE_ACCOUNT, "email ILIKE :pattern", {"pattern": API_KEY_EMAIL_PATTERN}),
        # Well-known anonymous user → ANONYMOUS
        (_ANONYMOUS, "id = :anon_id", {"anon_id": ANONYMOUS_USER_ID}),
        # Slack bot users → BOT
        (_BOT, "role = 'SLACK_USER'", {}),
        # External-permission placeholder users → EXT_PERM_USER
        (_EXT_PERM_USER, "role = 'EXT_PERM_USER'", {}),
    ]
    for acct_type, predicate, extra_params in backfill_rules:
        conn.execute(
            sa.text(
                'UPDATE "user" SET account_type = :acct_type '
                f"WHERE {predicate} AND account_type IS NULL"
            ),
            {"acct_type": acct_type, **extra_params},
        )

    # Catch-all: any remaining role (ADMIN, BASIC, CURATOR, GLOBAL_CURATOR)
    # is classified STANDARD.
    conn.execute(
        sa.text(
            'UPDATE "user" SET account_type = :acct_type ' "WHERE account_type IS NULL"
        ),
        {"acct_type": _STANDARD},
    )

    # Every row is now populated — tighten the column and give new rows a
    # server-side default.
    op.alter_column(
        "user",
        "account_type",
        nullable=False,
        server_default="STANDARD",
    )
def downgrade() -> None:
    """Relax account_type back to nullable, then wipe the backfilled values."""
    bind = op.get_bind()
    # Constraint first: the column must accept NULL before we clear it.
    op.alter_column("user", "account_type", nullable=True, server_default=None)
    bind.execute(sa.text('UPDATE "user" SET account_type = NULL'))

View File

@@ -0,0 +1,108 @@
"""seed_default_groups
Revision ID: 977e834c1427
Revises: 1d78c0ca7853
Create Date: 2026-03-25 14:59:41.313091
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "977e834c1427"
down_revision = "1d78c0ca7853"
branch_labels = None
depends_on = None
# (group_name, permission_value) — one default group is created per entry
# and granted the paired permission.
DEFAULT_GROUPS = [
    ("Admin", "admin"),
    ("Basic", "basic"),
]
# Suffix used when renaming a pre-existing group whose name clashes with a
# canonical default name, and a cap on rename attempts before failing.
CUSTOM_SUFFIX = "(Custom)"
MAX_RENAME_ATTEMPTS = 100
def _find_available_name(conn: sa.engine.Connection, base: str) -> str:
    """Pick an unused rename target: 'Admin (Custom)', then 'Admin (Custom 2)', etc.

    Raises RuntimeError if no free name is found within MAX_RENAME_ATTEMPTS.
    """
    for attempt in range(1, MAX_RENAME_ATTEMPTS + 1):
        # First attempt uses the bare suffix; later attempts are numbered.
        candidate = (
            f"{base} {CUSTOM_SUFFIX}" if attempt == 1 else f"{base} (Custom {attempt})"
        )
        taken = conn.execute(
            sa.text("SELECT 1 FROM user_group WHERE name = :name LIMIT 1"),
            {"name": candidate},
        ).fetchone()
        if taken is None:
            return candidate
    raise RuntimeError(
        f"Could not find an available name for group '{base}' "
        f"after {MAX_RENAME_ATTEMPTS} attempts"
    )
def upgrade() -> None:
    """Create the canonical default groups and grant each its permission.

    Pre-existing groups that occupy a canonical name are renamed out of the
    way (and flagged not-up-to-date) rather than reused, so every default
    group is a fresh row with is_default = true.
    """
    conn = op.get_bind()
    for group_name, permission_value in DEFAULT_GROUPS:
        # Step 1: move aside ALL existing groups holding the canonical name.
        clashes = conn.execute(
            sa.text("SELECT id, name FROM user_group " "WHERE name = :name"),
            {"name": group_name},
        ).fetchall()
        for clash_id, clash_name in clashes:
            replacement = _find_available_name(conn, clash_name)
            conn.execute(
                sa.text(
                    "UPDATE user_group "
                    "SET name = :new_name, is_up_to_date = false "
                    "WHERE id = :id"
                ),
                {"new_name": replacement, "id": clash_id},
            )

        # Step 2: insert the canonical default group.
        inserted = conn.execute(
            sa.text(
                "INSERT INTO user_group "
                "(name, is_up_to_date, is_up_for_deletion, is_default) "
                "VALUES (:name, true, false, true) "
                "RETURNING id"
            ),
            {"name": group_name},
        ).fetchone()
        assert inserted is not None

        # Step 3: attach the group's permission grant (idempotent on re-run).
        conn.execute(
            sa.text(
                "INSERT INTO permission_grant (group_id, permission, grant_source) "
                "VALUES (:group_id, :permission, 'SYSTEM') "
                "ON CONFLICT (group_id, permission) DO NOTHING"
            ),
            {"group_id": inserted[0], "permission": permission_value},
        )
def downgrade() -> None:
    """Remove the default groups created by this migration.

    Delete order matters: every table referencing user_group must be
    cleared first — user-group memberships AND the permission_grant rows
    that upgrade() inserted for these groups — otherwise deleting the
    groups can violate foreign keys (or, absent an FK, leave orphaned
    grant rows behind).
    """
    conn = op.get_bind()
    # 1. Memberships pointing at default groups.
    conn.execute(
        sa.text(
            "DELETE FROM user__user_group "
            "WHERE user_group_id IN "
            "(SELECT id FROM user_group WHERE is_default = true)"
        )
    )
    # 2. Permission grants created for the default groups in upgrade().
    #    The original downgrade skipped this, risking an FK violation on
    #    the group delete below.
    conn.execute(
        sa.text(
            "DELETE FROM permission_grant "
            "WHERE group_id IN "
            "(SELECT id FROM user_group WHERE is_default = true)"
        )
    )
    # 3. Finally, the default groups themselves.
    conn.execute(sa.text("DELETE FROM user_group WHERE is_default = true"))

View File

@@ -0,0 +1,79 @@
"""assign_users_to_default_groups
Revision ID: b7bcc991d722
Revises: 03d085c5c38d
Create Date: 2026-03-25 16:30:39.529301
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "b7bcc991d722"
down_revision = "03d085c5c38d"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Assign existing active users to the seeded default groups.

    Active ADMIN-role users join the 'Admin' group; active STANDARD
    (non-admin) users plus SERVICE_ACCOUNT users with role BASIC join the
    'Basic' group. Inserts are idempotent via ON CONFLICT DO NOTHING.
    """
    conn = op.get_bind()

    # Resolve both seeded default groups up front.
    default_rows = {}
    for group_name in ("Admin", "Basic"):
        default_rows[group_name] = conn.execute(
            sa.text(
                f"SELECT id FROM user_group WHERE name = '{group_name}' AND is_default = true"
            )
        ).fetchone()

    # Fail loudly if the seed migration did not run — users without a group
    # have no permissions, so silently skipping would create broken state.
    for group_name in ("Admin", "Basic"):
        if default_rows[group_name] is None:
            raise RuntimeError(
                f"Default '{group_name}' group not found. "
                "Ensure migration 977e834c1427 (seed_default_groups) ran successfully."
            )

    # Users with role=admin → Admin group.
    # Inactive placeholder/anonymous users are excluded via is_active.
    conn.execute(
        sa.text(
            "INSERT INTO user__user_group (user_group_id, user_id) "
            'SELECT :gid, id FROM "user" '
            "WHERE role = 'ADMIN' AND is_active = true "
            "ON CONFLICT (user_group_id, user_id) DO NOTHING"
        ),
        {"gid": default_rows["Admin"][0]},
    )

    # STANDARD non-admins and SERVICE_ACCOUNT users with role=basic → Basic
    # group, again restricted to active users only.
    conn.execute(
        sa.text(
            "INSERT INTO user__user_group (user_group_id, user_id) "
            'SELECT :gid, id FROM "user" '
            "WHERE is_active = true AND ("
            "(account_type = 'STANDARD' AND role != 'ADMIN') "
            "OR (account_type = 'SERVICE_ACCOUNT' AND role = 'BASIC')"
            ") "
            "ON CONFLICT (user_group_id, user_id) DO NOTHING"
        ),
        {"gid": default_rows["Basic"][0]},
    )
def downgrade() -> None:
    """Intentionally a no-op.

    Memberships may predate this migration, so bulk-removing rows here
    risks deleting assignments the upgrade never created.
    """

View File

@@ -28,7 +28,6 @@ from onyx.access.models import DocExternalAccess
from onyx.access.models import ElementExternalAccess
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
@@ -188,6 +187,7 @@ def check_for_doc_permissions_sync(self: Task, *, tenant_id: str) -> bool | None
# (which lives on a different db number)
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_DOC_PERMISSIONS_SYNC_BEAT_LOCK,
@@ -227,7 +227,6 @@ def check_for_doc_permissions_sync(self: Task, *, tenant_id: str) -> bool | None
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_permission_sync_fences(
tenant_id, r, r_replica, r_celery, lock_beat
)

View File

@@ -29,7 +29,6 @@ from ee.onyx.external_permissions.sync_params import (
from ee.onyx.external_permissions.sync_params import get_source_perm_sync_config
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.tasks.beat_schedule import CLOUD_BEAT_MULTIPLIER_DEFAULT
from onyx.background.error_logging import emit_background_error
@@ -163,6 +162,7 @@ def check_for_external_group_sync(self: Task, *, tenant_id: str) -> bool | None:
# (which lives on a different db number)
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_EXTERNAL_GROUP_SYNC_BEAT_LOCK,
@@ -221,7 +221,6 @@ def check_for_external_group_sync(self: Task, *, tenant_id: str) -> bool | None:
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_external_group_sync_fences(
tenant_id, self.app, r, r_replica, r_celery, lock_beat
)

View File

@@ -255,6 +255,7 @@ def fetch_user_groups(
db_session: Session,
only_up_to_date: bool = True,
eager_load_for_snapshot: bool = False,
include_default: bool = True,
) -> Sequence[UserGroup]:
"""
Fetches user groups from the database.
@@ -269,6 +270,7 @@ def fetch_user_groups(
to include only up to date user groups. Defaults to `True`.
eager_load_for_snapshot: If True, adds eager loading for all relationships
needed by UserGroup.from_model snapshot creation.
include_default: If False, excludes system default groups (is_default=True).
Returns:
Sequence[UserGroup]: A sequence of `UserGroup` objects matching the query criteria.
@@ -276,6 +278,8 @@ def fetch_user_groups(
stmt = select(UserGroup)
if only_up_to_date:
stmt = stmt.where(UserGroup.is_up_to_date == True) # noqa: E712
if not include_default:
stmt = stmt.where(UserGroup.is_default == False) # noqa: E712
if eager_load_for_snapshot:
stmt = _add_user_group_snapshot_eager_loads(stmt)
return db_session.scalars(stmt).unique().all()
@@ -286,6 +290,7 @@ def fetch_user_groups_for_user(
user_id: UUID,
only_curator_groups: bool = False,
eager_load_for_snapshot: bool = False,
include_default: bool = True,
) -> Sequence[UserGroup]:
stmt = (
select(UserGroup)
@@ -295,6 +300,8 @@ def fetch_user_groups_for_user(
)
if only_curator_groups:
stmt = stmt.where(User__UserGroup.is_curator == True) # noqa: E712
if not include_default:
stmt = stmt.where(UserGroup.is_default == False) # noqa: E712
if eager_load_for_snapshot:
stmt = _add_user_group_snapshot_eager_loads(stmt)
return db_session.scalars(stmt).unique().all()

View File

@@ -52,11 +52,13 @@ from ee.onyx.server.scim.schema_definitions import SERVICE_PROVIDER_CONFIG
from ee.onyx.server.scim.schema_definitions import USER_RESOURCE_TYPE
from ee.onyx.server.scim.schema_definitions import USER_SCHEMA_DEF
from onyx.db.engine.sql_engine import get_session
from onyx.db.enums import AccountType
from onyx.db.models import ScimToken
from onyx.db.models import ScimUserMapping
from onyx.db.models import User
from onyx.db.models import UserGroup
from onyx.db.models import UserRole
from onyx.db.users import assign_user_to_default_groups__no_commit
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import fetch_ee_implementation_or_noop
@@ -486,6 +488,7 @@ def create_user(
email=email,
hashed_password=_pw_helper.hash(_pw_helper.generate()),
role=UserRole.BASIC,
account_type=AccountType.STANDARD,
is_active=user_resource.active,
is_verified=True,
personal_name=personal_name,
@@ -506,13 +509,25 @@ def create_user(
scim_username=scim_username,
fields=fields,
)
dal.commit()
except IntegrityError:
dal.rollback()
return _scim_error_response(
409, f"User with email {email} already has a SCIM mapping"
)
# Assign user to default group BEFORE commit so everything is atomic.
# If this fails, the entire user creation rolls back and IdP can retry.
try:
assign_user_to_default_groups__no_commit(db_session, user)
except Exception:
dal.rollback()
logger.exception(f"Failed to assign SCIM user {email} to default groups")
return _scim_error_response(
500, f"Failed to assign user {email} to default group"
)
dal.commit()
return _scim_resource_response(
provider.build_user_resource(
user,

View File

@@ -43,12 +43,16 @@ router = APIRouter(prefix="/manage", tags=PUBLIC_API_TAGS)
@router.get("/admin/user-group")
def list_user_groups(
include_default: bool = False,
user: User = Depends(current_curator_or_admin_user),
db_session: Session = Depends(get_session),
) -> list[UserGroup]:
if user.role == UserRole.ADMIN:
user_groups = fetch_user_groups(
db_session, only_up_to_date=False, eager_load_for_snapshot=True
db_session,
only_up_to_date=False,
eager_load_for_snapshot=True,
include_default=include_default,
)
else:
user_groups = fetch_user_groups_for_user(
@@ -56,21 +60,28 @@ def list_user_groups(
user_id=user.id,
only_curator_groups=user.role == UserRole.CURATOR,
eager_load_for_snapshot=True,
include_default=include_default,
)
return [UserGroup.from_model(user_group) for user_group in user_groups]
@router.get("/user-groups/minimal")
def list_minimal_user_groups(
include_default: bool = False,
user: User = Depends(current_user),
db_session: Session = Depends(get_session),
) -> list[MinimalUserGroupSnapshot]:
if user.role == UserRole.ADMIN:
user_groups = fetch_user_groups(db_session, only_up_to_date=False)
user_groups = fetch_user_groups(
db_session,
only_up_to_date=False,
include_default=include_default,
)
else:
user_groups = fetch_user_groups_for_user(
db_session=db_session,
user_id=user.id,
include_default=include_default,
)
return [
MinimalUserGroupSnapshot.from_model(user_group) for user_group in user_groups
@@ -100,6 +111,9 @@ def rename_user_group_endpoint(
_: User = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> UserGroup:
group = fetch_user_group(db_session, rename_request.id)
if group and group.is_default:
raise OnyxError(OnyxErrorCode.CONFLICT, "Cannot rename a default system group.")
try:
return UserGroup.from_model(
rename_user_group(
@@ -185,6 +199,9 @@ def delete_user_group(
_: User = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> None:
group = fetch_user_group(db_session, user_group_id)
if group and group.is_default:
raise OnyxError(OnyxErrorCode.CONFLICT, "Cannot delete a default system group.")
try:
prepare_user_group_for_deletion(db_session, user_group_id)
except ValueError as e:

View File

@@ -22,6 +22,7 @@ class UserGroup(BaseModel):
personas: list[PersonaSnapshot]
is_up_to_date: bool
is_up_for_deletion: bool
is_default: bool
@classmethod
def from_model(cls, user_group_model: UserGroupModel) -> "UserGroup":
@@ -74,18 +75,21 @@ class UserGroup(BaseModel):
],
is_up_to_date=user_group_model.is_up_to_date,
is_up_for_deletion=user_group_model.is_up_for_deletion,
is_default=user_group_model.is_default,
)
class MinimalUserGroupSnapshot(BaseModel):
id: int
name: str
is_default: bool
@classmethod
def from_model(cls, user_group_model: UserGroupModel) -> "MinimalUserGroupSnapshot":
return cls(
id=user_group_model.id,
name=user_group_model.name,
is_default=user_group_model.is_default,
)

View File

@@ -5,6 +5,8 @@ from typing import Any
from fastapi_users import schemas
from typing_extensions import override
from onyx.db.enums import AccountType
class UserRole(str, Enum):
"""
@@ -41,6 +43,7 @@ class UserRead(schemas.BaseUser[uuid.UUID]):
class UserCreate(schemas.BaseUserCreate):
role: UserRole = UserRole.BASIC
account_type: AccountType = AccountType.STANDARD
tenant_id: str | None = None
# Captcha token for cloud signup protection (optional, only used when captcha is enabled)
# Excluded from create_update_dict so it never reaches the DB layer
@@ -50,12 +53,15 @@ class UserCreate(schemas.BaseUserCreate):
def create_update_dict(self) -> dict[str, Any]:
d = super().create_update_dict()
d.pop("captcha_token", None)
# Always include account_type — it's NOT NULL with no DB default
d.setdefault("account_type", self.account_type)
return d
@override
def create_update_dict_superuser(self) -> dict[str, Any]:
d = super().create_update_dict_superuser()
d.pop("captcha_token", None)
d.setdefault("account_type", self.account_type)
return d

View File

@@ -107,6 +107,7 @@ from onyx.configs.constants import DANSWER_API_KEY_DUMMY_EMAIL_DOMAIN
from onyx.configs.constants import DANSWER_API_KEY_PREFIX
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.configs.constants import MilestoneRecordType
from onyx.configs.constants import NotificationType
from onyx.configs.constants import OnyxRedisLocks
from onyx.configs.constants import PASSWORD_SPECIAL_CHARS
from onyx.configs.constants import UNNAMED_KEY_PLACEHOLDER
@@ -120,11 +121,14 @@ from onyx.db.engine.async_sql_engine import get_async_session
from onyx.db.engine.async_sql_engine import get_async_session_context_manager
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.engine.sql_engine import get_session_with_tenant
from onyx.db.enums import AccountType
from onyx.db.models import AccessToken
from onyx.db.models import OAuthAccount
from onyx.db.models import Persona
from onyx.db.models import User
from onyx.db.notification import create_notification
from onyx.db.pat import fetch_user_for_pat
from onyx.db.users import assign_user_to_default_groups__no_commit
from onyx.db.users import get_user_by_email
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import log_onyx_error
@@ -694,6 +698,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
"email": account_email,
"hashed_password": self.password_helper.hash(password),
"is_verified": is_verified_by_default,
"account_type": AccountType.STANDARD,
}
user = await self.user_db.create(user_dict)
@@ -748,10 +753,40 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
{
"is_verified": is_verified_by_default,
"role": UserRole.BASIC,
"account_type": AccountType.STANDARD,
**({"is_active": True} if not user.is_active else {}),
},
)
# Assign upgraded user to the Basic default group.
# If this fails, the user update is already committed, so
# log the error and notify admins rather than leaving a
# silently broken state.
try:
with get_session_with_current_tenant() as sync_db:
sync_user = sync_db.query(User).filter(User.id == user.id).first() # type: ignore[arg-type]
if sync_user:
assign_user_to_default_groups__no_commit(sync_db, sync_user)
sync_db.commit()
except Exception:
logger.exception(
"Failed to assign user %s to default groups after "
"account upgrade",
user.email,
)
with get_session_with_current_tenant() as sync_db:
create_notification(
user_id=None,
notif_type=NotificationType.USER_GROUP_ASSIGNMENT_FAILED,
db_session=sync_db,
title="User group assignment failed",
description=(
f"User {user.email} was upgraded to STANDARD "
f"but could not be assigned to default groups. "
f"Please assign them manually."
),
)
# this is needed if an organization goes from `TRACK_EXTERNAL_IDP_EXPIRY=true` to `false`
# otherwise, the oidc expiry will always be old, and the user will never be able to login
if user.oidc_expiry is not None and not TRACK_EXTERNAL_IDP_EXPIRY:
@@ -890,6 +925,18 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
properties=properties,
)
# Assign user to the appropriate default group (Admin or Basic).
# Use the user_count already fetched above to determine admin status
# instead of reading user.role — keeps group assignment role-independent.
# Let exceptions propagate — a user without a group has no permissions,
# so it's better to surface the error than silently create a broken user.
is_admin = user_count == 1 or user.email in get_default_admin_user_emails()
with get_session_with_current_tenant() as db_session:
assign_user_to_default_groups__no_commit(
db_session, user, is_admin=is_admin
)
db_session.commit()
logger.debug(f"User {user.id} has registered.")
optional_telemetry(
record_type=RecordType.SIGN_UP,
@@ -1554,6 +1601,7 @@ def get_anonymous_user() -> User:
is_verified=True,
is_superuser=False,
role=UserRole.LIMITED,
account_type=AccountType.ANONYMOUS,
use_memories=False,
enable_memory_tool=False,
)

View File

@@ -1,6 +1,5 @@
# These are helper objects for tracking the keys we need to write in redis
import json
import threading
from typing import Any
from typing import cast
@@ -8,59 +7,7 @@ from celery import Celery
from redis import Redis
from onyx.background.celery.configs.base import CELERY_SEPARATOR
from onyx.configs.app_configs import REDIS_HEALTH_CHECK_INTERVAL
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import REDIS_SOCKET_KEEPALIVE_OPTIONS
_broker_client: Redis | None = None
_broker_url: str | None = None
_broker_client_lock = threading.Lock()
def celery_get_broker_client(app: Celery) -> Redis:
    """Return a shared Redis client for the Celery broker DB.

    Maintains a module-level singleton guarded by a lock so every task on
    a worker reuses one connection (thread-safe for thread-pool workers).
    A cached client is health-checked with PING and rebuilt if the ping
    fails or the broker URL has changed since it was created.
    """
    global _broker_client, _broker_url

    def _discard(client: Redis) -> None:
        # Best-effort close; errors while tearing down a dead/stale
        # connection are irrelevant.
        try:
            client.close()
        except Exception:
            pass

    with _broker_client_lock:
        broker_url = app.conf.broker_url
        cached = _broker_client
        if cached is not None:
            if _broker_url == broker_url:
                # Same broker — reuse the cached client if it still responds.
                try:
                    cached.ping()
                    return cached
                except Exception:
                    _discard(cached)
                    _broker_client = None
            else:
                # Broker URL changed — the cached client points at the
                # wrong Redis; drop it.
                _discard(cached)
                _broker_client = None

        _broker_url = broker_url
        _broker_client = Redis.from_url(
            broker_url,
            decode_responses=False,
            health_check_interval=REDIS_HEALTH_CHECK_INTERVAL,
            socket_keepalive=True,
            socket_keepalive_options=REDIS_SOCKET_KEEPALIVE_OPTIONS,
            retry_on_timeout=True,
        )
        return _broker_client
def celery_get_unacked_length(r: Redis) -> int:

View File

@@ -14,7 +14,6 @@ from redis.lock import Lock as RedisLock
from sqlalchemy.orm import Session
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.configs.app_configs import JOB_TIMEOUT
@@ -133,6 +132,7 @@ def revoke_tasks_blocking_deletion(
def check_for_connector_deletion_task(self: Task, *, tenant_id: str) -> bool | None:
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_CONNECTOR_DELETION_BEAT_LOCK,
@@ -149,7 +149,6 @@ def check_for_connector_deletion_task(self: Task, *, tenant_id: str) -> bool | N
if not r.exists(OnyxRedisSignals.BLOCK_VALIDATE_CONNECTOR_DELETION_FENCES):
# clear fences that don't have associated celery tasks in progress
try:
r_celery = celery_get_broker_client(self.app)
validate_connector_deletion_fences(
tenant_id, r, r_replica, r_celery, lock_beat
)

View File

@@ -22,7 +22,6 @@ from sqlalchemy.orm import Session
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.celery_utils import httpx_init_vespa_pool
from onyx.background.celery.memory_monitoring import emit_process_memory
@@ -450,7 +449,7 @@ def check_indexing_completion(
):
# Check if the task exists in the celery queue
# This handles the case where Redis dies after task creation but before task execution
redis_celery = celery_get_broker_client(task.app)
redis_celery = task.app.broker_connection().channel().client # type: ignore
task_exists = celery_find_task(
attempt.celery_task_id,
OnyxCeleryQueues.CONNECTOR_DOC_FETCHING,

View File

@@ -1,5 +1,6 @@
import json
import time
from collections.abc import Callable
from datetime import timedelta
from itertools import islice
from typing import Any
@@ -18,7 +19,6 @@ from sqlalchemy import text
from sqlalchemy.orm import Session
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
from onyx.background.celery.memory_monitoring import emit_process_memory
@@ -698,27 +698,31 @@ def monitor_background_processes(self: Task, *, tenant_id: str) -> None:
return None
try:
# Get Redis client for Celery broker
redis_celery = self.app.broker_connection().channel().client # type: ignore
redis_std = get_redis_client()
# Collect queue metrics with broker connection
r_celery = celery_get_broker_client(self.app)
queue_metrics = _collect_queue_metrics(r_celery)
# Define metric collection functions and their dependencies
metric_functions: list[Callable[[], list[Metric]]] = [
lambda: _collect_queue_metrics(redis_celery),
lambda: _collect_connector_metrics(db_session, redis_std),
lambda: _collect_sync_metrics(db_session, redis_std),
]
# Collect remaining metrics (no broker connection needed)
# Collect and log each metric
with get_session_with_current_tenant() as db_session:
all_metrics: list[Metric] = queue_metrics
all_metrics.extend(_collect_connector_metrics(db_session, redis_std))
all_metrics.extend(_collect_sync_metrics(db_session, redis_std))
for metric_fn in metric_functions:
metrics = metric_fn()
for metric in metrics:
# double check to make sure we aren't double-emitting metrics
if metric.key is None or not _has_metric_been_emitted(
redis_std, metric.key
):
metric.log()
metric.emit(tenant_id)
for metric in all_metrics:
if metric.key is None or not _has_metric_been_emitted(
redis_std, metric.key
):
metric.log()
metric.emit(tenant_id)
if metric.key is not None:
_mark_metric_as_emitted(redis_std, metric.key)
if metric.key is not None:
_mark_metric_as_emitted(redis_std, metric.key)
task_logger.info("Successfully collected background metrics")
except SoftTimeLimitExceeded:
@@ -886,7 +890,7 @@ def monitor_celery_queues_helper(
) -> None:
"""A task to monitor all celery queue lengths."""
r_celery = celery_get_broker_client(task.app)
r_celery = task.app.broker_connection().channel().client # type: ignore
n_celery = celery_get_queue_length(OnyxCeleryQueues.PRIMARY, r_celery)
n_docfetching = celery_get_queue_length(
OnyxCeleryQueues.CONNECTOR_DOC_FETCHING, r_celery
@@ -1076,7 +1080,7 @@ def cloud_monitor_celery_pidbox(
num_deleted = 0
MAX_PIDBOX_IDLE = 24 * 3600 # 1 day in seconds
r_celery = celery_get_broker_client(self.app)
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
for key in r_celery.scan_iter("*.reply.celery.pidbox"):
key_bytes = cast(bytes, key)
key_str = key_bytes.decode("utf-8")

View File

@@ -17,7 +17,6 @@ from sqlalchemy.orm import Session
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_find_task
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_redis import celery_get_queued_task_ids
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
@@ -204,6 +203,7 @@ def _is_pruning_due(cc_pair: ConnectorCredentialPair) -> bool:
def check_for_pruning(self: Task, *, tenant_id: str) -> bool | None:
r = get_redis_client()
r_replica = get_redis_replica_client()
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
lock_beat: RedisLock = r.lock(
OnyxRedisLocks.CHECK_PRUNE_BEAT_LOCK,
@@ -261,7 +261,6 @@ def check_for_pruning(self: Task, *, tenant_id: str) -> bool | None:
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
# or be currently executing
try:
r_celery = celery_get_broker_client(self.app)
validate_pruning_fences(tenant_id, r, r_replica, r_celery, lock_beat)
except Exception:
task_logger.exception("Exception while validating pruning fences")

View File

@@ -16,7 +16,6 @@ from sqlalchemy.orm import Session
from onyx.access.access import build_access_for_user_files
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.celery_redis import celery_get_broker_client
from onyx.background.celery.celery_redis import celery_get_queue_length
from onyx.background.celery.celery_utils import httpx_init_vespa_pool
from onyx.background.celery.tasks.shared.RetryDocumentIndex import RetryDocumentIndex
@@ -106,7 +105,7 @@ def _user_file_delete_queued_key(user_file_id: str | UUID) -> str:
def get_user_file_project_sync_queue_depth(celery_app: Celery) -> int:
redis_celery = celery_get_broker_client(celery_app)
redis_celery: Redis = celery_app.broker_connection().channel().client # type: ignore
return celery_get_queue_length(
OnyxCeleryQueues.USER_FILE_PROJECT_SYNC, redis_celery
)
@@ -239,7 +238,7 @@ def check_user_file_processing(self: Task, *, tenant_id: str) -> None:
skipped_guard = 0
try:
# --- Protection 1: queue depth backpressure ---
r_celery = celery_get_broker_client(self.app)
r_celery = self.app.broker_connection().channel().client # type: ignore
queue_len = celery_get_queue_length(
OnyxCeleryQueues.USER_FILE_PROCESSING, r_celery
)
@@ -592,7 +591,7 @@ def check_for_user_file_delete(self: Task, *, tenant_id: str) -> None:
# --- Protection 1: queue depth backpressure ---
# NOTE: must use the broker's Redis client (not redis_client) because
# Celery queues live on a separate Redis DB with CELERY_SEPARATOR keys.
r_celery = celery_get_broker_client(self.app)
r_celery: Redis = self.app.broker_connection().channel().client # type: ignore
queue_len = celery_get_queue_length(OnyxCeleryQueues.USER_FILE_DELETE, r_celery)
if queue_len > USER_FILE_DELETE_MAX_QUEUE_DEPTH:
task_logger.warning(

View File

@@ -277,6 +277,7 @@ class NotificationType(str, Enum):
RELEASE_NOTES = "release_notes"
ASSISTANT_FILES_READY = "assistant_files_ready"
FEATURE_ANNOUNCEMENT = "feature_announcement"
USER_GROUP_ASSIGNMENT_FAILED = "user_group_assignment_failed"
class BlobType(str, Enum):

View File

@@ -11,14 +11,19 @@ from onyx.auth.api_key import ApiKeyDescriptor
from onyx.auth.api_key import build_displayable_api_key
from onyx.auth.api_key import generate_api_key
from onyx.auth.api_key import hash_api_key
from onyx.auth.schemas import UserRole
from onyx.configs.constants import DANSWER_API_KEY_DUMMY_EMAIL_DOMAIN
from onyx.configs.constants import DANSWER_API_KEY_PREFIX
from onyx.configs.constants import UNNAMED_KEY_PLACEHOLDER
from onyx.db.enums import AccountType
from onyx.db.models import ApiKey
from onyx.db.models import User
from onyx.server.api_key.models import APIKeyArgs
from onyx.utils.logger import setup_logger
from shared_configs.contextvars import get_current_tenant_id
logger = setup_logger()
def get_api_key_email_pattern() -> str:
return DANSWER_API_KEY_DUMMY_EMAIL_DOMAIN
@@ -87,6 +92,7 @@ def insert_api_key(
is_superuser=False,
is_verified=True,
role=api_key_args.role,
account_type=AccountType.SERVICE_ACCOUNT,
)
db_session.add(api_key_user_row)
@@ -99,7 +105,21 @@ def insert_api_key(
)
db_session.add(api_key_row)
# Assign the API key virtual user to the appropriate default group
# before commit so everything is atomic.
# LIMITED role service accounts should have no group membership.
# Late import to avoid circular dependency (api_key <- users <- api_key).
if api_key_args.role != UserRole.LIMITED:
from onyx.db.users import assign_user_to_default_groups__no_commit
assign_user_to_default_groups__no_commit(
db_session,
api_key_user_row,
is_admin=(api_key_args.role == UserRole.ADMIN),
)
db_session.commit()
return ApiKeyDescriptor(
api_key_id=api_key_row.id,
api_key_role=api_key_user_row.role,

View File

@@ -305,8 +305,11 @@ class User(SQLAlchemyBaseUserTableUUID, Base):
role: Mapped[UserRole] = mapped_column(
Enum(UserRole, native_enum=False, default=UserRole.BASIC)
)
account_type: Mapped[AccountType | None] = mapped_column(
Enum(AccountType, native_enum=False), nullable=True
account_type: Mapped[AccountType] = mapped_column(
Enum(AccountType, native_enum=False),
nullable=False,
default=AccountType.STANDARD,
server_default="STANDARD",
)
"""
@@ -4016,7 +4019,12 @@ class PermissionGrant(Base):
ForeignKey("user_group.id", ondelete="CASCADE"), nullable=False
)
permission: Mapped[Permission] = mapped_column(
Enum(Permission, native_enum=False), nullable=False
Enum(
Permission,
native_enum=False,
values_callable=lambda x: [e.value for e in x],
),
nullable=False,
)
grant_source: Mapped[GrantSource] = mapped_column(
Enum(GrantSource, native_enum=False), nullable=False

View File

@@ -3,6 +3,7 @@ from datetime import timezone
from uuid import UUID
from sqlalchemy import cast
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert
@@ -90,9 +91,18 @@ def get_notifications(
notif_type: NotificationType | None = None,
include_dismissed: bool = True,
) -> list[Notification]:
query = select(Notification).where(
Notification.user_id == user.id if user else Notification.user_id.is_(None)
)
if user is None:
user_filter = Notification.user_id.is_(None)
elif user.role == UserRole.ADMIN:
# Admins see their own notifications AND admin-targeted ones (user_id IS NULL)
user_filter = or_(
Notification.user_id == user.id,
Notification.user_id.is_(None),
)
else:
user_filter = Notification.user_id == user.id
query = select(Notification).where(user_filter)
if not include_dismissed:
query = query.where(Notification.dismissed.is_(False))
if notif_type:

View File

@@ -19,6 +19,7 @@ from onyx.auth.schemas import UserRole
from onyx.configs.constants import ANONYMOUS_USER_EMAIL
from onyx.configs.constants import NO_AUTH_PLACEHOLDER_USER_EMAIL
from onyx.db.api_key import DANSWER_API_KEY_DUMMY_EMAIL_DOMAIN
from onyx.db.enums import AccountType
from onyx.db.models import DocumentSet
from onyx.db.models import DocumentSet__User
from onyx.db.models import Persona
@@ -27,8 +28,11 @@ from onyx.db.models import SamlAccount
from onyx.db.models import User
from onyx.db.models import User__UserGroup
from onyx.db.models import UserGroup
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import fetch_ee_implementation_or_noop
logger = setup_logger()
def validate_user_role_update(
requested_role: UserRole, current_role: UserRole, explicit_override: bool = False
@@ -298,6 +302,7 @@ def _generate_slack_user(email: str) -> User:
email=email,
hashed_password=hashed_pass,
role=UserRole.SLACK_USER,
account_type=AccountType.BOT,
)
@@ -308,6 +313,7 @@ def add_slack_user_if_not_exists(db_session: Session, email: str) -> User:
# If the user is an external permissioned user, we update it to a slack user
if user.role == UserRole.EXT_PERM_USER:
user.role = UserRole.SLACK_USER
user.account_type = AccountType.BOT
db_session.commit()
return user
@@ -344,6 +350,7 @@ def _generate_ext_permissioned_user(email: str) -> User:
email=email,
hashed_password=hashed_pass,
role=UserRole.EXT_PERM_USER,
account_type=AccountType.EXT_PERM_USER,
)
@@ -375,6 +382,77 @@ def batch_add_ext_perm_user_if_not_exists(
return all_users
def assign_user_to_default_groups__no_commit(
db_session: Session,
user: User,
is_admin: bool = False,
) -> None:
"""Assign a newly created user to the appropriate default group.
Does NOT commit — callers must commit the session themselves so that
group assignment can be part of the same transaction as user creation.
Args:
is_admin: If True, assign to Admin default group; otherwise Basic.
Callers determine this from their own context (e.g. user_count,
admin email list, explicit choice). Defaults to False (Basic).
"""
if user.account_type in (
AccountType.BOT,
AccountType.EXT_PERM_USER,
AccountType.ANONYMOUS,
):
return
target_group_name = "Admin" if is_admin else "Basic"
default_group = (
db_session.query(UserGroup)
.filter(
UserGroup.name == target_group_name,
UserGroup.is_default.is_(True),
)
.first()
)
if default_group is None:
raise RuntimeError(
f"Default group '{target_group_name}' not found. "
f"Cannot assign user {user.email} to a group. "
f"Ensure the seed_default_groups migration has run."
)
# Check if the user is already in the group
existing = (
db_session.query(User__UserGroup)
.filter(
User__UserGroup.user_id == user.id,
User__UserGroup.user_group_id == default_group.id,
)
.first()
)
if existing is not None:
return
savepoint = db_session.begin_nested()
try:
db_session.add(
User__UserGroup(
user_id=user.id,
user_group_id=default_group.id,
)
)
db_session.flush()
except IntegrityError:
# Race condition: another transaction inserted this membership
# between our SELECT and INSERT. The savepoint isolates the failure
# so the outer transaction (user creation) stays intact.
savepoint.rollback()
return
logger.info(f"Assigned user {user.email} to default group '{default_group.name}'")
def delete_user_from_db(
user_to_delete: User,
db_session: Session,
@@ -421,13 +499,14 @@ def delete_user_from_db(
def batch_get_user_groups(
db_session: Session,
user_ids: list[UUID],
include_default: bool = False,
) -> dict[UUID, list[tuple[int, str]]]:
"""Fetch group memberships for a batch of users in a single query.
Returns a mapping of user_id -> list of (group_id, group_name) tuples."""
if not user_ids:
return {}
rows = db_session.execute(
stmt = (
select(
User__UserGroup.user_id,
UserGroup.id,
@@ -435,7 +514,11 @@ def batch_get_user_groups(
)
.join(UserGroup, UserGroup.id == User__UserGroup.user_group_id)
.where(User__UserGroup.user_id.in_(user_ids))
).all()
)
if not include_default:
stmt = stmt.where(UserGroup.is_default == False) # noqa: E712
rows = db_session.execute(stmt).all()
result: dict[UUID, list[tuple[int, str]]] = {uid: [] for uid in user_ids}
for user_id, group_id, group_name in rows:

View File

@@ -123,8 +123,9 @@ def _validate_endpoint(
(not reachable — indicates the api_key is invalid).
Timeout handling:
- Any httpx.TimeoutException (ConnectTimeout, ReadTimeout, WriteTimeout, PoolTimeout) →
timeout (operator should consider increasing timeout_seconds).
- ConnectTimeout: TCP handshake never completed → cannot_connect.
- ReadTimeout / WriteTimeout: TCP was established, server responded slowly → timeout
(operator should consider increasing timeout_seconds).
- All other exceptions → cannot_connect.
"""
_check_ssrf_safety(endpoint_url)

View File

@@ -12,6 +12,7 @@ stale, which is fine for monitoring dashboards.
import json
import threading
import time
from collections.abc import Callable
from datetime import datetime
from datetime import timezone
from typing import Any
@@ -103,23 +104,25 @@ class _CachedCollector(Collector):
class QueueDepthCollector(_CachedCollector):
"""Reads Celery queue lengths from the broker Redis on each scrape."""
"""Reads Celery queue lengths from the broker Redis on each scrape.
Uses a Redis client factory (callable) rather than a stored client
reference so the connection is always fresh from Celery's pool.
"""
def __init__(self, cache_ttl: float = _DEFAULT_CACHE_TTL) -> None:
super().__init__(cache_ttl)
self._celery_app: Any | None = None
self._get_redis: Callable[[], Redis] | None = None
def set_celery_app(self, app: Any) -> None:
"""Set the Celery app for broker Redis access."""
self._celery_app = app
def set_redis_factory(self, factory: Callable[[], Redis]) -> None:
"""Set a callable that returns a broker Redis client on demand."""
self._get_redis = factory
def _collect_fresh(self) -> list[GaugeMetricFamily]:
if self._celery_app is None:
if self._get_redis is None:
return []
from onyx.background.celery.celery_redis import celery_get_broker_client
redis_client = celery_get_broker_client(self._celery_app)
redis_client = self._get_redis()
depth = GaugeMetricFamily(
"onyx_queue_depth",
@@ -401,19 +404,17 @@ class RedisHealthCollector(_CachedCollector):
def __init__(self, cache_ttl: float = _DEFAULT_CACHE_TTL) -> None:
super().__init__(cache_ttl)
self._celery_app: Any | None = None
self._get_redis: Callable[[], Redis] | None = None
def set_celery_app(self, app: Any) -> None:
"""Set the Celery app for broker Redis access."""
self._celery_app = app
def set_redis_factory(self, factory: Callable[[], Redis]) -> None:
"""Set a callable that returns a broker Redis client on demand."""
self._get_redis = factory
def _collect_fresh(self) -> list[GaugeMetricFamily]:
if self._celery_app is None:
if self._get_redis is None:
return []
from onyx.background.celery.celery_redis import celery_get_broker_client
redis_client = celery_get_broker_client(self._celery_app)
redis_client = self._get_redis()
memory_used = GaugeMetricFamily(
"onyx_redis_memory_used_bytes",

View File

@@ -3,8 +3,12 @@
Called once by the monitoring celery worker after Redis and DB are ready.
"""
from collections.abc import Callable
from typing import Any
from celery import Celery
from prometheus_client.registry import REGISTRY
from redis import Redis
from onyx.server.metrics.indexing_pipeline import ConnectorHealthCollector
from onyx.server.metrics.indexing_pipeline import IndexAttemptCollector
@@ -17,7 +21,7 @@ from onyx.utils.logger import setup_logger
logger = setup_logger()
# Module-level singletons — these are lightweight objects (no connections or DB
# state) until configure() / set_celery_app() is called. Keeping them at
# state) until configure() / set_redis_factory() is called. Keeping them at
# module level ensures they survive the lifetime of the worker process and are
# only registered with the Prometheus registry once.
_queue_collector = QueueDepthCollector()
@@ -28,15 +32,72 @@ _worker_health_collector = WorkerHealthCollector()
_heartbeat_monitor: WorkerHeartbeatMonitor | None = None
def _make_broker_redis_factory(celery_app: Celery) -> Callable[[], Redis]:
"""Create a factory that returns a cached broker Redis client.
Reuses a single connection across scrapes to avoid leaking connections.
Reconnects automatically if the cached connection becomes stale.
"""
_cached_client: list[Redis | None] = [None]
# Keep a reference to the Kombu Connection so we can close it on
# reconnect (the raw Redis client outlives the Kombu wrapper).
_cached_kombu_conn: list[Any] = [None]
def _close_client(client: Redis) -> None:
"""Best-effort close of a Redis client."""
try:
client.close()
except Exception:
logger.debug("Failed to close stale Redis client", exc_info=True)
def _close_kombu_conn() -> None:
"""Best-effort close of the cached Kombu Connection."""
conn = _cached_kombu_conn[0]
if conn is not None:
try:
conn.close()
except Exception:
logger.debug("Failed to close Kombu connection", exc_info=True)
_cached_kombu_conn[0] = None
def _get_broker_redis() -> Redis:
client = _cached_client[0]
if client is not None:
try:
client.ping()
return client
except Exception:
logger.debug("Cached Redis client stale, reconnecting")
_close_client(client)
_cached_client[0] = None
_close_kombu_conn()
# Get a fresh Redis client from the broker connection.
# We hold this client long-term (cached above) rather than using a
# context manager, because we need it to persist across scrapes.
# The caching logic above ensures we only ever hold one connection,
# and we close it explicitly on reconnect.
conn = celery_app.broker_connection()
# kombu's Channel exposes .client at runtime (the underlying Redis
# client) but the type stubs don't declare it.
new_client: Redis = conn.channel().client # type: ignore[attr-defined]
_cached_client[0] = new_client
_cached_kombu_conn[0] = conn
return new_client
return _get_broker_redis
def setup_indexing_pipeline_metrics(celery_app: Celery) -> None:
"""Register all indexing pipeline collectors with the default registry.
Args:
celery_app: The Celery application instance. Used to obtain a
celery_app: The Celery application instance. Used to obtain a fresh
broker Redis client on each scrape for queue depth metrics.
"""
_queue_collector.set_celery_app(celery_app)
_redis_health_collector.set_celery_app(celery_app)
redis_factory = _make_broker_redis_factory(celery_app)
_queue_collector.set_redis_factory(redis_factory)
_redis_health_collector.set_redis_factory(redis_factory)
# Start the heartbeat monitor daemon thread — uses a single persistent
# connection to receive worker-heartbeat events.

View File

@@ -7,6 +7,7 @@ from uuid import UUID
from pydantic import BaseModel
from onyx.auth.schemas import UserRole
from onyx.db.enums import AccountType
from onyx.db.models import User
@@ -41,6 +42,7 @@ class FullUserSnapshot(BaseModel):
id: UUID
email: str
role: UserRole
account_type: AccountType
is_active: bool
password_configured: bool
personal_name: str | None
@@ -60,6 +62,7 @@ class FullUserSnapshot(BaseModel):
id=user.id,
email=user.email,
role=user.role,
account_type=user.account_type,
is_active=user.is_active,
password_configured=user.password_configured,
personal_name=user.personal_name,

View File

@@ -5,7 +5,6 @@ import asyncio
import json
import logging
import sys
import time
from dataclasses import asdict
from dataclasses import dataclass
from pathlib import Path
@@ -28,9 +27,6 @@ INTERNAL_SEARCH_TOOL_NAME = "internal_search"
INTERNAL_SEARCH_IN_CODE_TOOL_ID = "SearchTool"
MAX_REQUEST_ATTEMPTS = 5
RETRIABLE_STATUS_CODES = {429, 500, 502, 503, 504}
QUESTION_TIMEOUT_SECONDS = 300
QUESTION_RETRY_PAUSE_SECONDS = 30
MAX_QUESTION_ATTEMPTS = 3
@dataclass(frozen=True)
@@ -113,27 +109,6 @@ def normalize_api_base(api_base: str) -> str:
return f"{normalized}/api"
def load_completed_question_ids(output_file: Path) -> set[str]:
if not output_file.exists():
return set()
completed_ids: set[str] = set()
with output_file.open("r", encoding="utf-8") as file:
for line in file:
stripped = line.strip()
if not stripped:
continue
try:
record = json.loads(stripped)
except json.JSONDecodeError:
continue
question_id = record.get("question_id")
if isinstance(question_id, str) and question_id:
completed_ids.add(question_id)
return completed_ids
def load_questions(questions_file: Path) -> list[QuestionRecord]:
if not questions_file.exists():
raise FileNotFoundError(f"Questions file not found: {questions_file}")
@@ -373,7 +348,6 @@ async def generate_answers(
api_base: str,
api_key: str,
parallelism: int,
skipped: int,
) -> None:
if parallelism < 1:
raise ValueError("`--parallelism` must be at least 1.")
@@ -408,178 +382,58 @@ async def generate_answers(
write_lock = asyncio.Lock()
completed = 0
successful = 0
stuck_count = 0
failed_questions: list[FailedQuestionRecord] = []
remaining_count = len(questions)
overall_total = remaining_count + skipped
question_durations: list[float] = []
run_start_time = time.monotonic()
def print_progress() -> None:
avg_time = (
sum(question_durations) / len(question_durations)
if question_durations
else 0.0
)
elapsed = time.monotonic() - run_start_time
eta = avg_time * (remaining_count - completed) / max(parallelism, 1)
done = skipped + completed
bar_width = 30
filled = (
int(bar_width * done / overall_total)
if overall_total
else bar_width
)
bar = "" * filled + "" * (bar_width - filled)
pct = (done / overall_total * 100) if overall_total else 100.0
parts = (
f"\r{bar} {pct:5.1f}% "
f"[{done}/{overall_total}] "
f"avg {avg_time:.1f}s/q "
f"elapsed {elapsed:.0f}s "
f"ETA {eta:.0f}s "
f"(ok:{successful} fail:{len(failed_questions)}"
)
if stuck_count:
parts += f" stuck:{stuck_count}"
if skipped:
parts += f" skip:{skipped}"
parts += ")"
sys.stderr.write(parts)
sys.stderr.flush()
print_progress()
total = len(questions)
async def process_question(question_record: QuestionRecord) -> None:
nonlocal completed
nonlocal successful
nonlocal stuck_count
last_error: Exception | None = None
for attempt in range(1, MAX_QUESTION_ATTEMPTS + 1):
q_start = time.monotonic()
try:
async with semaphore:
result = await asyncio.wait_for(
submit_question(
session=session,
api_base=api_base,
headers=headers,
internal_search_tool_id=internal_search_tool_id,
question_record=question_record,
),
timeout=QUESTION_TIMEOUT_SECONDS,
)
except asyncio.TimeoutError:
async with progress_lock:
stuck_count += 1
logger.warning(
"Question %s timed out after %ss (attempt %s/%s, "
"total stuck: %s) — retrying in %ss",
question_record.question_id,
QUESTION_TIMEOUT_SECONDS,
attempt,
MAX_QUESTION_ATTEMPTS,
stuck_count,
QUESTION_RETRY_PAUSE_SECONDS,
)
print_progress()
last_error = TimeoutError(
f"Timed out after {QUESTION_TIMEOUT_SECONDS}s "
f"on attempt {attempt}/{MAX_QUESTION_ATTEMPTS}"
try:
async with semaphore:
result = await submit_question(
session=session,
api_base=api_base,
headers=headers,
internal_search_tool_id=internal_search_tool_id,
question_record=question_record,
)
await asyncio.sleep(QUESTION_RETRY_PAUSE_SECONDS)
continue
except Exception as exc:
duration = time.monotonic() - q_start
async with progress_lock:
completed += 1
question_durations.append(duration)
failed_questions.append(
FailedQuestionRecord(
question_id=question_record.question_id,
error=str(exc),
)
)
logger.exception(
"Failed question %s (%s/%s)",
question_record.question_id,
completed,
remaining_count,
)
print_progress()
return
duration = time.monotonic() - q_start
async with write_lock:
file.write(json.dumps(asdict(result), ensure_ascii=False))
file.write("\n")
file.flush()
except Exception as exc:
async with progress_lock:
completed += 1
successful += 1
question_durations.append(duration)
print_progress()
failed_questions.append(
FailedQuestionRecord(
question_id=question_record.question_id,
error=str(exc),
)
)
logger.exception(
"Failed question %s (%s/%s)",
question_record.question_id,
completed,
total,
)
return
# All attempts exhausted due to timeouts
async with write_lock:
file.write(json.dumps(asdict(result), ensure_ascii=False))
file.write("\n")
file.flush()
async with progress_lock:
completed += 1
failed_questions.append(
FailedQuestionRecord(
question_id=question_record.question_id,
error=str(last_error),
)
)
logger.error(
"Question %s failed after %s timeout attempts (%s/%s)",
question_record.question_id,
MAX_QUESTION_ATTEMPTS,
completed,
remaining_count,
)
print_progress()
successful += 1
logger.info("Processed %s/%s questions", completed, total)
await asyncio.gather(
*(process_question(question_record) for question_record in questions)
)
# Final newline after progress bar
sys.stderr.write("\n")
sys.stderr.flush()
total_elapsed = time.monotonic() - run_start_time
avg_time = (
sum(question_durations) / len(question_durations)
if question_durations
else 0.0
)
stuck_suffix = f", {stuck_count} stuck timeouts" if stuck_count else ""
resume_suffix = (
f"{skipped} previously completed, "
f"{skipped + successful}/{overall_total} overall"
if skipped
else ""
)
logger.info(
"Done: %s/%s successful in %.1fs (avg %.1fs/question%s)%s",
successful,
remaining_count,
total_elapsed,
avg_time,
stuck_suffix,
resume_suffix,
)
if failed_questions:
logger.warning(
"%s questions failed:",
"Completed with %s failed questions and %s successful questions.",
len(failed_questions),
successful,
)
for failed_question in failed_questions:
logger.warning(
@@ -599,30 +453,7 @@ def main() -> None:
raise ValueError("`--max-questions` must be at least 1 when provided.")
questions = questions[: args.max_questions]
completed_ids = load_completed_question_ids(args.output_file)
logger.info(
"Found %s already-answered question IDs in %s",
len(completed_ids),
args.output_file,
)
total_before_filter = len(questions)
questions = [q for q in questions if q.question_id not in completed_ids]
skipped = total_before_filter - len(questions)
if skipped:
logger.info(
"Resuming: %s/%s already answered, %s remaining",
skipped,
total_before_filter,
len(questions),
)
else:
logger.info("Loaded %s questions from %s", len(questions), args.questions_file)
if not questions:
logger.info("All questions already answered. Nothing to do.")
return
logger.info("Loaded %s questions from %s", len(questions), args.questions_file)
logger.info("Writing answers to %s", args.output_file)
asyncio.run(
@@ -632,7 +463,6 @@ def main() -> None:
api_base=api_base,
api_key=args.api_key,
parallelism=args.parallelism,
skipped=skipped,
)
)

View File

@@ -129,10 +129,6 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
return_value=mock_app,
),
patch(_PATCH_QUEUE_DEPTH, return_value=0),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
):
yield

View File

@@ -88,22 +88,10 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
the actual task instance. We patch ``app`` on that instance's class
(a unique Celery-generated Task subclass) so the mock is scoped to this
task only.
Also patches ``celery_get_broker_client`` so the mock app doesn't need
a real broker URL.
"""
task_instance = task.run.__self__
with (
patch.object(
type(task_instance),
"app",
new_callable=PropertyMock,
return_value=mock_app,
),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
with patch.object(
type(task_instance), "app", new_callable=PropertyMock, return_value=mock_app
):
yield

View File

@@ -90,17 +90,8 @@ def _patch_task_app(task: Any, mock_app: MagicMock) -> Generator[None, None, Non
task only.
"""
task_instance = task.run.__self__
with (
patch.object(
type(task_instance),
"app",
new_callable=PropertyMock,
return_value=mock_app,
),
patch(
"onyx.background.celery.tasks.user_file_processing.tasks.celery_get_broker_client",
return_value=MagicMock(),
),
with patch.object(
type(task_instance), "app", new_callable=PropertyMock, return_value=mock_app
):
yield

View File

@@ -7,6 +7,7 @@ from sqlalchemy.orm import Session
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.engine.sql_engine import SqlEngine
from onyx.db.enums import AccountType
from onyx.db.models import User
from onyx.db.models import UserRole
from onyx.file_store.file_store import get_default_file_store
@@ -52,7 +53,12 @@ def tenant_context() -> Generator[None, None, None]:
CURRENT_TENANT_ID_CONTEXTVAR.reset(token)
def create_test_user(db_session: Session, email_prefix: str) -> User:
def create_test_user(
db_session: Session,
email_prefix: str,
role: UserRole = UserRole.BASIC,
account_type: AccountType = AccountType.STANDARD,
) -> User:
"""Helper to create a test user with a unique email"""
# Use UUID to ensure unique email addresses
unique_email = f"{email_prefix}_{uuid4().hex[:8]}@example.com"
@@ -68,7 +74,8 @@ def create_test_user(db_session: Session, email_prefix: str) -> User:
is_active=True,
is_superuser=False,
is_verified=True,
role=UserRole.EXT_PERM_USER,
role=role,
account_type=account_type,
)
db_session.add(user)
db_session.commit()

View File

@@ -13,16 +13,29 @@ from onyx.access.utils import build_ext_group_name_for_onyx
from onyx.configs.constants import DocumentSource
from onyx.connectors.models import InputType
from onyx.db.enums import AccessType
from onyx.db.enums import AccountType
from onyx.db.enums import ConnectorCredentialPairStatus
from onyx.db.models import Connector
from onyx.db.models import ConnectorCredentialPair
from onyx.db.models import Credential
from onyx.db.models import PublicExternalUserGroup
from onyx.db.models import User
from onyx.db.models import User__ExternalUserGroupId
from onyx.db.models import UserRole
from tests.external_dependency_unit.conftest import create_test_user
from tests.external_dependency_unit.constants import TEST_TENANT_ID
def _create_ext_perm_user(db_session: Session, name: str) -> User:
"""Create an external-permission user for group sync tests."""
return create_test_user(
db_session,
name,
role=UserRole.EXT_PERM_USER,
account_type=AccountType.EXT_PERM_USER,
)
def _create_test_connector_credential_pair(
db_session: Session, source: DocumentSource = DocumentSource.GOOGLE_DRIVE
) -> ConnectorCredentialPair:
@@ -100,9 +113,9 @@ class TestPerformExternalGroupSync:
def test_initial_group_sync(self, db_session: Session) -> None:
"""Test syncing external groups for the first time (initial sync)"""
# Create test data
user1 = create_test_user(db_session, "user1")
user2 = create_test_user(db_session, "user2")
user3 = create_test_user(db_session, "user3")
user1 = _create_ext_perm_user(db_session, "user1")
user2 = _create_ext_perm_user(db_session, "user2")
user3 = _create_ext_perm_user(db_session, "user3")
cc_pair = _create_test_connector_credential_pair(db_session)
# Mock external groups data as a generator that yields the expected groups
@@ -175,9 +188,9 @@ class TestPerformExternalGroupSync:
def test_update_existing_groups(self, db_session: Session) -> None:
"""Test updating existing groups (adding/removing users)"""
# Create test data
user1 = create_test_user(db_session, "user1")
user2 = create_test_user(db_session, "user2")
user3 = create_test_user(db_session, "user3")
user1 = _create_ext_perm_user(db_session, "user1")
user2 = _create_ext_perm_user(db_session, "user2")
user3 = _create_ext_perm_user(db_session, "user3")
cc_pair = _create_test_connector_credential_pair(db_session)
# Initial sync with original groups
@@ -272,8 +285,8 @@ class TestPerformExternalGroupSync:
def test_remove_groups(self, db_session: Session) -> None:
"""Test removing groups (groups that no longer exist in external system)"""
# Create test data
user1 = create_test_user(db_session, "user1")
user2 = create_test_user(db_session, "user2")
user1 = _create_ext_perm_user(db_session, "user1")
user2 = _create_ext_perm_user(db_session, "user2")
cc_pair = _create_test_connector_credential_pair(db_session)
# Initial sync with multiple groups
@@ -357,7 +370,7 @@ class TestPerformExternalGroupSync:
def test_empty_group_sync(self, db_session: Session) -> None:
"""Test syncing when no groups are returned (all groups removed)"""
# Create test data
user1 = create_test_user(db_session, "user1")
user1 = _create_ext_perm_user(db_session, "user1")
cc_pair = _create_test_connector_credential_pair(db_session)
# Initial sync with groups
@@ -413,7 +426,7 @@ class TestPerformExternalGroupSync:
# Create many test users
users = []
for i in range(150): # More than the batch size of 100
users.append(create_test_user(db_session, f"user{i}"))
users.append(_create_ext_perm_user(db_session, f"user{i}"))
cc_pair = _create_test_connector_credential_pair(db_session)
@@ -452,8 +465,8 @@ class TestPerformExternalGroupSync:
def test_mixed_regular_and_public_groups(self, db_session: Session) -> None:
"""Test syncing a mix of regular and public groups"""
# Create test data
user1 = create_test_user(db_session, "user1")
user2 = create_test_user(db_session, "user2")
user1 = _create_ext_perm_user(db_session, "user1")
user2 = _create_ext_perm_user(db_session, "user2")
cc_pair = _create_test_connector_credential_pair(db_session)
def mixed_group_sync_func(

View File

@@ -9,6 +9,7 @@ from sqlalchemy.orm import Session
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.engine.sql_engine import SqlEngine
from onyx.db.enums import AccountType
from onyx.db.enums import BuildSessionStatus
from onyx.db.models import BuildSession
from onyx.db.models import User
@@ -52,6 +53,7 @@ def test_user(db_session: Session, tenant_context: None) -> User: # noqa: ARG00
is_superuser=False,
is_verified=True,
role=UserRole.EXT_PERM_USER,
account_type=AccountType.EXT_PERM_USER,
)
db_session.add(user)
db_session.commit()

View File

@@ -0,0 +1,51 @@
"""
Tests that account_type is correctly set when creating users through
the internal DB functions: add_slack_user_if_not_exists and
batch_add_ext_perm_user_if_not_exists.
These functions are called by background workers (Slack bot, permission sync)
and are not exposed via API endpoints, so they must be tested directly.
"""
from sqlalchemy.orm import Session
from onyx.db.enums import AccountType
from onyx.db.models import UserRole
from onyx.db.users import add_slack_user_if_not_exists
from onyx.db.users import batch_add_ext_perm_user_if_not_exists
def test_slack_user_creation_sets_account_type_bot(db_session: Session) -> None:
    """Users created through the Slack path are flagged as BOT accounts."""
    slack_email = "slack_acct_type@test.com"
    created = add_slack_user_if_not_exists(db_session, slack_email)
    # The Slack path must stamp both the legacy role and the new account_type.
    assert created.account_type == AccountType.BOT
    assert created.role == UserRole.SLACK_USER
def test_ext_perm_user_creation_sets_account_type(db_session: Session) -> None:
    """Batch ext-perm creation stamps account_type=EXT_PERM_USER on new users."""
    email = "extperm_acct_type@test.com"
    created_users = batch_add_ext_perm_user_if_not_exists(db_session, [email])
    assert len(created_users) == 1
    (created,) = created_users
    # Both the legacy role and the new account_type column must be set.
    assert created.role == UserRole.EXT_PERM_USER
    assert created.account_type == AccountType.EXT_PERM_USER
def test_ext_perm_to_slack_upgrade_updates_role_and_account_type(
    db_session: Session,
) -> None:
    """When an EXT_PERM_USER is upgraded to slack, both role and account_type update."""
    email = "ext_to_slack_acct_type@test.com"
    # Create as ext_perm user first
    batch_add_ext_perm_user_if_not_exists(db_session, [email])
    # Now "upgrade" via slack path
    user = add_slack_user_if_not_exists(db_session, email)
    # The Slack path should overwrite both fields on the pre-existing user,
    # not just the role.
    assert user.role == UserRole.SLACK_USER
    assert user.account_type == AccountType.BOT

View File

@@ -8,6 +8,7 @@ import pytest
from fastapi_users.password import PasswordHelper
from sqlalchemy.orm import Session
from onyx.db.enums import AccountType
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import remove_llm_provider
from onyx.db.llm import update_default_provider
@@ -46,6 +47,7 @@ def _create_admin(db_session: Session) -> User:
is_superuser=True,
is_verified=True,
role=UserRole.ADMIN,
account_type=AccountType.STANDARD,
)
db_session.add(user)
db_session.commit()

View File

@@ -107,10 +107,15 @@ class UserGroupManager:
@staticmethod
def get_all(
    user_performing_action: DATestUser,
    include_default: bool = False,
) -> list[UserGroup]:
    """Fetch all user groups; pass include_default=True to also get default groups."""
    # The server only returns default groups when explicitly asked.
    query_params: dict[str, str] = (
        {"include_default": "true"} if include_default else {}
    )
    response = requests.get(
        f"{API_SERVER_URL}/manage/admin/user-group",
        headers=user_performing_action.headers,
        params=query_params,
    )
    response.raise_for_status()
    return [UserGroup(**group_json) for group_json in response.json()]

View File

@@ -1,9 +1,13 @@
from uuid import UUID
import requests
from onyx.auth.schemas import UserRole
from onyx.db.enums import AccountType
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.api_key import APIKeyManager
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.managers.user_group import UserGroupManager
from tests.integration.common_utils.test_models import DATestAPIKey
from tests.integration.common_utils.test_models import DATestUser
@@ -33,3 +37,120 @@ def test_limited(reset: None) -> None: # noqa: ARG001
headers=api_key.headers,
)
assert response.status_code == 403
def _get_service_account_account_type(
    admin_user: DATestUser,
    api_key_user_id: UUID,
) -> AccountType:
    """Fetch the account_type of a service account user via the user listing API."""
    response = requests.get(
        f"{API_SERVER_URL}/manage/users",
        headers=admin_user.headers,
        # Service-account users only show up when include_api_keys is true.
        params={"include_api_keys": "true"},
    )
    response.raise_for_status()
    accepted_users = response.json()["accepted"]
    user_id_str = str(api_key_user_id)
    match = next((u for u in accepted_users if u["id"] == user_id_str), None)
    if match is None:
        raise AssertionError(
            f"Service account user {user_id_str} not found in user listing"
        )
    return AccountType(match["account_type"])
def _get_default_group_user_ids(
    admin_user: DATestUser,
) -> tuple[set[str], set[str]]:
    """Return (admin_group_user_ids, basic_group_user_ids) from default groups."""
    # Default groups are only returned when include_default=True is passed.
    all_groups = UserGroupManager.get_all(
        user_performing_action=admin_user,
        include_default=True,
    )
    admin_group = next(
        (g for g in all_groups if g.name == "Admin" and g.is_default), None
    )
    basic_group = next(
        (g for g in all_groups if g.name == "Basic" and g.is_default), None
    )
    assert admin_group is not None, "Admin default group not found"
    assert basic_group is not None, "Basic default group not found"
    # Normalize to string IDs so callers can compare against stringified UUIDs.
    admin_ids = {str(u.id) for u in admin_group.users}
    basic_ids = {str(u.id) for u in basic_group.users}
    return admin_ids, basic_ids
def test_api_key_limited_service_account(reset: None) -> None:  # noqa: ARG001
    """LIMITED role API key: account_type is SERVICE_ACCOUNT, no group membership."""
    admin_user: DATestUser = UserManager.create(name="admin_user")
    api_key: DATestAPIKey = APIKeyManager.create(
        api_key_role=UserRole.LIMITED,
        user_performing_action=admin_user,
    )
    # Verify account_type
    account_type = _get_service_account_account_type(admin_user, api_key.user_id)
    assert (
        account_type == AccountType.SERVICE_ACCOUNT
    ), f"Expected account_type={AccountType.SERVICE_ACCOUNT}, got {account_type}"
    # Verify no group membership: a LIMITED key belongs to neither default group.
    admin_ids, basic_ids = _get_default_group_user_ids(admin_user)
    user_id_str = str(api_key.user_id)
    assert (
        user_id_str not in admin_ids
    ), "LIMITED API key should NOT be in Admin default group"
    assert (
        user_id_str not in basic_ids
    ), "LIMITED API key should NOT be in Basic default group"
def test_api_key_basic_service_account(reset: None) -> None:  # noqa: ARG001
    """BASIC role API key: account_type is SERVICE_ACCOUNT, in Basic group only."""
    admin_user: DATestUser = UserManager.create(name="admin_user")
    api_key: DATestAPIKey = APIKeyManager.create(
        api_key_role=UserRole.BASIC,
        user_performing_action=admin_user,
    )
    # Verify account_type
    account_type = _get_service_account_account_type(admin_user, api_key.user_id)
    assert (
        account_type == AccountType.SERVICE_ACCOUNT
    ), f"Expected account_type={AccountType.SERVICE_ACCOUNT}, got {account_type}"
    # Verify Basic group membership (and absence from Admin).
    admin_ids, basic_ids = _get_default_group_user_ids(admin_user)
    user_id_str = str(api_key.user_id)
    assert user_id_str in basic_ids, "BASIC API key should be in Basic default group"
    assert (
        user_id_str not in admin_ids
    ), "BASIC API key should NOT be in Admin default group"
def test_api_key_admin_service_account(reset: None) -> None:  # noqa: ARG001
    """ADMIN role API key: account_type is SERVICE_ACCOUNT, in Admin group only."""
    admin_user: DATestUser = UserManager.create(name="admin_user")
    api_key: DATestAPIKey = APIKeyManager.create(
        api_key_role=UserRole.ADMIN,
        user_performing_action=admin_user,
    )
    # Verify account_type
    account_type = _get_service_account_account_type(admin_user, api_key.user_id)
    assert (
        account_type == AccountType.SERVICE_ACCOUNT
    ), f"Expected account_type={AccountType.SERVICE_ACCOUNT}, got {account_type}"
    # Verify Admin group membership (and absence from Basic).
    admin_ids, basic_ids = _get_default_group_user_ids(admin_user)
    user_id_str = str(api_key.user_id)
    assert user_id_str in admin_ids, "ADMIN API key should be in Admin default group"
    assert (
        user_id_str not in basic_ids
    ), "ADMIN API key should NOT be in Basic default group"

View File

@@ -6,6 +6,7 @@ import requests
from onyx.auth.schemas import UserRole
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.managers.user_group import UserGroupManager
from tests.integration.common_utils.test_models import DATestUser
@@ -95,3 +96,63 @@ def test_saml_user_conversion(reset: None) -> None: # noqa: ARG001
# Verify the user's role was changed in the database
assert UserManager.is_role(slack_user, UserRole.BASIC)
@pytest.mark.skipif(
    os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
    reason="SAML tests are enterprise only",
)
def test_saml_user_conversion_sets_account_type_and_group(
    reset: None,  # noqa: ARG001
) -> None:
    """
    Test that SAML login sets account_type to STANDARD when converting a
    non-web user (EXT_PERM_USER) and that the user receives the correct role
    (BASIC) after conversion.
    This validates the permissions-migration-phase2 changes which ensure that:
    1. account_type is updated to 'standard' on SAML conversion
    2. The converted user is assigned to the Basic default group
    """
    # Create an admin user (first user is automatically admin)
    admin_user: DATestUser = UserManager.create(email="admin@example.com")
    # Create a user and set them as EXT_PERM_USER
    test_email = "ext_convert@example.com"
    test_user = UserManager.create(email=test_email)
    # explicit_override is needed to force the EXT_PERM_USER role here —
    # NOTE(review): confirm against UserManager.set_role semantics.
    UserManager.set_role(
        user_to_set=test_user,
        target_role=UserRole.EXT_PERM_USER,
        user_performing_action=admin_user,
        explicit_override=True,
    )
    assert UserManager.is_role(test_user, UserRole.EXT_PERM_USER)
    # Simulate SAML login
    response = requests.post(
        f"{API_SERVER_URL}/manage/users/test-upsert-user",
        json={"email": test_email},
        headers=admin_user.headers,
    )
    response.raise_for_status()
    user_data = response.json()
    # Verify account_type is set to standard after conversion
    assert (
        user_data["account_type"] == "standard"
    ), f"Expected account_type='standard', got '{user_data['account_type']}'"
    # Verify role is BASIC after conversion
    assert user_data["role"] == UserRole.BASIC.value
    # Verify the user was assigned to the Basic default group
    all_groups = UserGroupManager.get_all(admin_user, include_default=True)
    basic_default = [g for g in all_groups if g.is_default and g.name == "Basic"]
    assert basic_default, "Basic default group not found"
    basic_group = basic_default[0]
    member_emails = {u.email for u in basic_group.users}
    assert test_email in member_emails, (
        f"Converted user '{test_email}' not found in Basic default group members: "
        f"{member_emails}"
    )

View File

@@ -35,9 +35,16 @@ from onyx.auth.schemas import UserRole
from onyx.configs.app_configs import REDIS_DB_NUMBER
from onyx.configs.app_configs import REDIS_HOST
from onyx.configs.app_configs import REDIS_PORT
from onyx.db.enums import AccountType
from onyx.server.settings.models import ApplicationStatus
from tests.integration.common_utils.constants import ADMIN_USER_NAME
from tests.integration.common_utils.constants import GENERAL_HEADERS
from tests.integration.common_utils.managers.scim_client import ScimClient
from tests.integration.common_utils.managers.scim_token import ScimTokenManager
from tests.integration.common_utils.managers.user import build_email
from tests.integration.common_utils.managers.user import DEFAULT_PASSWORD
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestUser
SCIM_USER_SCHEMA = "urn:ietf:params:scim:schemas:core:2.0:User"
@@ -211,6 +218,49 @@ def test_create_user(scim_token: str, idp_style: str) -> None:
_assert_entra_emails(body, email)
def test_create_user_default_group_and_account_type(
    scim_token: str, idp_style: str
) -> None:
    """SCIM-provisioned users get Basic default group and STANDARD account_type."""
    email = f"scim_defaults_{idp_style}@example.com"
    ext_id = f"ext-defaults-{idp_style}"
    resp = _create_scim_user(scim_token, email, ext_id, idp_style)
    assert resp.status_code == 201
    user_id = resp.json()["id"]
    # --- Verify group assignment via SCIM GET ---
    get_resp = ScimClient.get(f"/Users/{user_id}", scim_token)
    assert get_resp.status_code == 200
    groups = get_resp.json().get("groups", [])
    group_names = {g["display"] for g in groups}
    assert "Basic" in group_names, f"Expected 'Basic' in groups, got {group_names}"
    assert "Admin" not in group_names, "SCIM user should not be in Admin group"
    # --- Verify account_type via admin API ---
    # account_type is read through the admin user-listing endpoint, so log in
    # as the seeded admin user first.
    admin = UserManager.login_as_user(
        DATestUser(
            id="",
            email=build_email(ADMIN_USER_NAME),
            password=DEFAULT_PASSWORD,
            headers=GENERAL_HEADERS,
            role=UserRole.ADMIN,
            is_active=True,
        )
    )
    page = UserManager.get_user_page(
        user_performing_action=admin,
        search_query=email,
    )
    assert page.total_items >= 1
    scim_user_snapshot = next((u for u in page.items if u.email == email), None)
    assert (
        scim_user_snapshot is not None
    ), f"SCIM user {email} not found in user listing"
    assert (
        scim_user_snapshot.account_type == AccountType.STANDARD
    ), f"Expected STANDARD, got {scim_user_snapshot.account_type}"
def test_get_user(scim_token: str, idp_style: str) -> None:
"""GET /Users/{id} returns the user resource with all stored fields."""
email = f"scim_get_{idp_style}@example.com"

View File

@@ -0,0 +1,78 @@
"""Integration tests for default group assignment on user registration.
Verifies that:
- The first registered user is assigned to the Admin default group
- Subsequent registered users are assigned to the Basic default group
- account_type is set to STANDARD for email/password registrations
"""
from onyx.auth.schemas import UserRole
from onyx.db.enums import AccountType
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.managers.user_group import UserGroupManager
from tests.integration.common_utils.test_models import DATestUser
def test_default_group_assignment_on_registration(reset: None) -> None:  # noqa: ARG001
    """First registered user lands in Admin default group, second in Basic;
    both get account_type=STANDARD."""
    # Register first user — should become admin
    admin_user: DATestUser = UserManager.create(name="first_user")
    assert admin_user.role == UserRole.ADMIN
    # Register second user — should become basic
    basic_user: DATestUser = UserManager.create(name="second_user")
    assert basic_user.role == UserRole.BASIC
    # Fetch all groups including default ones
    all_groups = UserGroupManager.get_all(
        user_performing_action=admin_user,
        include_default=True,
    )
    # Find the default Admin and Basic groups
    admin_group = next(
        (g for g in all_groups if g.name == "Admin" and g.is_default), None
    )
    basic_group = next(
        (g for g in all_groups if g.name == "Basic" and g.is_default), None
    )
    assert admin_group is not None, "Admin default group not found"
    assert basic_group is not None, "Basic default group not found"
    # Verify admin user is in Admin group and NOT in Basic group
    admin_group_user_ids = {str(u.id) for u in admin_group.users}
    basic_group_user_ids = {str(u.id) for u in basic_group.users}
    assert (
        admin_user.id in admin_group_user_ids
    ), "First user should be in Admin default group"
    assert (
        admin_user.id not in basic_group_user_ids
    ), "First user should NOT be in Basic default group"
    # Verify basic user is in Basic group and NOT in Admin group
    assert (
        basic_user.id in basic_group_user_ids
    ), "Second user should be in Basic default group"
    assert (
        basic_user.id not in admin_group_user_ids
    ), "Second user should NOT be in Admin default group"
    # Verify account_type is STANDARD for both users via user listing API
    paginated_result = UserManager.get_user_page(
        user_performing_action=admin_user,
        page_num=0,
        page_size=10,
    )
    users_by_id = {str(u.id): u for u in paginated_result.items}
    admin_snapshot = users_by_id.get(admin_user.id)
    basic_snapshot = users_by_id.get(basic_user.id)
    assert admin_snapshot is not None, "Admin user not found in user listing"
    assert basic_snapshot is not None, "Basic user not found in user listing"
    assert (
        admin_snapshot.account_type == AccountType.STANDARD
    ), f"Admin user account_type should be STANDARD, got {admin_snapshot.account_type}"
    assert (
        basic_snapshot.account_type == AccountType.STANDARD
    ), f"Basic user account_type should be STANDARD, got {basic_snapshot.account_type}"

View File

@@ -0,0 +1,29 @@
"""
Unit tests for UserCreate schema dict methods.
Verifies that account_type is always included in create_update_dict
and create_update_dict_superuser.
"""
from onyx.auth.schemas import UserCreate
from onyx.db.enums import AccountType
def test_create_update_dict_includes_default_account_type() -> None:
    """Omitting account_type on UserCreate yields STANDARD in the create dict."""
    payload = UserCreate(email="a@b.com", password="secret123").create_update_dict()
    assert payload["account_type"] == AccountType.STANDARD
def test_create_update_dict_includes_explicit_account_type() -> None:
    """An explicitly supplied account_type survives into create_update_dict."""
    user_create = UserCreate(
        email="a@b.com",
        password="secret123",
        account_type=AccountType.SERVICE_ACCOUNT,
    )
    payload = user_create.create_update_dict()
    assert payload["account_type"] == AccountType.SERVICE_ACCOUNT
def test_create_update_dict_superuser_includes_account_type() -> None:
    """The superuser dict variant also carries the (default) account_type."""
    payload = UserCreate(
        email="a@b.com", password="secret123"
    ).create_update_dict_superuser()
    assert payload["account_type"] == AccountType.STANDARD

View File

@@ -1,87 +0,0 @@
"""Tests for celery_get_broker_client singleton."""
from collections.abc import Iterator
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from onyx.background.celery import celery_redis
@pytest.fixture(autouse=True)
def reset_singleton() -> Iterator[None]:
    """Reset the module-level singleton between tests."""
    # Clear both before and after each test so cached state never leaks
    # across tests, even if a test fails mid-way.
    celery_redis._broker_client = None
    celery_redis._broker_url = None
    yield
    celery_redis._broker_client = None
    celery_redis._broker_url = None
def _make_mock_app(broker_url: str = "redis://localhost:6379/15") -> MagicMock:
app = MagicMock()
app.conf.broker_url = broker_url
return app
class TestCeleryGetBrokerClient:
    """Tests for the cached broker-client accessor in celery_redis."""

    @patch("onyx.background.celery.celery_redis.Redis")
    def test_creates_client_on_first_call(self, mock_redis_cls: MagicMock) -> None:
        """First call builds a Redis client from the app's broker URL."""
        mock_client = MagicMock()
        mock_redis_cls.from_url.return_value = mock_client
        app = _make_mock_app()
        result = celery_redis.celery_get_broker_client(app)
        assert result is mock_client
        # Verify the connection options passed to Redis.from_url.
        call_args = mock_redis_cls.from_url.call_args
        assert call_args[0][0] == "redis://localhost:6379/15"
        assert call_args[1]["decode_responses"] is False
        assert call_args[1]["socket_keepalive"] is True
        assert call_args[1]["retry_on_timeout"] is True

    @patch("onyx.background.celery.celery_redis.Redis")
    def test_reuses_cached_client(self, mock_redis_cls: MagicMock) -> None:
        """Healthy cached client (ping succeeds) is returned without reconnecting."""
        mock_client = MagicMock()
        mock_client.ping.return_value = True
        mock_redis_cls.from_url.return_value = mock_client
        app = _make_mock_app()
        client1 = celery_redis.celery_get_broker_client(app)
        client2 = celery_redis.celery_get_broker_client(app)
        assert client1 is client2
        # from_url called only once
        assert mock_redis_cls.from_url.call_count == 1

    @patch("onyx.background.celery.celery_redis.Redis")
    def test_reconnects_on_ping_failure(self, mock_redis_cls: MagicMock) -> None:
        """A stale cached client (ping raises) triggers a fresh connection."""
        stale_client = MagicMock()
        stale_client.ping.side_effect = ConnectionError("disconnected")
        fresh_client = MagicMock()
        fresh_client.ping.return_value = True
        mock_redis_cls.from_url.side_effect = [stale_client, fresh_client]
        app = _make_mock_app()
        # First call creates stale_client
        client1 = celery_redis.celery_get_broker_client(app)
        assert client1 is stale_client
        # Second call: ping fails, creates fresh_client
        client2 = celery_redis.celery_get_broker_client(app)
        assert client2 is fresh_client
        assert mock_redis_cls.from_url.call_count == 2

    @patch("onyx.background.celery.celery_redis.Redis")
    def test_uses_broker_url_from_app_config(self, mock_redis_cls: MagicMock) -> None:
        """The URL is taken from app.conf.broker_url, not a hard-coded default."""
        mock_redis_cls.from_url.return_value = MagicMock()
        app = _make_mock_app("redis://custom-host:6380/3")
        celery_redis.celery_get_broker_client(app)
        call_args = mock_redis_cls.from_url.call_args
        assert call_args[0][0] == "redis://custom-host:6380/3"

View File

@@ -0,0 +1,176 @@
"""
Unit tests for assign_user_to_default_groups__no_commit in onyx.db.users.
Covers:
1. Standard/service-account users get assigned to the correct default group
2. BOT, EXT_PERM_USER, ANONYMOUS account types are skipped
3. Missing default group raises RuntimeError
4. Already-in-group is a no-op
5. IntegrityError race condition is handled gracefully
6. The function never commits the session
"""
from unittest.mock import MagicMock
from uuid import uuid4
import pytest
from sqlalchemy.exc import IntegrityError
from onyx.db.enums import AccountType
from onyx.db.models import User__UserGroup
from onyx.db.models import UserGroup
from onyx.db.users import assign_user_to_default_groups__no_commit
def _mock_user(
    account_type: AccountType = AccountType.STANDARD,
    email: str = "test@example.com",
) -> MagicMock:
    """Fabricate a minimal User stand-in carrying only id, email and account_type."""
    fake_user = MagicMock()
    fake_user.id = uuid4()
    fake_user.email = email
    fake_user.account_type = account_type
    return fake_user
def _mock_group(name: str = "Basic", group_id: int = 1) -> MagicMock:
group = MagicMock()
group.id = group_id
group.name = name
group.is_default = True
return group
def _make_query_chain(first_return: object = None) -> MagicMock:
"""Returns a mock that supports .filter(...).filter(...).first() chaining."""
chain = MagicMock()
chain.filter.return_value = chain
chain.first.return_value = first_return
return chain
def _setup_db_session(
    group_result: object = None,
    membership_result: object = None,
) -> MagicMock:
    """Create a db_session mock that routes query(UserGroup) and query(User__UserGroup)."""
    db_session = MagicMock()
    group_chain = _make_query_chain(group_result)
    membership_chain = _make_query_chain(membership_result)
    # Route by identity of the queried model class so each model gets its own
    # pre-seeded result chain; any other model gets a throwaway mock.
    def query_side_effect(model: type) -> MagicMock:
        if model is UserGroup:
            return group_chain
        if model is User__UserGroup:
            return membership_chain
        return MagicMock()
    db_session.query.side_effect = query_side_effect
    return db_session
def test_standard_user_assigned_to_basic_group() -> None:
    """A non-admin STANDARD user gets a membership row in the 'Basic' group."""
    group = _mock_group("Basic")
    db_session = _setup_db_session(group_result=group, membership_result=None)
    savepoint = MagicMock()
    db_session.begin_nested.return_value = savepoint
    user = _mock_user(AccountType.STANDARD)
    assign_user_to_default_groups__no_commit(db_session, user, is_admin=False)
    # Exactly one User__UserGroup row is added, linking user to the group.
    db_session.add.assert_called_once()
    added = db_session.add.call_args[0][0]
    assert isinstance(added, User__UserGroup)
    assert added.user_id == user.id
    assert added.user_group_id == group.id
    db_session.flush.assert_called_once()
def test_admin_user_assigned_to_admin_group() -> None:
    """With is_admin=True the membership row points at the 'Admin' group."""
    group = _mock_group("Admin", group_id=2)
    db_session = _setup_db_session(group_result=group, membership_result=None)
    savepoint = MagicMock()
    db_session.begin_nested.return_value = savepoint
    user = _mock_user(AccountType.STANDARD)
    assign_user_to_default_groups__no_commit(db_session, user, is_admin=True)
    db_session.add.assert_called_once()
    added = db_session.add.call_args[0][0]
    assert isinstance(added, User__UserGroup)
    assert added.user_group_id == group.id
@pytest.mark.parametrize(
    "account_type",
    [AccountType.BOT, AccountType.EXT_PERM_USER, AccountType.ANONYMOUS],
)
def test_excluded_account_types_skipped(account_type: AccountType) -> None:
    """BOT / EXT_PERM_USER / ANONYMOUS users are skipped entirely — no DB access."""
    db_session = MagicMock()
    user = _mock_user(account_type)
    assign_user_to_default_groups__no_commit(db_session, user)
    # The function should bail out before touching the session at all.
    db_session.query.assert_not_called()
    db_session.add.assert_not_called()
def test_service_account_not_skipped() -> None:
    """SERVICE_ACCOUNT users DO get a default-group assignment (not excluded)."""
    group = _mock_group("Basic")
    db_session = _setup_db_session(group_result=group, membership_result=None)
    savepoint = MagicMock()
    db_session.begin_nested.return_value = savepoint
    user = _mock_user(AccountType.SERVICE_ACCOUNT)
    assign_user_to_default_groups__no_commit(db_session, user, is_admin=False)
    db_session.add.assert_called_once()
def test_missing_default_group_raises_error() -> None:
    """If the expected default group row is absent, a RuntimeError is raised."""
    # group_result=None simulates the default group missing from the DB.
    db_session = _setup_db_session(group_result=None)
    user = _mock_user()
    with pytest.raises(RuntimeError, match="Default group .* not found"):
        assign_user_to_default_groups__no_commit(db_session, user)
def test_already_in_group_is_noop() -> None:
    """An existing membership row makes the call a no-op (no add, no savepoint)."""
    group = _mock_group("Basic")
    existing_membership = MagicMock()
    db_session = _setup_db_session(
        group_result=group, membership_result=existing_membership
    )
    user = _mock_user()
    assign_user_to_default_groups__no_commit(db_session, user)
    db_session.add.assert_not_called()
    db_session.begin_nested.assert_not_called()
def test_integrity_error_race_condition_handled() -> None:
    """A duplicate-insert IntegrityError on flush rolls back the savepoint quietly."""
    group = _mock_group("Basic")
    db_session = _setup_db_session(group_result=group, membership_result=None)
    savepoint = MagicMock()
    db_session.begin_nested.return_value = savepoint
    # Simulate a concurrent insert winning the race: flush raises IntegrityError.
    db_session.flush.side_effect = IntegrityError(None, None, Exception("duplicate"))
    user = _mock_user()
    # Should not raise
    assign_user_to_default_groups__no_commit(db_session, user)
    savepoint.rollback.assert_called_once()
def test_no_commit_called_on_successful_assignment() -> None:
    """Per its __no_commit contract, the function never commits the session."""
    group = _mock_group("Basic")
    db_session = _setup_db_session(group_result=group, membership_result=None)
    savepoint = MagicMock()
    db_session.begin_nested.return_value = savepoint
    user = _mock_user()
    assign_user_to_default_groups__no_commit(db_session, user)
    db_session.commit.assert_not_called()

View File

@@ -3,6 +3,7 @@ from unittest.mock import MagicMock
from uuid import uuid4
from onyx.auth.schemas import UserRole
from onyx.db.enums import AccountType
from onyx.server.models import FullUserSnapshot
from onyx.server.models import UserGroupInfo
@@ -25,6 +26,7 @@ def _mock_user(
user.updated_at = updated_at or datetime.datetime(
2025, 6, 15, tzinfo=datetime.timezone.utc
)
user.account_type = AccountType.STANDARD
return user

View File

@@ -1,6 +1,5 @@
"""Tests for indexing pipeline Prometheus collectors."""
from collections.abc import Iterator
from datetime import datetime
from datetime import timedelta
from datetime import timezone
@@ -14,16 +13,6 @@ from onyx.server.metrics.indexing_pipeline import IndexAttemptCollector
from onyx.server.metrics.indexing_pipeline import QueueDepthCollector
@pytest.fixture(autouse=True)
def _mock_broker_client() -> Iterator[None]:
"""Patch celery_get_broker_client for all collector tests."""
with patch(
"onyx.background.celery.celery_redis.celery_get_broker_client",
return_value=MagicMock(),
):
yield
class TestQueueDepthCollector:
def test_returns_empty_when_factory_not_set(self) -> None:
collector = QueueDepthCollector()
@@ -35,7 +24,8 @@ class TestQueueDepthCollector:
def test_collects_queue_depths(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)
with (
patch(
@@ -70,8 +60,8 @@ class TestQueueDepthCollector:
def test_handles_redis_error_gracefully(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)
with patch(
"onyx.server.metrics.indexing_pipeline.celery_get_queue_length",
@@ -84,8 +74,8 @@ class TestQueueDepthCollector:
def test_caching_returns_stale_within_ttl(self) -> None:
collector = QueueDepthCollector(cache_ttl=60)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)
with (
patch(
@@ -108,10 +98,31 @@ class TestQueueDepthCollector:
assert first is second # Same object, from cache
def test_factory_called_each_scrape(self) -> None:
"""Verify the Redis factory is called on each fresh collect, not cached."""
collector = QueueDepthCollector(cache_ttl=0)
factory = MagicMock(return_value=MagicMock())
collector.set_redis_factory(factory)
with (
patch(
"onyx.server.metrics.indexing_pipeline.celery_get_queue_length",
return_value=0,
),
patch(
"onyx.server.metrics.indexing_pipeline.celery_get_unacked_task_ids",
return_value=set(),
),
):
collector.collect()
collector.collect()
assert factory.call_count == 2
def test_error_returns_stale_cache(self) -> None:
collector = QueueDepthCollector(cache_ttl=0)
MagicMock()
collector.set_celery_app(MagicMock())
mock_redis = MagicMock()
collector.set_redis_factory(lambda: mock_redis)
# First call succeeds
with (

View File

@@ -1,22 +1,96 @@
"""Tests for indexing pipeline setup."""
"""Tests for indexing pipeline setup (Redis factory caching)."""
from unittest.mock import MagicMock
from onyx.server.metrics.indexing_pipeline import QueueDepthCollector
from onyx.server.metrics.indexing_pipeline import RedisHealthCollector
from onyx.server.metrics.indexing_pipeline_setup import _make_broker_redis_factory
class TestCollectorCeleryAppSetup:
def test_queue_depth_collector_uses_celery_app(self) -> None:
"""QueueDepthCollector.set_celery_app stores the app for broker access."""
collector = QueueDepthCollector()
mock_app = MagicMock()
collector.set_celery_app(mock_app)
assert collector._celery_app is mock_app
def _make_mock_app(client: MagicMock) -> MagicMock:
"""Create a mock Celery app whose broker_connection().channel().client
returns the given client."""
mock_app = MagicMock()
mock_conn = MagicMock()
mock_conn.channel.return_value.client = client
def test_redis_health_collector_uses_celery_app(self) -> None:
"""RedisHealthCollector.set_celery_app stores the app for broker access."""
collector = RedisHealthCollector()
mock_app = MagicMock()
collector.set_celery_app(mock_app)
assert collector._celery_app is mock_app
mock_app.broker_connection.return_value = mock_conn
return mock_app
class TestMakeBrokerRedisFactory:
def test_caches_redis_client_across_calls(self) -> None:
"""Factory should reuse the same client on subsequent calls."""
mock_client = MagicMock()
mock_client.ping.return_value = True
mock_app = _make_mock_app(mock_client)
factory = _make_broker_redis_factory(mock_app)
client1 = factory()
client2 = factory()
assert client1 is client2
# broker_connection should only be called once
assert mock_app.broker_connection.call_count == 1
def test_reconnects_when_ping_fails(self) -> None:
"""Factory should create a new client if ping fails (stale connection)."""
mock_client_stale = MagicMock()
mock_client_stale.ping.side_effect = ConnectionError("disconnected")
mock_client_fresh = MagicMock()
mock_client_fresh.ping.return_value = True
mock_app = _make_mock_app(mock_client_stale)
factory = _make_broker_redis_factory(mock_app)
# First call — creates and caches
client1 = factory()
assert client1 is mock_client_stale
assert mock_app.broker_connection.call_count == 1
# Switch to fresh client for next connection
mock_conn_fresh = MagicMock()
mock_conn_fresh.channel.return_value.client = mock_client_fresh
mock_app.broker_connection.return_value = mock_conn_fresh
# Second call — ping fails on stale, reconnects
client2 = factory()
assert client2 is mock_client_fresh
assert mock_app.broker_connection.call_count == 2
def test_reconnect_closes_stale_client(self) -> None:
"""When ping fails, the old client should be closed before reconnecting."""
mock_client_stale = MagicMock()
mock_client_stale.ping.side_effect = ConnectionError("disconnected")
mock_client_fresh = MagicMock()
mock_client_fresh.ping.return_value = True
mock_app = _make_mock_app(mock_client_stale)
factory = _make_broker_redis_factory(mock_app)
# First call — creates and caches
factory()
# Switch to fresh client
mock_conn_fresh = MagicMock()
mock_conn_fresh.channel.return_value.client = mock_client_fresh
mock_app.broker_connection.return_value = mock_conn_fresh
# Second call — ping fails, should close stale client
factory()
mock_client_stale.close.assert_called_once()
def test_first_call_creates_connection(self) -> None:
"""First call should always create a new connection."""
mock_client = MagicMock()
mock_app = _make_mock_app(mock_client)
factory = _make_broker_redis_factory(mock_app)
client = factory()
assert client is mock_client
mock_app.broker_connection.assert_called_once()

View File

@@ -73,15 +73,6 @@ interface ContentMdProps {
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Optional class name applied to the title element. */
titleClassName?: string;
/** Optional class name applied to the icon element. */
iconClassName?: string;
/** Content rendered below the description, indented to align with it. */
bottomChildren?: React.ReactNode;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
@@ -155,9 +146,6 @@ function ContentMd({
tag,
sizePreset = "main-ui",
withInteractive,
titleClassName,
iconClassName,
bottomChildren,
ref,
}: ContentMdProps) {
const [editing, setEditing] = useState(false);
@@ -196,11 +184,7 @@ function ContentMd({
style={{ minHeight: config.lineHeight }}
>
<Icon
className={cn(
"opal-content-md-icon",
config.iconColorClass,
iconClassName
)}
className={cn("opal-content-md-icon", config.iconColorClass)}
style={{ width: config.iconSize, height: config.iconSize }}
/>
</div>
@@ -243,8 +227,7 @@ function ContentMd({
"opal-content-md-title",
config.titleFont,
"text-text-04",
editable && "cursor-pointer",
titleClassName
editable && "cursor-pointer"
)}
title={toPlainString(title)}
onClick={editable ? startEditing : undefined}
@@ -312,13 +295,6 @@ function ContentMd({
{resolveStr(description)}
</div>
)}
{bottomChildren && (
<div
style={Icon ? { paddingLeft: config.descriptionIndent } : undefined}
>
{bottomChildren}
</div>
)}
</div>
);
}

View File

@@ -138,12 +138,6 @@ type MdContentProps = ContentBaseProps & {
auxIcon?: "info-gray" | "info-blue" | "warning" | "error";
/** Tag rendered beside the title. */
tag?: TagProps;
/** Optional class name applied to the title element. */
titleClassName?: string;
/** Optional class name applied to the icon element. */
iconClassName?: string;
/** Content rendered below the description, indented to align with it. */
bottomChildren?: React.ReactNode;
};
/** ContentSm does not support descriptions or inline editing. */

View File

@@ -2,6 +2,7 @@
import { useState, useMemo, useEffect } from "react";
import useSWR from "swr";
import Text from "@/refresh-components/texts/Text";
import { Select } from "@/refresh-components/cards";
import { useCreateModal } from "@/refresh-components/contexts/ModalContext";
import { toast } from "@/hooks/useToast";
@@ -23,9 +24,8 @@ import { ProviderIcon } from "@/app/admin/configuration/llm/ProviderIcon";
import Message from "@/refresh-components/messages/Message";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import InputSelect from "@/refresh-components/inputs/InputSelect";
import { Button, Text } from "@opal/components";
import { Button } from "@opal/components";
import { SvgSlash, SvgUnplug } from "@opal/icons";
import { markdown } from "@opal/utils";
const NO_DEFAULT_VALUE = "__none__";
@@ -201,10 +201,10 @@ export default function ImageGenerationContent() {
<div className="flex flex-col gap-6">
{/* Section Header */}
<div className="flex flex-col gap-0.5">
<Text font="main-content-emphasis" color="text-05">
<Text mainContentEmphasis text05>
Image Generation Model
</Text>
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
Select a model to generate images in chat.
</Text>
</div>
@@ -223,7 +223,7 @@ export default function ImageGenerationContent() {
{/* Provider Groups */}
{IMAGE_PROVIDER_GROUPS.map((group) => (
<div key={group.name} className="flex flex-col gap-2">
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
{group.name}
</Text>
<div className="flex flex-col gap-2">
@@ -277,13 +277,12 @@ export default function ImageGenerationContent() {
{needsReplacement ? (
hasReplacements ? (
<Section alignItems="start">
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** is currently the default image generation model. Session history will be preserved.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> is currently the default
image generation model. Session history will be preserved.
</Text>
<Section alignItems="start" gap={0.25}>
<Text as="p" color="text-04">
<Text as="p" text04>
Set New Default
</Text>
<InputSelect
@@ -330,24 +329,22 @@ export default function ImageGenerationContent() {
</Section>
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** is currently the default image generation model.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> is currently the default
image generation model.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Connect another provider to continue using image generation.
</Text>
</>
)
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectProvider.title}** models will no longer be used to generate images.`
)}
<Text as="p" text03>
<b>{disconnectProvider.title}</b> models will no longer be used
to generate images.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Session history will be preserved.
</Text>
</>

View File

@@ -15,7 +15,7 @@ import { Callout } from "@/components/ui/callout";
import { cn } from "@/lib/utils";
import { toast } from "@/hooks/useToast";
import { SvgGlobe, SvgOnyxLogo, SvgSlash, SvgUnplug } from "@opal/icons";
import { Button } from "@opal/components";
import { Button as OpalButton } from "@opal/components";
import { ADMIN_ROUTES } from "@/lib/admin-routes";
import { WebProviderSetupModal } from "@/app/admin/configuration/web-search/WebProviderSetupModal";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
@@ -151,7 +151,7 @@ function WebSearchDisconnectModal({
description="This will remove the stored credentials for this provider."
onClose={onClose}
submit={
<Button
<OpalButton
variant="danger"
onClick={onDisconnect}
disabled={
@@ -159,7 +159,7 @@ function WebSearchDisconnectModal({
}
>
Disconnect
</Button>
</OpalButton>
}
>
{needsReplacement ? (

View File

@@ -4,7 +4,7 @@ import { useCallback } from "react";
import useSWR from "swr";
import { errorHandlingFetcher } from "@/lib/fetcher";
import { NEXT_PUBLIC_CLOUD_ENABLED } from "@/lib/constants";
import { UserStatus } from "@/lib/types";
import { AccountType, UserStatus } from "@/lib/types";
import type { UserRole, InvitedUserSnapshot } from "@/lib/types";
import type {
UserRow,
@@ -19,6 +19,7 @@ interface FullUserSnapshot {
id: string;
email: string;
role: UserRole;
account_type: AccountType;
is_active: boolean;
password_configured: boolean;
personal_name: string | null;

View File

@@ -52,6 +52,14 @@ export interface UserPersonalization {
user_preferences: string;
}
/**
 * Classification of a user account.
 *
 * String values are the wire format used in API payloads
 * (e.g. the `account_type` field of user snapshots) — presumably
 * mirroring a backend enum of the same name; confirm against the server.
 */
export enum AccountType {
  // Regular human user account.
  STANDARD = "standard",
  // Automated bot account.
  BOT = "bot",
  // NOTE(review): presumably a user synced from an external
  // permissioning source — confirm against backend semantics.
  EXT_PERM_USER = "ext_perm_user",
  // Machine-to-machine service account.
  SERVICE_ACCOUNT = "service_account",
  // Unauthenticated / anonymous access.
  ANONYMOUS = "anonymous",
}
export enum UserRole {
LIMITED = "limited",
BASIC = "basic",
@@ -479,6 +487,7 @@ export interface UserGroup {
personas: Persona[];
is_up_to_date: boolean;
is_up_for_deletion: boolean;
is_default: boolean;
}
export enum ValidSources {

View File

@@ -1,10 +1,8 @@
import type { UserGroup } from "@/lib/types";
/** Groups that are created by the system and cannot be deleted. */
export const BUILT_IN_GROUP_NAMES = ["Basic", "Admin"] as const;
/**
 * Whether this group is a system default group (Admin, Basic).
 *
 * Default groups are flagged by the backend via `is_default`; name-based
 * matching against `BUILT_IN_GROUP_NAMES` is no longer used, since custom
 * groups may legally share those names.
 */
export function isBuiltInGroup(group: UserGroup): boolean {
  return group.is_default;
}
/** Human-readable description for built-in groups. */

View File

@@ -1,420 +0,0 @@
"use client";
import { useState } from "react";
import { toast } from "@/hooks/useToast";
import { Button } from "@opal/components";
import { Disabled } from "@opal/core";
import { cn } from "@/lib/utils";
import { ContentAction } from "@opal/layouts";
import Card from "@/refresh-components/cards/Card";
import Text from "@/refresh-components/texts/Text";
import { Section } from "@/layouts/general-layouts";
import {
SvgCheckCircle,
SvgExternalLink,
SvgPlug,
SvgRefreshCw,
SvgSettings,
SvgTrash,
SvgUnplug,
} from "@opal/icons";
import Modal, { BasicModalFooter } from "@/refresh-components/Modal";
import type {
HookPointMeta,
HookResponse,
} from "@/refresh-pages/admin/HooksPage/interfaces";
import {
activateHook,
deactivateHook,
deleteHook,
validateHook,
} from "@/refresh-pages/admin/HooksPage/svc";
import { getHookPointIcon } from "@/refresh-pages/admin/HooksPage/hookPointIcons";
// ---------------------------------------------------------------------------
// Sub-component: disconnect confirmation modal
// ---------------------------------------------------------------------------
interface DisconnectConfirmModalProps {
open: boolean;
onOpenChange: (open: boolean) => void;
hook: HookResponse;
onDisconnect: () => void;
onDisconnectAndDelete: () => void;
}
function DisconnectConfirmModal({
open,
onOpenChange,
hook,
onDisconnect,
onDisconnectAndDelete,
}: DisconnectConfirmModalProps) {
return (
<Modal open={open} onOpenChange={onOpenChange}>
<Modal.Content width="md" height="fit">
<Modal.Header
icon={(props) => (
<SvgUnplug {...props} className="text-action-danger-05" />
)}
title={`Disconnect ${hook.name}`}
onClose={() => onOpenChange(false)}
/>
<Modal.Body>
<div className="flex flex-col gap-4">
<Text mainUiBody text03>
Onyx will stop calling this endpoint for hook{" "}
<strong>
<em>{hook.name}</em>
</strong>
. In-flight requests will continue to run. The external endpoint
may still retain data previously sent to it. You can reconnect
this hook later if needed.
</Text>
<Text mainUiBody text03>
You can also delete this hook. Deletion cannot be undone.
</Text>
</div>
</Modal.Body>
<Modal.Footer>
<BasicModalFooter
cancel={
<Button
prominence="secondary"
onClick={() => onOpenChange(false)}
>
Cancel
</Button>
}
submit={
<div className="flex items-center gap-2">
<Button
variant="danger"
prominence="secondary"
onClick={onDisconnectAndDelete}
>
Disconnect &amp; Delete
</Button>
<Button
variant="danger"
prominence="primary"
onClick={onDisconnect}
>
Disconnect
</Button>
</div>
}
/>
</Modal.Footer>
</Modal.Content>
</Modal>
);
}
// ---------------------------------------------------------------------------
// Sub-component: delete confirmation modal
// ---------------------------------------------------------------------------
interface DeleteConfirmModalProps {
open: boolean;
onOpenChange: (open: boolean) => void;
hook: HookResponse;
onDelete: () => void;
}
function DeleteConfirmModal({
open,
onOpenChange,
hook,
onDelete,
}: DeleteConfirmModalProps) {
return (
<Modal open={open} onOpenChange={onOpenChange}>
<Modal.Content width="md" height="fit">
<Modal.Header
icon={(props) => (
<SvgTrash {...props} className="text-action-danger-05" />
)}
title={`Delete ${hook.name}`}
onClose={() => onOpenChange(false)}
/>
<Modal.Body>
<div className="flex flex-col gap-4">
<Text mainUiBody text03>
Hook{" "}
<strong>
<em>{hook.name}</em>
</strong>{" "}
will be permanently removed from this hook point. The external
endpoint may still retain data previously sent to it.
</Text>
<Text mainUiBody text03>
Deletion cannot be undone.
</Text>
</div>
</Modal.Body>
<Modal.Footer>
<BasicModalFooter
cancel={
<Button
prominence="secondary"
onClick={() => onOpenChange(false)}
>
Cancel
</Button>
}
submit={
<Button variant="danger" prominence="primary" onClick={onDelete}>
Delete
</Button>
}
/>
</Modal.Footer>
</Modal.Content>
</Modal>
);
}
// ---------------------------------------------------------------------------
// ConnectedHookCard
// ---------------------------------------------------------------------------
export interface ConnectedHookCardProps {
  /** The connected hook being rendered. */
  hook: HookResponse;
  /** Metadata for the hook's hook point; undefined when the spec is unavailable. */
  spec: HookPointMeta | undefined;
  /** Invoked when the user clicks the settings (Manage) button. */
  onEdit: () => void;
  /** Invoked after the hook has been deleted server-side. */
  onDeleted: () => void;
  /** Invoked with the updated hook after it is activated or deactivated. */
  onToggled: (updated: HookResponse) => void;
}

/**
 * Card for a hook that is already connected to a hook point.
 *
 * Shows connection status and exposes disconnect / reconnect / validate /
 * delete / manage actions. Disconnect and delete are gated behind
 * confirmation modals; every server call sets `isBusy` so the action row is
 * disabled while a request is in flight.
 */
export default function ConnectedHookCard({
  hook,
  spec,
  onEdit,
  onDeleted,
  onToggled,
}: ConnectedHookCardProps) {
  // True while any server request is in flight; disables the action buttons.
  const [isBusy, setIsBusy] = useState(false);
  const [disconnectConfirmOpen, setDisconnectConfirmOpen] = useState(false);
  const [deleteConfirmOpen, setDeleteConfirmOpen] = useState(false);

  // Permanently deletes the hook, then notifies the parent via onDeleted.
  async function handleDelete() {
    setDeleteConfirmOpen(false);
    setIsBusy(true);
    try {
      await deleteHook(hook.id);
      onDeleted();
    } catch (err) {
      console.error("Failed to delete hook:", err);
      toast.error(
        err instanceof Error ? err.message : "Failed to delete hook."
      );
    } finally {
      setIsBusy(false);
    }
  }

  // Reconnects (activates) the hook and reports the updated state upward.
  async function handleActivate() {
    setIsBusy(true);
    try {
      const updated = await activateHook(hook.id);
      onToggled(updated);
    } catch (err) {
      console.error("Failed to reconnect hook:", err);
      toast.error(
        err instanceof Error ? err.message : "Failed to reconnect hook."
      );
    } finally {
      setIsBusy(false);
    }
  }

  // Disconnects (deactivates) the hook and reports the updated state upward.
  async function handleDeactivate() {
    setDisconnectConfirmOpen(false);
    setIsBusy(true);
    try {
      const updated = await deactivateHook(hook.id);
      onToggled(updated);
    } catch (err) {
      console.error("Failed to deactivate hook:", err);
      toast.error(
        err instanceof Error ? err.message : "Failed to deactivate hook."
      );
    } finally {
      setIsBusy(false);
    }
  }

  // Deactivates then deletes the hook in sequence. If the delete fails after
  // the deactivate succeeded, the parent has already seen the deactivated
  // state via onToggled.
  async function handleDisconnectAndDelete() {
    setDisconnectConfirmOpen(false);
    setIsBusy(true);
    try {
      const deactivated = await deactivateHook(hook.id);
      onToggled(deactivated);
      await deleteHook(hook.id);
      onDeleted();
    } catch (err) {
      console.error("Failed to disconnect hook:", err);
      toast.error(
        err instanceof Error ? err.message : "Failed to disconnect hook."
      );
    } finally {
      setIsBusy(false);
    }
  }

  // Runs a server-side validation of the hook endpoint and surfaces the
  // result as a toast. Does not change hook state.
  async function handleValidate() {
    setIsBusy(true);
    try {
      const result = await validateHook(hook.id);
      if (result.status === "passed") {
        toast.success("Hook validated successfully.");
      } else {
        toast.error(
          result.error_message ?? `Validation failed: ${result.status}`
        );
      }
    } catch (err) {
      console.error("Failed to validate hook:", err);
      toast.error(
        err instanceof Error ? err.message : "Failed to validate hook."
      );
    } finally {
      setIsBusy(false);
    }
  }

  const HookIcon = getHookPointIcon(hook.hook_point);
  return (
    <>
      {/* Confirmation modals live alongside the card so they can share its handlers. */}
      <DisconnectConfirmModal
        open={disconnectConfirmOpen}
        onOpenChange={setDisconnectConfirmOpen}
        hook={hook}
        onDisconnect={handleDeactivate}
        onDisconnectAndDelete={handleDisconnectAndDelete}
      />
      <DeleteConfirmModal
        open={deleteConfirmOpen}
        onOpenChange={setDeleteConfirmOpen}
        hook={hook}
        onDelete={handleDelete}
      />
      <Card
        variant="primary"
        padding={0.5}
        gap={0}
        className={cn(
          "hover:border-border-02",
          // Inactive hooks are visually muted.
          !hook.is_active && "!bg-background-neutral-02"
        )}
      >
        <ContentAction
          sizePreset="main-ui"
          variant="section"
          paddingVariant="sm"
          icon={HookIcon}
          title={hook.name}
          // Strike through the title when the hook is disconnected.
          titleClassName={!hook.is_active ? "line-through" : undefined}
          iconClassName="text-text-04"
          description={`Hook Point: ${spec?.display_name ?? hook.hook_point}`}
          bottomChildren={
            spec?.docs_url ? (
              <a
                href={spec.docs_url}
                target="_blank"
                rel="noopener noreferrer"
                className="flex items-center gap-1 w-fit font-secondary-body text-text-03"
              >
                <span className="underline">Documentation</span>
                <SvgExternalLink size={12} className="shrink-0" />
              </a>
            ) : undefined
          }
          rightChildren={
            <Section
              flexDirection="column"
              alignItems="end"
              width="fit"
              height="fit"
              gap={0}
            >
              {/* Status row: "Connected" label, or a clickable "Reconnect" affordance. */}
              <div className="flex items-center gap-1 p-2">
                {hook.is_active ? (
                  <>
                    <Text mainUiAction text03>
                      Connected
                    </Text>
                    <SvgCheckCircle
                      size={16}
                      className="text-status-success-05"
                    />
                  </>
                ) : (
                  <div
                    className={cn(
                      "flex items-center gap-1",
                      isBusy
                        ? "opacity-50 pointer-events-none"
                        : "cursor-pointer"
                    )}
                    onClick={handleActivate}
                  >
                    <Text mainUiAction text03>
                      Reconnect
                    </Text>
                    <SvgPlug size={16} className="text-text-03 shrink-0" />
                  </div>
                )}
              </div>
              <Disabled disabled={isBusy}>
                {/* Plain div instead of Section: Section applies style={{ padding }} inline which
                    overrides Tailwind padding classes, making per-side padding (pl/pr/pb) ineffective. */}
                <div className="flex items-center gap-0.5 pl-1 pr-1 pb-1">
                  {hook.is_active ? (
                    <>
                      <Button
                        prominence="tertiary"
                        size="sm"
                        icon={SvgUnplug}
                        onClick={() => setDisconnectConfirmOpen(true)}
                        tooltip="Disconnect Hook"
                        aria-label="Deactivate hook"
                      />
                      <Button
                        prominence="tertiary"
                        size="sm"
                        icon={SvgRefreshCw}
                        onClick={handleValidate}
                        tooltip="Test Connection"
                        aria-label="Re-validate hook"
                      />
                    </>
                  ) : (
                    <Button
                      prominence="tertiary"
                      size="sm"
                      icon={SvgTrash}
                      onClick={() => setDeleteConfirmOpen(true)}
                      tooltip="Delete"
                      aria-label="Delete hook"
                    />
                  )}
                  <Button
                    prominence="tertiary"
                    size="sm"
                    icon={SvgSettings}
                    onClick={onEdit}
                    tooltip="Manage"
                    aria-label="Configure hook"
                  />
                </div>
              </Disabled>
            </Section>
          }
        />
      </Card>
    </>
  );
}

View File

@@ -1,211 +1,117 @@
"use client";
import { useState } from "react";
import { useState, useEffect } from "react";
import { toast } from "@/hooks/useToast";
import { useHookSpecs } from "@/hooks/useHookSpecs";
import { useHooks } from "@/hooks/useHooks";
import SimpleLoader from "@/refresh-components/loaders/SimpleLoader";
import { Button } from "@opal/components";
import { ContentAction } from "@opal/layouts";
import { Button } from "@opal/components";
import InputSearch from "@/refresh-components/inputs/InputSearch";
import Card from "@/refresh-components/cards/Card";
import Text from "@/refresh-components/texts/Text";
import { SvgArrowExchange, SvgExternalLink } from "@opal/icons";
import HookFormModal from "@/refresh-pages/admin/HooksPage/HookFormModal";
import ConnectedHookCard from "@/refresh-pages/admin/HooksPage/ConnectedHookCard";
import { getHookPointIcon } from "@/refresh-pages/admin/HooksPage/hookPointIcons";
import type {
HookPointMeta,
HookResponse,
} from "@/refresh-pages/admin/HooksPage/interfaces";
import {
SvgArrowExchange,
SvgBubbleText,
SvgExternalLink,
SvgFileBroadcast,
SvgHookNodes,
} from "@opal/icons";
import { IconFunctionComponent } from "@opal/types";
// ---------------------------------------------------------------------------
// Main component
// ---------------------------------------------------------------------------
const HOOK_POINT_ICONS: Record<string, IconFunctionComponent> = {
document_ingestion: SvgFileBroadcast,
query_processing: SvgBubbleText,
};
function getHookPointIcon(hookPoint: string): IconFunctionComponent {
return HOOK_POINT_ICONS[hookPoint] ?? SvgHookNodes;
}
export default function HooksContent() {
const [search, setSearch] = useState("");
const [connectSpec, setConnectSpec] = useState<HookPointMeta | null>(null);
const [editHook, setEditHook] = useState<HookResponse | null>(null);
const { specs, isLoading: specsLoading, error: specsError } = useHookSpecs();
const {
hooks,
isLoading: hooksLoading,
error: hooksError,
mutate,
} = useHooks();
const { specs, isLoading, error } = useHookSpecs();
if (specsLoading || hooksLoading) {
useEffect(() => {
if (error) {
toast.error("Failed to load hook specifications.");
}
}, [error]);
if (isLoading) {
return <SimpleLoader />;
}
if (specsError || hooksError) {
if (error) {
return (
<Text text03 secondaryBody>
Failed to load{specsError ? " hook specifications" : " hooks"}. Please
refresh the page.
Failed to load hook specifications. Please refresh the page.
</Text>
);
}
const hooksByPoint: Record<string, HookResponse[]> = {};
for (const hook of hooks ?? []) {
(hooksByPoint[hook.hook_point] ??= []).push(hook);
}
const searchLower = search.toLowerCase();
// Connected hooks sorted alphabetically by hook name
const connectedHooks = (hooks ?? [])
.filter(
(hook) =>
!searchLower ||
hook.name.toLowerCase().includes(searchLower) ||
specs
?.find((s) => s.hook_point === hook.hook_point)
?.display_name.toLowerCase()
.includes(searchLower)
)
.sort((a, b) => a.name.localeCompare(b.name));
// Unconnected hook point specs sorted alphabetically
const unconnectedSpecs = (specs ?? [])
.filter(
(spec) =>
(hooksByPoint[spec.hook_point]?.length ?? 0) === 0 &&
(!searchLower ||
spec.display_name.toLowerCase().includes(searchLower) ||
spec.description.toLowerCase().includes(searchLower))
)
.sort((a, b) => a.display_name.localeCompare(b.display_name));
function handleHookSuccess(updated: HookResponse) {
mutate((prev) => {
if (!prev) return [updated];
const idx = prev.findIndex((h) => h.id === updated.id);
if (idx >= 0) {
const next = [...prev];
next[idx] = updated;
return next;
}
return [...prev, updated];
});
}
function handleHookDeleted(id: number) {
mutate((prev) => prev?.filter((h) => h.id !== id));
}
const connectSpec_ =
connectSpec ??
(editHook
? specs?.find((s) => s.hook_point === editHook.hook_point)
: undefined);
const filtered = (specs ?? []).filter(
(spec) =>
spec.display_name.toLowerCase().includes(search.toLowerCase()) ||
spec.description.toLowerCase().includes(search.toLowerCase())
);
return (
<>
<div className="flex flex-col gap-6">
<InputSearch
placeholder="Search hooks..."
value={search}
onChange={(e) => setSearch(e.target.value)}
/>
<div className="flex flex-col gap-6">
<InputSearch
placeholder="Search hooks..."
value={search}
onChange={(e) => setSearch(e.target.value)}
/>
<div className="flex flex-col gap-2">
{connectedHooks.length === 0 && unconnectedSpecs.length === 0 ? (
<Text text03 secondaryBody>
{search
? "No hooks match your search."
: "No hook points are available."}
</Text>
) : (
<>
{connectedHooks.map((hook) => {
const spec = specs?.find(
(s) => s.hook_point === hook.hook_point
);
return (
<ConnectedHookCard
key={hook.id}
hook={hook}
spec={spec}
onEdit={() => setEditHook(hook)}
onDeleted={() => handleHookDeleted(hook.id)}
onToggled={handleHookSuccess}
/>
);
})}
{unconnectedSpecs.map((spec) => {
const UnconnectedIcon = getHookPointIcon(spec.hook_point);
return (
<Card
key={spec.hook_point}
variant="secondary"
padding={0.5}
gap={0}
className="hover:border-border-02"
<div className="flex flex-col gap-2">
{filtered.length === 0 ? (
<Text text03 secondaryBody>
{search
? "No hooks match your search."
: "No hook points are available."}
</Text>
) : (
filtered.map((spec) => (
<Card
key={spec.hook_point}
variant="secondary"
padding={0.5}
gap={0}
>
<ContentAction
icon={getHookPointIcon(spec.hook_point)}
title={spec.display_name}
description={spec.description}
sizePreset="main-content"
variant="section"
paddingVariant="fit"
rightChildren={
// TODO(Bo-Onyx): wire up Connect — open modal to create/edit hook
<Button prominence="tertiary" rightIcon={SvgArrowExchange}>
Connect
</Button>
}
/>
{spec.docs_url && (
<div className="pl-7 pt-1">
<a
href={spec.docs_url}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 w-fit text-text-03"
>
<ContentAction
sizePreset="main-ui"
variant="section"
paddingVariant="sm"
icon={UnconnectedIcon}
title={spec.display_name}
iconClassName="text-text-04"
description={spec.description}
bottomChildren={
spec.docs_url ? (
<a
href={spec.docs_url}
target="_blank"
rel="noopener noreferrer"
className="flex items-center gap-1 w-fit font-secondary-body text-text-03"
>
<span className="underline">Documentation</span>
<SvgExternalLink size={12} className="shrink-0" />
</a>
) : undefined
}
rightChildren={
<Button
prominence="tertiary"
rightIcon={SvgArrowExchange}
onClick={() => setConnectSpec(spec)}
>
Connect
</Button>
}
/>
</Card>
);
})}
</>
)}
</div>
<Text as="span" secondaryBody text03 className="underline">
Documentation
</Text>
<SvgExternalLink size={16} className="text-text-02" />
</a>
</div>
)}
</Card>
))
)}
</div>
{/* Create modal */}
<HookFormModal
key={connectSpec?.hook_point ?? "create"}
open={!!connectSpec}
onOpenChange={(open) => {
if (!open) setConnectSpec(null);
}}
spec={connectSpec ?? undefined}
onSuccess={handleHookSuccess}
/>
{/* Edit modal */}
<HookFormModal
key={editHook?.id ?? "edit"}
open={!!editHook}
onOpenChange={(open) => {
if (!open) setEditHook(null);
}}
hook={editHook ?? undefined}
spec={connectSpec_ ?? undefined}
onSuccess={handleHookSuccess}
/>
</>
</div>
);
}

View File

@@ -1,13 +0,0 @@
import { SvgBubbleText, SvgFileBroadcast, SvgHookNodes } from "@opal/icons";
import type { IconFunctionComponent } from "@opal/types";

/** Icons for known hook points, keyed by hook-point identifier. */
const HOOK_POINT_ICONS: Record<string, IconFunctionComponent> = {
  document_ingestion: SvgFileBroadcast,
  query_processing: SvgBubbleText,
};

/**
 * Resolves the display icon for a hook point, falling back to the generic
 * hook icon for hook points without a dedicated mapping.
 */
function getHookPointIcon(hookPoint: string): IconFunctionComponent {
  const icon = HOOK_POINT_ICONS[hookPoint];
  return icon ?? SvgHookNodes;
}

export { HOOK_POINT_ICONS, getHookPointIcon };

View File

@@ -7,6 +7,7 @@ import {
IconProps,
OpenAIIcon,
} from "@/components/icons/icons";
import Text from "@/refresh-components/texts/Text";
import { Select } from "@/refresh-components/cards";
import Message from "@/refresh-components/messages/Message";
import * as SettingsLayouts from "@/layouts/settings-layouts";
@@ -25,8 +26,7 @@ import { toast } from "@/hooks/useToast";
import { Callout } from "@/components/ui/callout";
import { Content } from "@opal/layouts";
import { SvgMicrophone, SvgSlash, SvgUnplug } from "@opal/icons";
import { Button, Text } from "@opal/components";
import { markdown } from "@opal/utils";
import { Button as OpalButton } from "@opal/components";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import { Section } from "@/layouts/general-layouts";
import { ADMIN_ROUTES } from "@/lib/admin-routes";
@@ -205,7 +205,7 @@ function VoiceDisconnectModal({
description="Voice models"
onClose={onClose}
submit={
<Button
<OpalButton
variant="danger"
onClick={onDisconnect}
disabled={
@@ -213,19 +213,19 @@ function VoiceDisconnectModal({
}
>
Disconnect
</Button>
</OpalButton>
}
>
{needsReplacement ? (
hasReplacements ? (
<Section alignItems="start">
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be used for speech-to-text or text-to-speech, and it will no longer be your default. Session history will be preserved.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
used for speech-to-text or text-to-speech, and it will no longer
be your default. Session history will be preserved.
</Text>
<Section alignItems="start" gap={0.25}>
<Text as="p" color="text-04">
<Text as="p" text04>
Set New Default
</Text>
<InputSelect
@@ -256,24 +256,23 @@ function VoiceDisconnectModal({
</Section>
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be used for speech-to-text or text-to-speech, and it will no longer be your default.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
used for speech-to-text or text-to-speech, and it will no longer
be your default.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Connect another provider to continue using voice.
</Text>
</>
)
) : (
<>
<Text as="p" color="text-03">
{markdown(
`**${disconnectTarget.providerLabel}** models will no longer be available for voice.`
)}
<Text as="p" text03>
<b>{disconnectTarget.providerLabel}</b> models will no longer be
available for voice.
</Text>
<Text as="p" color="text-03">
<Text as="p" text03>
Session history will be preserved.
</Text>
</>
@@ -537,7 +536,7 @@ export default function VoiceConfigurationPage() {
<Callout type="danger" title="Failed to load voice settings">
{message}
{detail && (
<Text as="p" font="main-content-body" color="text-03">
<Text as="p" mainContentBody text03>
{detail}
</Text>
)}
@@ -627,7 +626,7 @@ export default function VoiceConfigurationPage() {
{TTS_PROVIDER_GROUPS.map((group) => (
<div key={group.providerType} className="flex flex-col gap-2">
<Text font="secondary-body" color="text-03">
<Text secondaryBody text03>
{group.providerLabel}
</Text>
<div className="flex flex-col gap-2">

View File

@@ -53,19 +53,18 @@ test.describe("Groups page — layout", () => {
test.beforeAll(async ({ browser }) => {
await withApiContext(browser, async (api) => {
adminGroupId = await api.createUserGroup("Admin");
basicGroupId = await api.createUserGroup("Basic");
await api.waitForGroupSync(adminGroupId);
await api.waitForGroupSync(basicGroupId);
const groups = await api.getUserGroups();
const adminGroup = groups.find((g) => g.name === "Admin" && g.is_default);
const basicGroup = groups.find((g) => g.name === "Basic" && g.is_default);
if (!adminGroup || !basicGroup) {
throw new Error("Default Admin/Basic groups not found");
}
adminGroupId = adminGroup.id;
basicGroupId = basicGroup.id;
});
});
test.afterAll(async ({ browser }) => {
await withApiContext(browser, async (api) => {
await softCleanup(() => api.deleteUserGroup(adminGroupId));
await softCleanup(() => api.deleteUserGroup(basicGroupId));
});
});
// No afterAll — these are built-in default groups and must not be deleted
test("renders page title, search, and new group button", async ({
groupsPage,
@@ -77,7 +76,8 @@ test.describe("Groups page — layout", () => {
await expect(groupsPage.newGroupButton).toBeVisible();
});
test("shows built-in groups (Admin, Basic)", async ({ groupsPage }) => {
test.skip("shows built-in groups (Admin, Basic)", async ({ groupsPage }) => {
// TODO: Enable once default groups are shown via include_default=true
await groupsPage.goto();
await groupsPage.expectGroupVisible("Admin");

View File

@@ -4,14 +4,6 @@ import { expectScreenshot } from "@tests/e2e/utils/visualRegression";
test.use({ storageState: "admin_auth.json" });
/** Maps each settings slug to the header title shown on that page. */
const SLUG_TO_HEADER: Record<string, string> = {
general: "Profile",
"chat-preferences": "Chats",
"accounts-access": "Accounts",
connectors: "Connectors",
};
for (const theme of THEMES) {
test.describe(`Settings pages (${theme} mode)`, () => {
test.beforeEach(async ({ page }) => {
@@ -19,33 +11,21 @@ for (const theme of THEMES) {
});
test("should screenshot each settings tab", async ({ page }) => {
await page.goto("/app/settings/general");
await page
.getByTestId("settings-left-tab-navigation")
.waitFor({ state: "visible" });
await page.goto("/app/settings");
await page.waitForLoadState("networkidle");
const nav = page.getByTestId("settings-left-tab-navigation");
const tabs = nav.locator("a");
await expect(tabs.first()).toBeVisible({ timeout: 10_000 });
const count = await tabs.count();
expect(count).toBeGreaterThan(0);
for (let i = 0; i < count; i++) {
const tab = tabs.nth(i);
const href = await tab.getAttribute("href");
const slug = href ? href.replace("/app/settings/", "") : `tab-${i}`;
await tab.click();
const expectedHeader = SLUG_TO_HEADER[slug];
if (expectedHeader) {
await expect(
page
.locator(".opal-content-md-header")
.filter({ hasText: expectedHeader })
).toBeVisible({ timeout: 10_000 });
} else {
await page.waitForLoadState("networkidle");
}
await page.waitForLoadState("networkidle");
await expectScreenshot(page, {
name: `settings-${theme}-${slug}`,

View File

@@ -632,6 +632,18 @@ export class OnyxApiClient {
this.log(`Deleted user group: ${groupId}`);
}
/**
 * Lists all user groups, including the system default groups
 * (requested via the `include_default=true` query parameter).
 */
async getUserGroups(): Promise<
  Array<{ id: number; name: string; is_default: boolean }>
> {
  const url = "/manage/admin/user-group?include_default=true";
  const response = await this.get(url);
  return response.json();
}
async setUserRole(
email: string,
role: "admin" | "curator" | "global_curator" | "basic",