Compare commits

..

21 Commits

Author SHA1 Message Date
Raunak Bhagat
0807b8cff4 refactor: clean up Footer pagination usage
- Use singular/plural "item"/"items" for count variant units
- Limit pagination sizes to md/lg only
- Remove unused size prop from FooterSelectionModeProps
- Remove unused TableSize type import
2026-03-16 15:55:53 -07:00
Raunak Bhagat
886ae70fad chore: init refactor/table-footer branch 2026-03-16 15:55:04 -07:00
Raunak Bhagat
5d2dd6e7b8 refactor: rename showPages to hidePages (inverted default)
Users must now explicitly opt out of showing pages with hidePages,
rather than opt in with showPages. Default behavior unchanged.
2026-03-16 15:54:53 -07:00
Raunak Bhagat
bcd64cc2f6 fix: use sizeVariants.lg.height for go-to-page input 2026-03-16 15:39:18 -07:00
Raunak Bhagat
f1eeb9e372 refactor: unify pagination callbacks to onChange
Consolidate onArrowClick and onPageClick into a single onChange
callback across all variants. Fix React.ReactNode import in Footer.
2026-03-16 15:33:17 -07:00
Raunak Bhagat
aed3db2322 fix: remove unused ELLIPSIS_SIZE constant 2026-03-16 15:14:14 -07:00
Raunak Bhagat
7fe753c0ce fix: remove stale goto prop from README documentation 2026-03-16 15:02:50 -07:00
Raunak Bhagat
375da7aaa6 fix: make go-to-page popover size-independent and polish styling
- Remove size prop from GoToPagePopup so it renders consistently
- Fixed input height (36px), width (7rem), rounded-08, font-main-ui-body
- Popover uses rounded-12, p-1, gap-1
- Submit button always renders at lg size
2026-03-16 14:58:45 -07:00
Raunak Bhagat
d3b46f7d9b feat: add go-to-page popup to all Pagination variants
Replace the old `goto` callback prop with an inline popover that lets
users type a page number and jump directly to it. The popup is
activated by clicking the page indicator (simple/count) or the
ellipsis button (list). Input is number-only and the submit button
is disabled when the value is out of range.
2026-03-16 14:43:07 -07:00
Raunak Bhagat
5ba61c7331 fix: clamp currentPage upper bound to totalPages 2026-03-16 13:55:57 -07:00
Raunak Bhagat
2be27028a4 Fix spacing 2026-03-16 13:53:40 -07:00
Raunak Bhagat
ab1fc67204 fix: address PR review comments
- Use explicit named imports from react instead of React namespace
- Clamp nav button page values to prevent out-of-bounds
- Replace inline marginLeft styles with Tailwind ml-1
- Normalize currentPage alongside totalPages
- Use gap-1 token instead of gap-[4px] in PaginationCount
2026-03-16 13:34:16 -07:00
Raunak Bhagat
640590cfbf fix: remove unnecessary cn wrapper and unused size prop 2026-03-16 13:29:51 -07:00
Raunak Bhagat
0ff2fd4bca fix: remove conditional gap sizing in PaginationSimple 2026-03-16 13:24:36 -07:00
Raunak Bhagat
994ab1b4b7 refactor: rename showSummary to showPages in Pagination 2026-03-16 13:12:15 -07:00
Raunak Bhagat
50d821f233 fix: address PR review comments
- Combine duplicate @opal/components imports in Footer.tsx
- Add onPageClick to List story args to prevent throw on click
- Fix README size default from "varies" to "lg"
2026-03-16 13:09:49 -07:00
Raunak Bhagat
8d51546361 fix: set fixed width for count variant page number
Page number between arrows in variant="count" is now 28px for lg/md
and 20px for sm, ensuring consistent layout.
2026-03-16 13:03:36 -07:00
Raunak Bhagat
108652e86c fix: pagination list ellipsis threshold and slot width
Change ellipsis threshold to >7 pages (was >5). Always render exactly
7 slots when truncating for constant component width. Size ellipsis
slots to match icon-only Button dimensions (36/28/24px).
2026-03-16 12:51:59 -07:00
Raunak Bhagat
cdcb77d146 refactor: simplify Pagination list variant API
Replace onChange/onPageClick split with a single onPageClick callback
for the list variant. Remove PaginationBase (no longer shared). Default
size is now "lg" for all variants. Update all consumers, stories,
README, and JSDoc.
2026-03-16 12:34:09 -07:00
Raunak Bhagat
fd4202c5fd refactor: update Pagination count variant API and fix spacing
Update variant="count" to use onArrowClick, showSummary, units, and
goto props (matching simple variant pattern). Buttons section (arrows +
page number) renders with no internal gap. Fixed 4px gap between
summary, buttons, and goto sections. Update stories, README, and
JSDoc to reflect current API for all variants.
2026-03-16 12:28:16 -07:00
Raunak Bhagat
9f4d60090d refactor: move Pagination to opal with three-variant API
Move Pagination from refresh-components to @opal/components with a
discriminated union API: variant="simple" (arrows + summary),
variant="count" (range display), and variant="list" (numbered pages,
default). Inline table/Pagination.tsx into Footer.tsx then replace
with the opal import. Remove internal-only stories (Footer, DataTable)
and the old refresh-components Pagination + story.
2026-03-16 12:14:17 -07:00
56 changed files with 528 additions and 3051 deletions

View File

@@ -455,7 +455,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -529,7 +529,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -607,7 +607,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -668,7 +668,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -750,7 +750,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -836,7 +836,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -894,7 +894,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -967,7 +967,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1044,7 +1044,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1105,7 +1105,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1178,7 +1178,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1256,7 +1256,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1317,7 +1317,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1397,7 +1397,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1480,7 +1480,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |

View File

@@ -105,7 +105,7 @@ jobs:
- name: Upload build artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: desktop-build-${{ matrix.platform }}-${{ github.run_id }}
path: |

View File

@@ -174,7 +174,7 @@ jobs:
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-${{ matrix.test-dir }}
path: docker-logs/

View File

@@ -25,7 +25,7 @@ jobs:
outputs:
modules: ${{ steps.set-modules.outputs.modules }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
persist-credentials: false
- id: set-modules
@@ -39,7 +39,7 @@ jobs:
matrix:
modules: ${{ fromJSON(needs.detect-modules.outputs.modules) }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # zizmor: ignore[cache-poisoning]

View File

@@ -466,7 +466,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-${{ matrix.edition }}-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
@@ -587,7 +587,7 @@ jobs:
- name: Upload logs (onyx-lite)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-onyx-lite
path: ${{ github.workspace }}/docker-compose-onyx-lite.log
@@ -725,7 +725,7 @@ jobs:
- name: Upload logs (multi-tenant)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-multitenant
path: ${{ github.workspace }}/docker-compose-multitenant.log

View File

@@ -44,7 +44,7 @@ jobs:
- name: Upload coverage reports
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: jest-coverage-${{ github.run_id }}
path: ./web/coverage

View File

@@ -445,7 +445,7 @@ jobs:
run: |
npx playwright test --project ${PROJECT}
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
# Includes test results and trace.zip files
@@ -454,7 +454,7 @@ jobs:
retention-days: 30
- name: Upload screenshots
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: playwright-screenshots-${{ matrix.project }}-${{ github.run_id }}
@@ -534,7 +534,7 @@ jobs:
"s3://${PLAYWRIGHT_S3_BUCKET}/reports/pr-${PR_NUMBER}/${RUN_ID}/${PROJECT}/"
- name: Upload visual diff summary
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: screenshot-diff-summary-${{ matrix.project }}
@@ -543,7 +543,7 @@ jobs:
retention-days: 5
- name: Upload visual diff report artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: screenshot-diff-report-${{ matrix.project }}-${{ github.run_id }}
@@ -590,7 +590,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log
@@ -674,7 +674,7 @@ jobs:
working-directory: ./web
run: npx playwright test --project lite
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: playwright-test-results-lite-${{ github.run_id }}
@@ -692,7 +692,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-lite-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -122,7 +122,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -319,7 +319,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-nightly-${{ matrix.provider }}-llm-provider
path: |

View File

@@ -125,7 +125,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -195,7 +195,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -268,7 +268,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |

View File

@@ -1,103 +0,0 @@
"""add_hook_and_hook_execution_log_tables
Revision ID: 689433b0d8de
Revises: 93a2e195e25c
Create Date: 2026-03-13 11:25:06.547474
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID as PGUUID
# revision identifiers, used by Alembic.
revision = "689433b0d8de"
down_revision = "93a2e195e25c"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"hook",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column(
"hook_point",
sa.Enum("document_ingestion", "query_processing", native_enum=False),
nullable=False,
),
sa.Column("endpoint_url", sa.Text(), nullable=True),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("is_reachable", sa.Boolean(), nullable=True),
sa.Column(
"fail_strategy",
sa.Enum("hard", "soft", native_enum=False),
nullable=False,
),
sa.Column("timeout_seconds", sa.Float(), nullable=False),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"deleted", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column("creator_id", PGUUID(as_uuid=True), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(["creator_id"], ["user.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"ix_hook_one_non_deleted_per_point",
"hook",
["hook_point"],
unique=True,
postgresql_where=sa.text("deleted = false"),
)
op.create_table(
"hook_execution_log",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("hook_id", sa.Integer(), nullable=False),
sa.Column(
"is_success",
sa.Boolean(),
nullable=False,
),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("status_code", sa.Integer(), nullable=True),
sa.Column("duration_ms", sa.Integer(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(["hook_id"], ["hook.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index("ix_hook_execution_log_hook_id", "hook_execution_log", ["hook_id"])
op.create_index(
"ix_hook_execution_log_created_at", "hook_execution_log", ["created_at"]
)
def downgrade() -> None:
op.drop_index("ix_hook_execution_log_created_at", table_name="hook_execution_log")
op.drop_index("ix_hook_execution_log_hook_id", table_name="hook_execution_log")
op.drop_table("hook_execution_log")
op.drop_index("ix_hook_one_non_deleted_per_point", table_name="hook")
op.drop_table("hook")

View File

@@ -118,7 +118,9 @@ JWT_PUBLIC_KEY_URL: str | None = os.getenv("JWT_PUBLIC_KEY_URL", None)
SUPER_USERS = json.loads(os.environ.get("SUPER_USERS", "[]"))
SUPER_CLOUD_API_KEY = os.environ.get("SUPER_CLOUD_API_KEY", "api_key")
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY")
# The posthog client does not accept empty API keys or hosts however it fails silently
# when the capture is called. These defaults prevent Posthog issues from breaking the Onyx app
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY") or "FooBar"
POSTHOG_HOST = os.environ.get("POSTHOG_HOST") or "https://us.i.posthog.com"
POSTHOG_DEBUG_LOGS_ENABLED = (
os.environ.get("POSTHOG_DEBUG_LOGS_ENABLED", "").lower() == "true"

View File

@@ -34,9 +34,6 @@ class PostHogFeatureFlagProvider(FeatureFlagProvider):
Returns:
True if the feature is enabled for the user, False otherwise.
"""
if not posthog:
return False
try:
posthog.set(
distinct_id=user_id,

View File

@@ -29,6 +29,7 @@ from onyx.configs.app_configs import OPENAI_DEFAULT_API_KEY
from onyx.configs.app_configs import OPENROUTER_DEFAULT_API_KEY
from onyx.configs.app_configs import VERTEXAI_DEFAULT_CREDENTIALS
from onyx.configs.app_configs import VERTEXAI_DEFAULT_LOCATION
from onyx.configs.constants import MilestoneRecordType
from onyx.db.engine.sql_engine import get_session_with_shared_schema
from onyx.db.engine.sql_engine import get_session_with_tenant
from onyx.db.image_generation import create_default_image_gen_config_from_api_key
@@ -58,6 +59,7 @@ from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from onyx.setup import setup_onyx
from onyx.utils.logger import setup_logger
from onyx.utils.telemetry import mt_cloud_telemetry
from shared_configs.configs import MULTI_TENANT
from shared_configs.configs import POSTGRES_DEFAULT_SCHEMA
from shared_configs.configs import TENANT_ID_PREFIX
@@ -69,9 +71,7 @@ logger = setup_logger()
async def get_or_provision_tenant(
email: str,
referral_source: str | None = None,
request: Request | None = None,
email: str, referral_source: str | None = None, request: Request | None = None
) -> str:
"""
Get existing tenant ID for an email or create a new tenant if none exists.
@@ -693,6 +693,12 @@ async def assign_tenant_to_user(
try:
add_users_to_tenant([email], tenant_id)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=email,
event=MilestoneRecordType.TENANT_CREATED,
)
except Exception:
logger.exception(f"Failed to assign tenant {tenant_id} to user {email}")
raise Exception("Failed to assign tenant to user")

View File

@@ -9,7 +9,6 @@ from ee.onyx.configs.app_configs import POSTHOG_API_KEY
from ee.onyx.configs.app_configs import POSTHOG_DEBUG_LOGS_ENABLED
from ee.onyx.configs.app_configs import POSTHOG_HOST
from onyx.utils.logger import setup_logger
from shared_configs.configs import MULTI_TENANT
logger = setup_logger()
@@ -19,19 +18,12 @@ def posthog_on_error(error: Any, items: Any) -> None:
logger.error(f"PostHog error: {error}, items: {items}")
posthog: Posthog | None = None
if POSTHOG_API_KEY:
posthog = Posthog(
project_api_key=POSTHOG_API_KEY,
host=POSTHOG_HOST,
debug=POSTHOG_DEBUG_LOGS_ENABLED,
on_error=posthog_on_error,
)
elif MULTI_TENANT:
logger.warning(
"POSTHOG_API_KEY is not set but MULTI_TENANT is enabled — "
"PostHog telemetry and feature flags will be disabled"
)
posthog = Posthog(
project_api_key=POSTHOG_API_KEY,
host=POSTHOG_HOST,
debug=POSTHOG_DEBUG_LOGS_ENABLED,
on_error=posthog_on_error,
)
# For cross referencing between cloud and www Onyx sites
# NOTE: These clients are separate because they are separate posthog projects.
@@ -68,7 +60,7 @@ def capture_and_sync_with_alternate_posthog(
logger.error(f"Error capturing marketing posthog event: {e}")
try:
if posthog and (cloud_user_id := props.get("onyx_cloud_user_id")):
if cloud_user_id := props.get("onyx_cloud_user_id"):
cloud_props = props.copy()
cloud_props.pop("onyx_cloud_user_id", None)

View File

@@ -1,5 +1,3 @@
from typing import Any
from ee.onyx.utils.posthog_client import posthog
from onyx.utils.logger import setup_logger
@@ -7,27 +5,12 @@ logger = setup_logger()
def event_telemetry(
distinct_id: str, event: str, properties: dict[str, Any] | None = None
distinct_id: str, event: str, properties: dict | None = None
) -> None:
"""Capture and send an event to PostHog, flushing immediately."""
if not posthog:
return
logger.info(f"Capturing PostHog event: {distinct_id} {event} {properties}")
try:
posthog.capture(distinct_id, event, properties)
posthog.flush()
except Exception as e:
logger.error(f"Error capturing PostHog event: {e}")
def identify_user(distinct_id: str, properties: dict[str, Any] | None = None) -> None:
"""Create/update a PostHog person profile, flushing immediately."""
if not posthog:
return
try:
posthog.identify(distinct_id, properties)
posthog.flush()
except Exception as e:
logger.error(f"Error identifying PostHog user: {e}")

View File

@@ -19,7 +19,6 @@ from typing import Optional
from typing import Protocol
from typing import Tuple
from typing import TypeVar
from urllib.parse import urlparse
import jwt
from email_validator import EmailNotValidError
@@ -135,7 +134,6 @@ from onyx.redis.redis_pool import retrieve_ws_token_data
from onyx.server.settings.store import load_settings
from onyx.server.utils import BasicAuthenticationError
from onyx.utils.logger import setup_logger
from onyx.utils.telemetry import mt_cloud_identify
from onyx.utils.telemetry import mt_cloud_telemetry
from onyx.utils.telemetry import optional_telemetry
from onyx.utils.telemetry import RecordType
@@ -794,12 +792,6 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
except Exception:
logger.exception("Error deleting anonymous user cookie")
tenant_id = CURRENT_TENANT_ID_CONTEXTVAR.get()
mt_cloud_identify(
distinct_id=str(user.id),
properties={"email": user.email, "tenant_id": tenant_id},
)
async def on_after_register(
self, user: User, request: Optional[Request] = None
) -> None:
@@ -818,25 +810,12 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
user_count = await get_user_count()
logger.debug(f"Current tenant user count: {user_count}")
# Ensure a PostHog person profile exists for this user.
mt_cloud_identify(
distinct_id=str(user.id),
properties={"email": user.email, "tenant_id": tenant_id},
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.USER_SIGNED_UP,
)
if user_count == 1:
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
event=MilestoneRecordType.TENANT_CREATED,
)
finally:
CURRENT_TENANT_ID_CONTEXTVAR.reset(token)
@@ -1673,33 +1652,6 @@ async def _get_user_from_token_data(token_data: dict) -> User | None:
return user
_LOOPBACK_HOSTNAMES = frozenset({"localhost", "127.0.0.1", "::1"})
def _is_same_origin(actual: str, expected: str) -> bool:
"""Compare two origins for the WebSocket CSWSH check.
Scheme and hostname must match exactly. Port must also match, except
when the hostname is a loopback address (localhost / 127.0.0.1 / ::1),
where port is ignored. On loopback, all ports belong to the same
operator, so port differences carry no security significance — the
CSWSH threat is remote origins, not local ones.
"""
a = urlparse(actual.rstrip("/"))
e = urlparse(expected.rstrip("/"))
if a.scheme != e.scheme or a.hostname != e.hostname:
return False
if a.hostname in _LOOPBACK_HOSTNAMES:
return True
actual_port = a.port or (443 if a.scheme == "https" else 80)
expected_port = e.port or (443 if e.scheme == "https" else 80)
return actual_port == expected_port
async def current_user_from_websocket(
websocket: WebSocket,
token: str = Query(..., description="WebSocket authentication token"),
@@ -1719,15 +1671,19 @@ async def current_user_from_websocket(
This applies the same auth checks as current_user() for HTTP endpoints.
"""
# Check Origin header to prevent Cross-Site WebSocket Hijacking (CSWSH).
# Browsers always send Origin on WebSocket connections.
# Check Origin header to prevent Cross-Site WebSocket Hijacking (CSWSH)
# Browsers always send Origin on WebSocket connections
origin = websocket.headers.get("origin")
expected_origin = WEB_DOMAIN.rstrip("/")
if not origin:
logger.warning("WS auth: missing Origin header")
raise BasicAuthenticationError(detail="Access denied. Missing origin.")
if not _is_same_origin(origin, WEB_DOMAIN):
logger.warning(f"WS auth: origin mismatch. Expected {WEB_DOMAIN}, got {origin}")
actual_origin = origin.rstrip("/")
if actual_origin != expected_origin:
logger.warning(
f"WS auth: origin mismatch. Expected {expected_origin}, got {actual_origin}"
)
raise BasicAuthenticationError(detail="Access denied. Invalid origin.")
# Validate WS token in Redis (single-use, deleted after retrieval)

View File

@@ -29,8 +29,6 @@ from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisLocks
from onyx.connectors.factory import ConnectorMissingException
from onyx.connectors.factory import identify_connector_class
from onyx.connectors.factory import instantiate_connector
from onyx.connectors.interfaces import HierarchyConnector
from onyx.connectors.models import HierarchyNode as PydanticHierarchyNode
@@ -57,26 +55,6 @@ logger = setup_logger()
HIERARCHY_FETCH_INTERVAL_SECONDS = 24 * 60 * 60
def _connector_supports_hierarchy_fetching(
cc_pair: ConnectorCredentialPair,
) -> bool:
"""Return True only for connectors whose class implements HierarchyConnector."""
try:
connector_class = identify_connector_class(
cc_pair.connector.source,
)
except ConnectorMissingException as e:
task_logger.warning(
"Skipping hierarchy fetching enqueue for source=%s input_type=%s: %s",
cc_pair.connector.source,
cc_pair.connector.input_type,
str(e),
)
return False
return issubclass(connector_class, HierarchyConnector)
def _is_hierarchy_fetching_due(cc_pair: ConnectorCredentialPair) -> bool:
"""Returns boolean indicating if hierarchy fetching is due for this connector.
@@ -208,10 +186,7 @@ def check_for_hierarchy_fetching(self: Task, *, tenant_id: str) -> int | None:
cc_pair_id=cc_pair_id,
)
if not cc_pair or not _connector_supports_hierarchy_fetching(cc_pair):
continue
if not _is_hierarchy_fetching_due(cc_pair):
if not cc_pair or not _is_hierarchy_fetching_due(cc_pair):
continue
task_id = _try_creating_hierarchy_fetching_task(

View File

@@ -490,13 +490,13 @@ def handle_stream_message_objects(
# Milestone tracking, most devs using the API don't need to understand this
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id) if not user.is_anonymous else tenant_id,
distinct_id=user.email if not user.is_anonymous else tenant_id,
event=MilestoneRecordType.MULTIPLE_ASSISTANTS,
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id) if not user.is_anonymous else tenant_id,
distinct_id=user.email if not user.is_anonymous else tenant_id,
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={
"origin": new_msg_req.origin.value,

View File

@@ -1046,8 +1046,6 @@ POD_NAMESPACE = os.environ.get("POD_NAMESPACE")
DEV_MODE = os.environ.get("DEV_MODE", "").lower() == "true"
HOOK_ENABLED = os.environ.get("HOOK_ENABLED", "").lower() == "true"
INTEGRATION_TESTS_MODE = os.environ.get("INTEGRATION_TESTS_MODE", "").lower() == "true"
#####

View File

@@ -304,13 +304,3 @@ class LLMModelFlowType(str, PyEnum):
CHAT = "chat"
VISION = "vision"
CONTEXTUAL_RAG = "contextual_rag"
class HookPoint(str, PyEnum):
DOCUMENT_INGESTION = "document_ingestion"
QUERY_PROCESSING = "query_processing"
class HookFailStrategy(str, PyEnum):
HARD = "hard" # exception propagates, pipeline aborts
SOFT = "soft" # log error, return original input, pipeline continues

View File

@@ -64,8 +64,6 @@ from onyx.db.enums import (
BuildSessionStatus,
EmbeddingPrecision,
HierarchyNodeType,
HookFailStrategy,
HookPoint,
IndexingMode,
OpenSearchDocumentMigrationStatus,
OpenSearchTenantMigrationStatus,
@@ -5180,90 +5178,3 @@ class CacheStore(Base):
expires_at: Mapped[datetime.datetime | None] = mapped_column(
DateTime(timezone=True), nullable=True
)
class Hook(Base):
    """Pairs a HookPoint with a customer-provided API endpoint.

    At most one non-deleted Hook per HookPoint is allowed, enforced by a
    partial unique index on (hook_point) where deleted=false.
    """

    __tablename__ = "hook"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Human-readable label chosen by the admin.
    name: Mapped[str] = mapped_column(String, nullable=False)
    # Stored as a plain string (native_enum=False), so adding enum values
    # needs no database migration.
    hook_point: Mapped[HookPoint] = mapped_column(
        Enum(HookPoint, native_enum=False), nullable=False
    )
    endpoint_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Encrypted at rest via EncryptedString.
    api_key: Mapped[SensitiveValue[str] | None] = mapped_column(
        EncryptedString(), nullable=True
    )
    is_reachable: Mapped[bool | None] = mapped_column(
        Boolean, nullable=True, default=None
    )  # null = never validated, true = last check passed, false = last check failed
    # HARD aborts the pipeline on failure; SOFT logs and continues.
    fail_strategy: Mapped[HookFailStrategy] = mapped_column(
        Enum(HookFailStrategy, native_enum=False),
        nullable=False,
        default=HookFailStrategy.HARD,
    )
    timeout_seconds: Mapped[float] = mapped_column(Float, nullable=False, default=30.0)
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    # Soft-delete flag; participates in the partial unique index below.
    deleted: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    creator_id: Mapped[UUID | None] = mapped_column(
        PGUUID(as_uuid=True),
        ForeignKey("user.id", ondelete="SET NULL"),
        nullable=True,
    )
    created_at: Mapped[datetime.datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    updated_at: Mapped[datetime.datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )

    creator: Mapped["User | None"] = relationship("User", foreign_keys=[creator_id])
    execution_logs: Mapped[list["HookExecutionLog"]] = relationship(
        "HookExecutionLog", back_populates="hook", cascade="all, delete-orphan"
    )

    __table_args__ = (
        # Enforces "one live hook per hook point" while allowing any number
        # of soft-deleted rows for the same point.
        Index(
            "ix_hook_one_non_deleted_per_point",
            "hook_point",
            unique=True,
            postgresql_where=(deleted == False),  # noqa: E712
        ),
    )
class HookExecutionLog(Base):
    """Records hook executions for health monitoring and debugging.

    Currently only failures are logged; the is_success column exists so
    success logging can be added later without a schema change.
    Retention: rows older than 30 days are deleted by a nightly Celery task.
    """

    __tablename__ = "hook_execution_log"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Logs are removed together with their hook via ON DELETE CASCADE.
    hook_id: Mapped[int] = mapped_column(
        Integer,
        ForeignKey("hook.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    is_success: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    # HTTP status returned by the endpoint, when a response was received at all.
    status_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
    duration_ms: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Indexed so the nightly retention task can delete by age efficiently.
    created_at: Mapped[datetime.datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False, index=True
    )

    hook: Mapped["Hook"] = relationship("Hook", back_populates="execution_logs")

View File

@@ -35,8 +35,6 @@ class OnyxErrorCode(Enum):
INSUFFICIENT_PERMISSIONS = ("INSUFFICIENT_PERMISSIONS", 403)
ADMIN_ONLY = ("ADMIN_ONLY", 403)
EE_REQUIRED = ("EE_REQUIRED", 403)
SINGLE_TENANT_ONLY = ("SINGLE_TENANT_ONLY", 403)
ENV_VAR_GATED = ("ENV_VAR_GATED", 403)
# ------------------------------------------------------------------
# Validation / Bad Request (400)

View File

@@ -1,26 +0,0 @@
from onyx.configs.app_configs import HOOK_ENABLED
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from shared_configs.configs import MULTI_TENANT
def require_hook_enabled() -> None:
    """FastAPI dependency that gates all hook management endpoints.

    Hooks are restricted to single-tenant / self-hosted deployments that
    explicitly opt in via HOOK_ENABLED=true. Two independent guards apply:
    the MULTI_TENANT check rejects cloud deployments even if HOOK_ENABLED
    was set by accident, and the HOOK_ENABLED flag requires an explicit
    operator opt-in. Wire up with: Depends(require_hook_enabled).
    """
    # The tenant guard is evaluated first so it wins when both conditions hold.
    failure: tuple[OnyxErrorCode, str] | None = None
    if MULTI_TENANT:
        failure = (
            OnyxErrorCode.SINGLE_TENANT_ONLY,
            "Hooks are not available in multi-tenant deployments",
        )
    elif not HOOK_ENABLED:
        failure = (
            OnyxErrorCode.ENV_VAR_GATED,
            "Hooks are not enabled. Set HOOK_ENABLED=true to enable.",
        )
    if failure is not None:
        raise OnyxError(*failure)

View File

@@ -1319,7 +1319,7 @@ def get_connector_indexing_status(
# Track admin page visit for analytics
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.VISITED_ADMIN_PAGE,
)
@@ -1533,7 +1533,7 @@ def create_connector_from_model(
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_CONNECTOR,
)
@@ -1611,7 +1611,7 @@ def create_connector_with_mock_credential(
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_CONNECTOR,
)
return response
@@ -1915,7 +1915,9 @@ def submit_connector_request(
if not connector_name:
raise HTTPException(status_code=400, detail="Connector name cannot be empty")
# Get user identifier for telemetry
user_email = user.email
distinct_id = user_email or tenant_id
# Track connector request via PostHog telemetry (Cloud only)
from shared_configs.configs import MULTI_TENANT
@@ -1923,11 +1925,11 @@ def submit_connector_request(
if MULTI_TENANT:
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=distinct_id,
event=MilestoneRecordType.REQUESTED_CONNECTOR,
properties={
"connector_name": connector_name,
"user_email": user.email,
"user_email": user_email,
},
)

View File

@@ -314,7 +314,7 @@ def create_persona(
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_ASSISTANT,
)

View File

@@ -561,7 +561,7 @@ def handle_send_chat_message(
tenant_id = get_current_tenant_id()
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=tenant_id if user.is_anonymous else str(user.id),
distinct_id=tenant_id if user.is_anonymous else user.email,
event=MilestoneRecordType.RAN_QUERY,
)

View File

@@ -2,7 +2,6 @@ import contextvars
import threading
import uuid
from enum import Enum
from typing import Any
import requests
@@ -153,7 +152,7 @@ def mt_cloud_telemetry(
tenant_id: str,
distinct_id: str,
event: MilestoneRecordType,
properties: dict[str, Any] | None = None,
properties: dict | None = None,
) -> None:
if not MULTI_TENANT:
return
@@ -174,18 +173,3 @@ def mt_cloud_telemetry(
attribute="event_telemetry",
fallback=noop_fallback,
)(distinct_id, event, all_properties)
def mt_cloud_identify(
    distinct_id: str,
    properties: dict[str, Any] | None = None,
) -> None:
    """Create/update a PostHog person profile (Cloud only)."""
    if not MULTI_TENANT:
        # Self-hosted installs never send identity data upstream.
        return
    # Resolve the versioned implementation (no-op fallback when absent),
    # then invoke it with the caller-supplied identity payload.
    identify = fetch_versioned_implementation_with_fallback(
        module="onyx.utils.telemetry",
        attribute="identify_user",
        fallback=noop_fallback,
    )
    identify(distinct_id, properties)

View File

@@ -65,7 +65,7 @@ attrs==25.4.0
# jsonschema
# referencing
# zeep
authlib==1.6.9
authlib==1.6.7
# via fastmcp
azure-cognitiveservices-speech==1.38.0
# via onyx

View File

@@ -45,21 +45,6 @@ npx playwright test <TEST_NAME>
Shared fixtures live in `backend/tests/conftest.py`. Test subdirectories can define
their own `conftest.py` for directory-scoped fixtures.
## Running Tests Repeatedly (`pytest-repeat`)
Use `pytest-repeat` to catch flaky tests by running them multiple times:
```bash
# Run a specific test 50 times
pytest --count=50 backend/tests/unit/path/to/test.py::test_name
# Stop on first failure with -x
pytest --count=50 -x backend/tests/unit/path/to/test.py::test_name
# Repeat an entire test file
pytest --count=10 backend/tests/unit/path/to/test_file.py
```
## Best Practices
### Use `enable_ee` fixture instead of inlining

View File

@@ -1,120 +0,0 @@
import pytest
from onyx.auth.users import _is_same_origin
class TestExactMatch:
    """Byte-for-byte identical origins must always compare equal."""

    @pytest.mark.parametrize(
        "url",
        [
            "http://localhost:3000",
            "https://app.example.com",
            "https://app.example.com:8443",
            "http://127.0.0.1:8080",
        ],
    )
    def test_identical_origins(self, url: str) -> None:
        # Comparing an origin against itself is the trivial accept case.
        assert _is_same_origin(url, url)
class TestLoopbackPortRelaxation:
    """Loopback hosts compare equal even when their ports differ."""

    @pytest.mark.parametrize(
        "candidate,reference",
        [
            ("http://localhost:3001", "http://localhost:3000"),
            ("http://localhost:8080", "http://localhost:3000"),
            ("http://localhost", "http://localhost:3000"),
            ("http://127.0.0.1:3001", "http://127.0.0.1:3000"),
            ("http://[::1]:3001", "http://[::1]:3000"),
        ],
    )
    def test_loopback_different_ports_accepted(
        self, candidate: str, reference: str
    ) -> None:
        # Dev servers bind arbitrary local ports, so port is ignored on loopback.
        assert _is_same_origin(candidate, reference)

    @pytest.mark.parametrize(
        "candidate,reference",
        [
            ("https://localhost:3001", "http://localhost:3000"),
            ("http://localhost:3001", "https://localhost:3000"),
        ],
    )
    def test_loopback_different_scheme_rejected(
        self, candidate: str, reference: str
    ) -> None:
        # The scheme must still match even on loopback.
        assert not _is_same_origin(candidate, reference)

    def test_loopback_hostname_mismatch_rejected(self) -> None:
        # localhost and 127.0.0.1 are distinct hostnames; no relaxation applies.
        assert not _is_same_origin("http://localhost:3001", "http://127.0.0.1:3000")
class TestNonLoopbackStrictPort:
    """Non-loopback origins require scheme, hostname, AND port to all match."""

    BASE = "https://app.example.com"

    def test_different_port_rejected(self) -> None:
        assert not _is_same_origin("https://app.example.com:8443", self.BASE)

    def test_different_hostname_rejected(self) -> None:
        assert not _is_same_origin("https://evil.com", self.BASE)

    def test_different_scheme_rejected(self) -> None:
        assert not _is_same_origin("http://app.example.com", self.BASE)

    def test_same_port_explicit(self) -> None:
        # Explicitly identical ports are accepted.
        assert _is_same_origin(
            "https://app.example.com:443", "https://app.example.com:443"
        )
class TestDefaultPortNormalization:
    """An omitted port must compare equal to the scheme's explicit default."""

    def test_http_implicit_vs_explicit_80(self) -> None:
        implicit, explicit = "http://example.com", "http://example.com:80"
        assert _is_same_origin(implicit, explicit)

    def test_http_explicit_80_vs_implicit(self) -> None:
        explicit, implicit = "http://example.com:80", "http://example.com"
        assert _is_same_origin(explicit, implicit)

    def test_https_implicit_vs_explicit_443(self) -> None:
        implicit, explicit = "https://example.com", "https://example.com:443"
        assert _is_same_origin(implicit, explicit)

    def test_https_explicit_443_vs_implicit(self) -> None:
        explicit, implicit = "https://example.com:443", "https://example.com"
        assert _is_same_origin(explicit, implicit)

    def test_http_non_default_port_vs_implicit_rejected(self) -> None:
        # 8080 is not HTTP's default port, so normalization must not apply.
        assert not _is_same_origin("http://example.com:8080", "http://example.com")
class TestTrailingSlash:
    """A trailing slash is cosmetic and must not affect the comparison."""

    BARE = "https://app.example.com"
    SLASHED = "https://app.example.com/"

    def test_trailing_slash_on_actual(self) -> None:
        assert _is_same_origin(self.SLASHED, self.BARE)

    def test_trailing_slash_on_expected(self) -> None:
        assert _is_same_origin(self.BARE, self.SLASHED)

    def test_trailing_slash_on_both(self) -> None:
        assert _is_same_origin(self.SLASHED, self.SLASHED)
class TestCSWSHScenarios:
    """Cross-site WebSocket hijacking attempts that must all be rejected."""

    def test_remote_attacker_rejected(self) -> None:
        assert not _is_same_origin("https://evil.com", "http://localhost:3000")

    def test_remote_attacker_same_port_rejected(self) -> None:
        # A matching port alone must not be enough when the host differs.
        assert not _is_same_origin("http://evil.com:3000", "http://localhost:3000")

    def test_remote_attacker_matching_hostname_different_port(self) -> None:
        # Same hostname on an unexpected port is still a different origin.
        assert not _is_same_origin(
            "https://app.example.com:9999", "https://app.example.com"
        )

View File

@@ -1,194 +0,0 @@
from unittest.mock import MagicMock
from unittest.mock import patch
from onyx.background.celery.tasks.hierarchyfetching.tasks import (
_connector_supports_hierarchy_fetching,
)
from onyx.background.celery.tasks.hierarchyfetching.tasks import (
check_for_hierarchy_fetching,
)
from onyx.connectors.factory import ConnectorMissingException
from onyx.connectors.interfaces import BaseConnector
from onyx.connectors.interfaces import HierarchyConnector
from onyx.connectors.interfaces import HierarchyOutput
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
TASKS_MODULE = "onyx.background.celery.tasks.hierarchyfetching.tasks"
class _NonHierarchyConnector(BaseConnector):
    """Connector stub WITHOUT hierarchy support; drives the 'unsupported' path."""

    def load_credentials(self, credentials: dict) -> dict | None:  # noqa: ARG002
        return None
class _HierarchyCapableConnector(HierarchyConnector):
    """Connector stub implementing HierarchyConnector; drives the 'supported' path."""

    def load_credentials(self, credentials: dict) -> dict | None:  # noqa: ARG002
        return None

    def load_hierarchy(
        self,
        start: SecondsSinceUnixEpoch,  # noqa: ARG002
        end: SecondsSinceUnixEpoch,  # noqa: ARG002
    ) -> HierarchyOutput:
        # 'return' before 'yield' makes this an empty generator: the method
        # satisfies the interface without producing any items.
        return
        yield
def _build_cc_pair_mock() -> MagicMock:
cc_pair = MagicMock()
cc_pair.connector.source = "mock-source"
cc_pair.connector.input_type = "mock-input-type"
return cc_pair
def _build_redis_mock_with_lock() -> tuple[MagicMock, MagicMock]:
redis_client = MagicMock()
lock = MagicMock()
lock.acquire.return_value = True
lock.owned.return_value = True
redis_client.lock.return_value = lock
return redis_client, lock
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_false_for_non_hierarchy_connector(
    mock_identify_connector_class: MagicMock,
) -> None:
    # A resolved class that is not a HierarchyConnector must be reported
    # as unsupported, and the lookup is keyed by the connector's source.
    mock_identify_connector_class.return_value = _NonHierarchyConnector
    assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is False
    mock_identify_connector_class.assert_called_once_with("mock-source")
@patch(f"{TASKS_MODULE}.task_logger.warning")
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_false_when_class_missing(
    mock_identify_connector_class: MagicMock,
    mock_warning: MagicMock,
) -> None:
    # NOTE: @patch decorators bind bottom-up, so the first parameter maps to
    # the last decorator listed.
    # An unresolvable connector class is logged as a warning and treated as
    # unsupported rather than raising out of the beat task.
    mock_identify_connector_class.side_effect = ConnectorMissingException("missing")
    assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is False
    mock_warning.assert_called_once()
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_true_for_supported_connector(
    mock_identify_connector_class: MagicMock,
) -> None:
    # A HierarchyConnector subclass must be reported as supported.
    mock_identify_connector_class.return_value = _HierarchyCapableConnector
    assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is True
    mock_identify_connector_class.assert_called_once_with("mock-source")
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_skips_unsupported_connectors(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # NOTE: @patch decorators bind bottom-up: the first parameter corresponds
    # to the last decorator listed.
    # Arrange: one cc_pair exists but its connector lacks hierarchy support.
    redis_client, lock = _build_redis_mock_with_lock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = MagicMock()
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = _build_cc_pair_mock()
    mock_supports_hierarchy_fetching.return_value = False
    mock_is_due.return_value = True
    task_app = MagicMock()
    # Act: run the task body directly with the Celery app patched in.
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    # Assert: the unsupported connector is skipped before the due-check, no
    # fetch task is created, and the coordination lock is always released.
    assert result == 0
    mock_is_due.assert_not_called()
    mock_try_create_task.assert_not_called()
    lock.release.assert_called_once()
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_creates_task_for_supported_due_connector(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # NOTE: @patch decorators bind bottom-up: the first parameter corresponds
    # to the last decorator listed.
    # Arrange: a supported connector that is due for a hierarchy fetch.
    redis_client, lock = _build_redis_mock_with_lock()
    cc_pair = _build_cc_pair_mock()
    db_session = MagicMock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = db_session
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = cc_pair
    mock_supports_hierarchy_fetching.return_value = True
    mock_is_due.return_value = True
    mock_try_create_task.return_value = "task-id"
    task_app = MagicMock()
    # Act: run the task body directly with the Celery app patched in.
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    # Assert: exactly one fetch task was created with the expected wiring,
    # and the coordination lock was released.
    assert result == 1
    mock_is_due.assert_called_once_with(cc_pair)
    mock_try_create_task.assert_called_once_with(
        celery_app=task_app,
        cc_pair=cc_pair,
        db_session=db_session,
        r=redis_client,
        tenant_id="test-tenant",
    )
    lock.release.assert_called_once()
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_skips_supported_connector_when_not_due(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # NOTE: @patch decorators bind bottom-up: the first parameter corresponds
    # to the last decorator listed.
    # Arrange: a supported connector that is NOT yet due for a fetch.
    redis_client, lock = _build_redis_mock_with_lock()
    cc_pair = _build_cc_pair_mock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = MagicMock()
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = cc_pair
    mock_supports_hierarchy_fetching.return_value = True
    mock_is_due.return_value = False
    task_app = MagicMock()
    # Act: run the task body directly with the Celery app patched in.
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    # Assert: the due-check ran, no task was created, and the lock released.
    assert result == 0
    mock_is_due.assert_called_once_with(cc_pair)
    mock_try_create_task.assert_not_called()
    lock.release.assert_called_once()

View File

@@ -1,40 +0,0 @@
"""Unit tests for the hooks feature gate."""
from unittest.mock import patch
import pytest
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.hooks.api_dependencies import require_hook_enabled
class TestRequireHookEnabled:
    """Behavior of the require_hook_enabled feature-gate dependency."""

    def test_raises_when_multi_tenant(self) -> None:
        # The tenant guard wins even when the env flag is (mistakenly) on.
        with patch("onyx.hooks.api_dependencies.MULTI_TENANT", True):
            with patch("onyx.hooks.api_dependencies.HOOK_ENABLED", True):
                with pytest.raises(OnyxError) as exc_info:
                    require_hook_enabled()
        raised = exc_info.value
        assert raised.error_code is OnyxErrorCode.SINGLE_TENANT_ONLY
        assert raised.status_code == 403
        assert "multi-tenant" in raised.detail

    def test_raises_when_flag_disabled(self) -> None:
        # Single-tenant but without the explicit opt-in flag.
        with patch("onyx.hooks.api_dependencies.MULTI_TENANT", False):
            with patch("onyx.hooks.api_dependencies.HOOK_ENABLED", False):
                with pytest.raises(OnyxError) as exc_info:
                    require_hook_enabled()
        raised = exc_info.value
        assert raised.error_code is OnyxErrorCode.ENV_VAR_GATED
        assert raised.status_code == 403
        assert "HOOK_ENABLED" in raised.detail

    def test_passes_when_enabled_single_tenant(self) -> None:
        with patch("onyx.hooks.api_dependencies.MULTI_TENANT", False):
            with patch("onyx.hooks.api_dependencies.HOOK_ENABLED", True):
                require_hook_enabled()  # must not raise

View File

@@ -17,7 +17,7 @@ def test_mt_cloud_telemetry_noop_when_not_multi_tenant(monkeypatch: Any) -> None
telemetry_utils.mt_cloud_telemetry(
tenant_id="tenant-1",
distinct_id="12345678-1234-1234-1234-123456789abc",
distinct_id="user@example.com",
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={"origin": "web"},
)
@@ -40,7 +40,7 @@ def test_mt_cloud_telemetry_calls_event_telemetry_when_multi_tenant(
telemetry_utils.mt_cloud_telemetry(
tenant_id="tenant-1",
distinct_id="12345678-1234-1234-1234-123456789abc",
distinct_id="user@example.com",
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={"origin": "web"},
)
@@ -51,52 +51,7 @@ def test_mt_cloud_telemetry_calls_event_telemetry_when_multi_tenant(
fallback=telemetry_utils.noop_fallback,
)
event_telemetry.assert_called_once_with(
"12345678-1234-1234-1234-123456789abc",
"user@example.com",
MilestoneRecordType.USER_MESSAGE_SENT,
{"origin": "web", "tenant_id": "tenant-1"},
)
def test_mt_cloud_identify_noop_when_not_multi_tenant(monkeypatch: Any) -> None:
    # With MULTI_TENANT disabled, mt_cloud_identify must return early and
    # never even look up the versioned implementation.
    fetch_impl = Mock()
    monkeypatch.setattr(
        telemetry_utils,
        "fetch_versioned_implementation_with_fallback",
        fetch_impl,
    )
    monkeypatch.setattr("onyx.utils.telemetry.MULTI_TENANT", False)
    telemetry_utils.mt_cloud_identify(
        distinct_id="12345678-1234-1234-1234-123456789abc",
        properties={"email": "user@example.com"},
    )
    fetch_impl.assert_not_called()
def test_mt_cloud_identify_calls_identify_user_when_multi_tenant(
    monkeypatch: Any,
) -> None:
    # With MULTI_TENANT enabled, the versioned 'identify_user' implementation
    # must be resolved (with noop fallback) and invoked with the caller's
    # distinct_id and properties unchanged.
    identify_user = Mock()
    fetch_impl = Mock(return_value=identify_user)
    monkeypatch.setattr(
        telemetry_utils,
        "fetch_versioned_implementation_with_fallback",
        fetch_impl,
    )
    monkeypatch.setattr("onyx.utils.telemetry.MULTI_TENANT", True)
    telemetry_utils.mt_cloud_identify(
        distinct_id="12345678-1234-1234-1234-123456789abc",
        properties={"email": "user@example.com"},
    )
    fetch_impl.assert_called_once_with(
        module="onyx.utils.telemetry",
        attribute="identify_user",
        fallback=telemetry_utils.noop_fallback,
    )
    identify_user.assert_called_once_with(
        "12345678-1234-1234-1234-123456789abc",
        {"email": "user@example.com"},
    )

View File

@@ -32,17 +32,15 @@ def test_run_with_timeout_raises_on_timeout(slow: float, timeout: float) -> None
"""Test that a function that exceeds timeout raises TimeoutError"""
def slow_function() -> None:
time.sleep(slow)
time.sleep(slow) # Sleep for 2 seconds
start = time.monotonic()
with pytest.raises(TimeoutError) as exc_info:
run_with_timeout(timeout, slow_function)
elapsed = time.monotonic() - start
start = time.time()
run_with_timeout(timeout, slow_function) # Set timeout to 0.1 seconds
end = time.time()
assert end - start >= timeout
assert end - start < (slow + timeout) / 2
assert f"timed out after {timeout} seconds" in str(exc_info.value)
assert elapsed >= timeout
# Should return around the timeout duration, not the full sleep duration
assert elapsed == pytest.approx(timeout, abs=0.8)
@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning")

View File

@@ -15,9 +15,8 @@
# -f docker-compose.dev.yml up -d --wait
#
# This overlay:
# - Moves Vespa (index), both model servers, OpenSearch, MinIO,
# Redis (cache), and the background worker to profiles so they do
# not start by default
# - Moves Vespa (index), both model servers, code-interpreter, Redis (cache),
# and the background worker to profiles so they do not start by default
# - Makes depends_on references to removed services optional
# - Sets DISABLE_VECTOR_DB=true on the api_server
# - Uses PostgreSQL for caching and auth instead of Redis
@@ -28,8 +27,7 @@
# --profile inference Inference model server
# --profile background Background worker (Celery) — also needs redis
# --profile redis Redis cache
# --profile opensearch OpenSearch
# --profile s3-filestore MinIO (S3-compatible file store)
# --profile code-interpreter Code interpreter
# =============================================================================
name: onyx
@@ -40,9 +38,6 @@ services:
index:
condition: service_started
required: false
opensearch:
condition: service_started
required: false
cache:
condition: service_started
required: false
@@ -89,10 +84,4 @@ services:
inference_model_server:
profiles: ["inference"]
# OpenSearch is not needed in lite mode (no indexing).
opensearch:
profiles: ["opensearch"]
# MinIO is not needed in lite mode (Postgres handles file storage).
minio:
profiles: ["s3-filestore"]
code-interpreter: {}

View File

@@ -1,8 +1,8 @@
#!/bin/bash
set -euo pipefail
set -e
# Expected resource requirements (overridden below if --lite)
# Expected resource requirements
EXPECTED_DOCKER_RAM_GB=10
EXPECTED_DISK_GB=32
@@ -10,11 +10,6 @@ EXPECTED_DISK_GB=32
SHUTDOWN_MODE=false
DELETE_DATA_MODE=false
INCLUDE_CRAFT=false # Disabled by default, use --include-craft to enable
LITE_MODE=false # Disabled by default, use --lite to enable
USE_LOCAL_FILES=false # Disabled by default, use --local to skip downloading config files
NO_PROMPT=false
DRY_RUN=false
VERBOSE=false
while [[ $# -gt 0 ]]; do
case $1 in
@@ -30,26 +25,6 @@ while [[ $# -gt 0 ]]; do
INCLUDE_CRAFT=true
shift
;;
--lite)
LITE_MODE=true
shift
;;
--local)
USE_LOCAL_FILES=true
shift
;;
--no-prompt)
NO_PROMPT=true
shift
;;
--dry-run)
DRY_RUN=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
--help|-h)
echo "Onyx Installation Script"
echo ""
@@ -57,23 +32,15 @@ while [[ $# -gt 0 ]]; do
echo ""
echo "Options:"
echo " --include-craft Enable Onyx Craft (AI-powered web app building)"
echo " --lite Deploy Onyx Lite (no Vespa, Redis, or model servers)"
echo " --local Use existing config files instead of downloading from GitHub"
echo " --shutdown Stop (pause) Onyx containers"
echo " --delete-data Remove all Onyx data (containers, volumes, and files)"
echo " --no-prompt Run non-interactively with defaults (for CI/automation)"
echo " --dry-run Show what would be done without making changes"
echo " --verbose Show detailed output for debugging"
echo " --help, -h Show this help message"
echo ""
echo "Examples:"
echo " $0 # Install Onyx"
echo " $0 --lite # Install Onyx Lite (minimal deployment)"
echo " $0 --include-craft # Install Onyx with Craft enabled"
echo " $0 --shutdown # Pause Onyx services"
echo " $0 --delete-data # Completely remove Onyx and all data"
echo " $0 --local # Re-run using existing config files on disk"
echo " $0 --no-prompt # Non-interactive install with defaults"
exit 0
;;
*)
@@ -84,129 +51,8 @@ while [[ $# -gt 0 ]]; do
esac
done
if [[ "$VERBOSE" = true ]]; then
set -x
fi
if [[ "$LITE_MODE" = true ]] && [[ "$INCLUDE_CRAFT" = true ]]; then
echo "ERROR: --lite and --include-craft cannot be used together."
echo "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# When --lite is passed as a flag, lower resource thresholds early (before the
# resource check). When lite is chosen interactively, the thresholds are adjusted
# inside the new-deployment flow, after the resource check has already passed
# with the standard thresholds — which is the safer direction.
if [[ "$LITE_MODE" = true ]]; then
EXPECTED_DOCKER_RAM_GB=4
EXPECTED_DISK_GB=16
fi
INSTALL_ROOT="${INSTALL_PREFIX:-onyx_data}"
LITE_COMPOSE_FILE="docker-compose.onyx-lite.yml"
# Build the -f flags for docker compose.
# Pass "true" as $1 to auto-detect a previously-downloaded lite overlay
# (used by shutdown/delete-data so users don't need to remember --lite).
# Without the argument, the lite overlay is only included when --lite was
# explicitly passed — preventing install/start from silently staying in
# lite mode just because the file exists on disk from a prior run.
compose_file_args() {
    # Build the -f flag list for docker compose.
    # $1 (optional): "true" to auto-detect a previously-downloaded lite overlay.
    local auto_detect="${1:-false}"
    local args="-f docker-compose.yml"
    # Include the lite overlay when --lite was passed, or when auto-detecting
    # and the overlay file already exists on disk from a prior run.
    if [[ "$LITE_MODE" = true ]] || { [[ "$auto_detect" = true ]] && [[ -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" ]]; }; then
        args="$args -f ${LITE_COMPOSE_FILE}"
    fi
    echo "$args"
}
# --- Downloader detection (curl with wget fallback) ---
DOWNLOADER=""
detect_downloader() {
    # Prefer curl; fall back to wget; abort if neither tool is installed.
    if command -v curl &> /dev/null; then
        DOWNLOADER="curl"
        return 0
    fi
    if command -v wget &> /dev/null; then
        DOWNLOADER="wget"
        return 0
    fi
    echo "ERROR: Neither curl nor wget found. Please install one and retry."
    exit 1
}
detect_downloader
download_file() {
    # Download $1 (url) to $2 (output path) with the downloader chosen by
    # detect_downloader, retrying transient failures.
    local url="$1"
    local output="$2"
    if [[ "$DOWNLOADER" == "curl" ]]; then
        curl -fsSL --retry 3 --retry-delay 2 --retry-connrefused -o "$output" "$url"
    else
        wget -q --tries=3 --timeout=20 -O "$output" "$url"
    fi
}
# Ensures a required file is present. With --local, verifies the file exists on
# disk. Otherwise, downloads it from the given URL. Returns 0 on success, 1 on
# failure (caller should handle the exit).
ensure_file() {
    # Ensure $1 (path) is present: with --local, just verify it exists on
    # disk; otherwise download it from $2 (url). $3 is a human-readable
    # description used in status messages.
    # Returns 0 on success, 1 on failure (caller decides whether to exit).
    local path="$1"
    local url="$2"
    local desc="$3"
    if [[ "$USE_LOCAL_FILES" = true ]]; then
        if [[ -f "$path" ]]; then
            print_success "Using existing ${desc}"
            return 0
        fi
        print_error "Required file missing: ${desc} (${path})"
        return 1
    fi
    print_info "Downloading ${desc}..."
    if download_file "$url" "$path" 2>/dev/null; then
        print_success "${desc} downloaded"
        return 0
    fi
    print_error "Failed to download ${desc}"
    print_info "Please ensure you have internet connection and try again"
    return 1
}
# --- Interactive prompt helpers ---
is_interactive() {
    # Prompting is allowed only when --no-prompt is unset AND stdin is a TTY.
    [[ "$NO_PROMPT" = false ]] && [[ -t 0 ]]
}
prompt_or_default() {
    # Read a line into REPLY, falling back to $2 (default) when the user
    # enters nothing or the script is running non-interactively.
    local prompt_text="$1"
    local default_value="$2"
    if is_interactive; then
        read -p "$prompt_text" -r REPLY
        if [[ -z "$REPLY" ]]; then
            REPLY="$default_value"
        fi
    else
        REPLY="$default_value"
    fi
}
prompt_yn_or_default() {
    # Single-keystroke yes/no prompt into REPLY, with the same default
    # fallback rules as prompt_or_default.
    local prompt_text="$1"
    local default_value="$2"
    if is_interactive; then
        read -p "$prompt_text" -n 1 -r
        echo ""
        if [[ -z "$REPLY" ]]; then
            REPLY="$default_value"
        fi
    else
        REPLY="$default_value"
    fi
}
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
@@ -265,7 +111,7 @@ if [ "$SHUTDOWN_MODE" = true ]; then
fi
# Stop containers (without removing them)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) stop)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml stop)
if [ $? -eq 0 ]; then
print_success "Onyx containers stopped (paused)"
else
@@ -294,17 +140,12 @@ if [ "$DELETE_DATA_MODE" = true ]; then
echo " • All downloaded files and configurations"
echo " • All user data and documents"
echo ""
if is_interactive; then
read -p "Are you sure you want to continue? Type 'DELETE' to confirm: " -r
echo ""
if [ "$REPLY" != "DELETE" ]; then
print_info "Operation cancelled."
exit 0
fi
else
print_error "Cannot confirm destructive operation in non-interactive mode."
print_info "Run interactively or remove the ${INSTALL_ROOT} directory manually."
exit 1
read -p "Are you sure you want to continue? Type 'DELETE' to confirm: " -r
echo ""
if [ "$REPLY" != "DELETE" ]; then
print_info "Operation cancelled."
exit 0
fi
print_info "Removing Onyx containers and volumes..."
@@ -323,7 +164,7 @@ if [ "$DELETE_DATA_MODE" = true ]; then
fi
# Stop and remove containers with volumes
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) down -v)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml down -v)
if [ $? -eq 0 ]; then
print_success "Onyx containers and volumes removed"
else
@@ -345,117 +186,6 @@ if [ "$DELETE_DATA_MODE" = true ]; then
exit 0
fi
# --- Auto-install Docker (Linux only) ---
# Runs before the banner so a group-based re-exec doesn't repeat it.
install_docker_linux() {
    # Install the Docker engine on a Linux host, then start it and enable
    # it at boot. Amazon Linux is installed via its native package manager;
    # every other distro goes through Docker's get.docker.com script.
    local distro=""
    if [[ -f /etc/os-release ]]; then
        distro="$(. /etc/os-release && echo "${ID:-}")"
    fi
    if [[ "$distro" == "amzn" ]]; then
        print_info "Detected Amazon Linux — installing Docker via package manager..."
        # Newer Amazon Linux ships dnf; older releases only have yum.
        if command -v dnf &> /dev/null; then
            sudo dnf install -y docker
        else
            sudo yum install -y docker
        fi
    else
        print_info "Installing Docker via get.docker.com..."
        download_file "https://get.docker.com" /tmp/get-docker.sh
        sudo sh /tmp/get-docker.sh
        rm -f /tmp/get-docker.sh
    fi
    # Start the daemon now and enable it at boot; tolerate hosts without
    # systemd (e.g. some WSL setups) by falling back to `service` / no-op.
    sudo systemctl start docker 2>/dev/null || sudo service docker start 2>/dev/null || true
    sudo systemctl enable docker 2>/dev/null || true
}
# Detect OS (including WSL)
IS_WSL=false
if [[ -n "${WSL_DISTRO_NAME:-}" ]] || grep -qi microsoft /proc/version 2>/dev/null; then
IS_WSL=true
fi
# Dry-run: show plan and exit
if [[ "$DRY_RUN" = true ]]; then
print_info "Dry run mode — showing what would happen:"
echo " • Install root: ${INSTALL_ROOT}"
echo " • Lite mode: ${LITE_MODE}"
echo " • Include Craft: ${INCLUDE_CRAFT}"
echo " • OS type: ${OSTYPE:-unknown} (WSL: ${IS_WSL})"
echo " • Downloader: ${DOWNLOADER}"
echo ""
print_success "Dry run complete (no changes made)"
exit 0
fi
if ! command -v docker &> /dev/null; then
if [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; then
install_docker_linux
if ! command -v docker &> /dev/null; then
print_error "Docker installation failed."
echo " Visit: https://docs.docker.com/get-docker/"
exit 1
fi
print_success "Docker installed successfully"
fi
fi
# --- Auto-install Docker Compose plugin (Linux only) ---
if command -v docker &> /dev/null \
&& ! docker compose version &> /dev/null \
&& ! command -v docker-compose &> /dev/null \
&& { [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; }; then
print_info "Docker Compose not found — installing plugin..."
COMPOSE_ARCH="$(uname -m)"
COMPOSE_URL="https://github.com/docker/compose/releases/latest/download/docker-compose-linux-${COMPOSE_ARCH}"
COMPOSE_DIR="/usr/local/lib/docker/cli-plugins"
COMPOSE_TMP="$(mktemp)"
sudo mkdir -p "$COMPOSE_DIR"
if download_file "$COMPOSE_URL" "$COMPOSE_TMP"; then
sudo mv "$COMPOSE_TMP" "$COMPOSE_DIR/docker-compose"
sudo chmod +x "$COMPOSE_DIR/docker-compose"
if docker compose version &> /dev/null; then
print_success "Docker Compose plugin installed"
else
print_error "Docker Compose plugin installed but not detected."
echo " Visit: https://docs.docker.com/compose/install/"
exit 1
fi
else
rm -f "$COMPOSE_TMP"
print_error "Failed to download Docker Compose plugin."
echo " Visit: https://docs.docker.com/compose/install/"
exit 1
fi
fi
# On Linux, ensure the current user can talk to the Docker daemon without
# sudo. If necessary, add them to the "docker" group and re-exec the
# script under that group so the rest of the install proceeds normally.
if command -v docker &> /dev/null \
&& { [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; } \
&& [[ "$(id -u)" -ne 0 ]] \
&& ! docker info &> /dev/null; then
if [[ "${_ONYX_REEXEC:-}" = "1" ]]; then
print_error "Cannot connect to Docker after group re-exec."
print_info "Log out and back in, then run the script again."
exit 1
fi
if ! getent group docker &> /dev/null; then
sudo groupadd docker
fi
print_info "Adding $USER to the docker group..."
sudo usermod -aG docker "$USER"
print_info "Re-launching with docker group active..."
exec sg docker -c "_ONYX_REEXEC=1 bash $(printf '%q ' "$0" "$@")"
fi
# ASCII Art Banner
echo ""
echo -e "${BLUE}${BOLD}"
@@ -479,7 +209,8 @@ echo "2. Check your system resources (Docker, memory, disk space)"
echo "3. Guide you through deployment options (version, authentication)"
echo ""
if is_interactive; then
# Only prompt for acknowledgment if running interactively
if [ -t 0 ]; then
echo -e "${YELLOW}${BOLD}Please acknowledge and press Enter to continue...${NC}"
read -r
echo ""
@@ -529,35 +260,41 @@ else
exit 1
fi
# Compare two dotted version numbers.
# Returns 0 if $1 <= $2, 1 if $1 > $2.
# Handles missing or non-numeric parts gracefully (treats them as 0),
# so inputs like "1.2" or "dev" never break the numeric tests below.
version_compare() {
    local version1="${1:-0.0.0}"
    local version2="${2:-0.0.0}"
    local v1_major v1_minor v1_patch v2_major v2_minor v2_patch
    # Split each version into major.minor.patch components.
    v1_major=$(echo "$version1" | cut -d. -f1)
    v1_minor=$(echo "$version1" | cut -d. -f2)
    v1_patch=$(echo "$version1" | cut -d. -f3)
    v2_major=$(echo "$version2" | cut -d. -f1)
    v2_minor=$(echo "$version2" | cut -d. -f2)
    v2_patch=$(echo "$version2" | cut -d. -f3)
    # Default non-numeric or empty parts to 0 so the -lt/-gt tests are safe.
    [[ "$v1_major" =~ ^[0-9]+$ ]] || v1_major=0
    [[ "$v1_minor" =~ ^[0-9]+$ ]] || v1_minor=0
    [[ "$v1_patch" =~ ^[0-9]+$ ]] || v1_patch=0
    [[ "$v2_major" =~ ^[0-9]+$ ]] || v2_major=0
    [[ "$v2_minor" =~ ^[0-9]+$ ]] || v2_minor=0
    [[ "$v2_patch" =~ ^[0-9]+$ ]] || v2_patch=0
    # Compare component by component, most significant first.
    if [ "$v1_major" -lt "$v2_major" ]; then return 0
    elif [ "$v1_major" -gt "$v2_major" ]; then return 1; fi
    if [ "$v1_minor" -lt "$v2_minor" ]; then return 0
    elif [ "$v1_minor" -gt "$v2_minor" ]; then return 1; fi
    # Majors and minors tie: the patch comparison decides the result.
    [ "$v1_patch" -le "$v2_patch" ]
}
# Check Docker daemon
@@ -599,20 +336,10 @@ fi
# Convert to GB for display
if [ "$MEMORY_MB" -gt 0 ]; then
MEMORY_GB=$(awk "BEGIN {printf \"%.1f\", $MEMORY_MB / 1024}")
if [ "$(awk "BEGIN {print ($MEMORY_MB >= 1024)}")" = "1" ]; then
MEMORY_DISPLAY="~${MEMORY_GB}GB"
else
MEMORY_DISPLAY="${MEMORY_MB}MB"
fi
if [[ "$OSTYPE" == "darwin"* ]]; then
print_info "Docker memory allocation: ${MEMORY_DISPLAY}"
else
print_info "System memory: ${MEMORY_DISPLAY} (Docker uses host memory directly)"
fi
MEMORY_GB=$((MEMORY_MB / 1024))
print_info "Docker memory allocation: ~${MEMORY_GB}GB"
else
print_warning "Could not determine memory allocation"
MEMORY_DISPLAY="unknown"
print_warning "Could not determine Docker memory allocation"
MEMORY_MB=0
fi
@@ -631,7 +358,7 @@ RESOURCE_WARNING=false
EXPECTED_RAM_MB=$((EXPECTED_DOCKER_RAM_GB * 1024))
if [ "$MEMORY_MB" -gt 0 ] && [ "$MEMORY_MB" -lt "$EXPECTED_RAM_MB" ]; then
print_warning "Less than ${EXPECTED_DOCKER_RAM_GB}GB RAM available (found: ${MEMORY_DISPLAY})"
print_warning "Docker has less than ${EXPECTED_DOCKER_RAM_GB}GB RAM allocated (found: ~${MEMORY_GB}GB)"
RESOURCE_WARNING=true
fi
@@ -642,10 +369,10 @@ fi
if [ "$RESOURCE_WARNING" = true ]; then
echo ""
print_warning "Onyx recommends at least ${EXPECTED_DOCKER_RAM_GB}GB RAM and ${EXPECTED_DISK_GB}GB disk space for optimal performance in standard mode."
print_warning "Lite mode requires less resources (1-4GB RAM, 8-16GB disk depending on usage), but does not include a vector database."
print_warning "Onyx recommends at least ${EXPECTED_DOCKER_RAM_GB}GB RAM and ${EXPECTED_DISK_GB}GB disk space for optimal performance."
echo ""
read -p "Do you want to continue anyway? (y/N): " -n 1 -r
echo ""
prompt_yn_or_default "Do you want to continue anyway? (Y/n): " "y"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please allocate more resources and try again."
exit 1
@@ -658,89 +385,117 @@ print_step "Creating directory structure"
if [ -d "${INSTALL_ROOT}" ]; then
print_info "Directory structure already exists"
print_success "Using existing ${INSTALL_ROOT} directory"
else
mkdir -p "${INSTALL_ROOT}/deployment"
mkdir -p "${INSTALL_ROOT}/data/nginx/local"
print_success "Directory structure created"
fi
mkdir -p "${INSTALL_ROOT}/deployment"
mkdir -p "${INSTALL_ROOT}/data/nginx/local"
print_success "Directory structure created"
# Ensure all required configuration files are present
# Download all required files
print_step "Downloading Onyx configuration files"
print_info "This step downloads all necessary configuration files from GitHub..."
echo ""
print_info "Downloading the following files:"
echo " • docker-compose.yml - Main Docker Compose configuration"
echo " • env.template - Environment variables template"
echo " • nginx/app.conf.template - Nginx web server configuration"
echo " • nginx/run-nginx.sh - Nginx startup script"
echo " • README.md - Documentation and setup instructions"
echo ""
# Download Docker Compose file
COMPOSE_FILE="${INSTALL_ROOT}/deployment/docker-compose.yml"
print_info "Downloading docker-compose.yml..."
if curl -fsSL -o "$COMPOSE_FILE" "${GITHUB_RAW_URL}/docker-compose.yml" 2>/dev/null; then
print_success "Docker Compose file downloaded successfully"
# Check if Docker Compose version is older than 2.24.0 and show warning
# Skip check for dev builds (assume they're recent enough)
if [ "$COMPOSE_VERSION" != "dev" ] && version_compare "$COMPOSE_VERSION" "2.24.0"; then
print_warning "Docker Compose version $COMPOSE_VERSION is older than 2.24.0"
echo ""
print_warning "The docker-compose.yml file uses the newer env_file format that requires Docker Compose 2.24.0 or later."
echo ""
print_info "To use this configuration with your current Docker Compose version, you have two options:"
echo ""
echo "1. Upgrade Docker Compose to version 2.24.0 or later (recommended)"
echo " Visit: https://docs.docker.com/compose/install/"
echo ""
echo "2. Manually replace all env_file sections in docker-compose.yml"
echo " Change from:"
echo " env_file:"
echo " - path: .env"
echo " required: false"
echo " To:"
echo " env_file: .env"
echo ""
print_warning "The installation will continue, but may fail if Docker Compose cannot parse the file."
echo ""
read -p "Do you want to continue anyway? (y/N): " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please upgrade Docker Compose or manually edit the docker-compose.yml file."
exit 1
fi
print_info "Proceeding with installation despite Docker Compose version compatibility issues..."
fi
else
print_error "Failed to download Docker Compose file"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Download env.template file
ENV_TEMPLATE="${INSTALL_ROOT}/deployment/env.template"
print_info "Downloading env.template..."
if curl -fsSL -o "$ENV_TEMPLATE" "${GITHUB_RAW_URL}/env.template" 2>/dev/null; then
print_success "Environment template downloaded successfully"
else
print_error "Failed to download env.template"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Download nginx config files
NGINX_BASE_URL="https://raw.githubusercontent.com/onyx-dot-app/onyx/main/deployment/data/nginx"
if [[ "$USE_LOCAL_FILES" = true ]]; then
print_step "Verifying existing configuration files"
# Download app.conf.template
NGINX_CONFIG="${INSTALL_ROOT}/data/nginx/app.conf.template"
print_info "Downloading nginx configuration template..."
if curl -fsSL -o "$NGINX_CONFIG" "$NGINX_BASE_URL/app.conf.template" 2>/dev/null; then
print_success "Nginx configuration template downloaded"
else
print_step "Downloading Onyx configuration files"
print_info "This step downloads all necessary configuration files from GitHub..."
print_error "Failed to download nginx configuration template"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
ensure_file "${INSTALL_ROOT}/deployment/docker-compose.yml" \
"${GITHUB_RAW_URL}/docker-compose.yml" "docker-compose.yml" || exit 1
# Check Docker Compose version compatibility after obtaining docker-compose.yml
if [ "$COMPOSE_VERSION" != "dev" ] && version_compare "$COMPOSE_VERSION" "2.24.0"; then
print_warning "Docker Compose version $COMPOSE_VERSION is older than 2.24.0"
echo ""
print_warning "The docker-compose.yml file uses the newer env_file format that requires Docker Compose 2.24.0 or later."
echo ""
print_info "To use this configuration with your current Docker Compose version, you have two options:"
echo ""
echo "1. Upgrade Docker Compose to version 2.24.0 or later (recommended)"
echo " Visit: https://docs.docker.com/compose/install/"
echo ""
echo "2. Manually replace all env_file sections in docker-compose.yml"
echo " Change from:"
echo " env_file:"
echo " - path: .env"
echo " required: false"
echo " To:"
echo " env_file: .env"
echo ""
print_warning "The installation will continue, but may fail if Docker Compose cannot parse the file."
echo ""
prompt_yn_or_default "Do you want to continue anyway? (Y/n): " "y"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please upgrade Docker Compose or manually edit the docker-compose.yml file."
exit 1
fi
print_info "Proceeding with installation despite Docker Compose version compatibility issues..."
# Download run-nginx.sh script
NGINX_RUN_SCRIPT="${INSTALL_ROOT}/data/nginx/run-nginx.sh"
print_info "Downloading nginx startup script..."
if curl -fsSL -o "$NGINX_RUN_SCRIPT" "$NGINX_BASE_URL/run-nginx.sh" 2>/dev/null; then
chmod +x "$NGINX_RUN_SCRIPT"
print_success "Nginx startup script downloaded and made executable"
else
print_error "Failed to download nginx startup script"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Handle lite overlay: ensure it if --lite, clean up stale copies otherwise
if [[ "$LITE_MODE" = true ]]; then
ensure_file "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" \
"${GITHUB_RAW_URL}/${LITE_COMPOSE_FILE}" "${LITE_COMPOSE_FILE}" || exit 1
elif [[ -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" ]]; then
if [[ -f "${INSTALL_ROOT}/deployment/.env" ]]; then
print_warning "Existing lite overlay found but --lite was not passed."
prompt_yn_or_default "Remove lite overlay and switch to standard mode? (y/N): " "n"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Keeping existing lite overlay. Pass --lite to keep using lite mode."
LITE_MODE=true
else
rm -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}"
print_info "Removed lite overlay (switching to standard mode)"
fi
else
rm -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}"
print_info "Removed previous lite overlay (switching to standard mode)"
fi
# Download README file
README_FILE="${INSTALL_ROOT}/README.md"
print_info "Downloading README.md..."
if curl -fsSL -o "$README_FILE" "${GITHUB_RAW_URL}/README.md" 2>/dev/null; then
print_success "README.md downloaded successfully"
else
print_error "Failed to download README.md"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
ensure_file "${INSTALL_ROOT}/deployment/env.template" \
"${GITHUB_RAW_URL}/env.template" "env.template" || exit 1
ensure_file "${INSTALL_ROOT}/data/nginx/app.conf.template" \
"$NGINX_BASE_URL/app.conf.template" "nginx/app.conf.template" || exit 1
ensure_file "${INSTALL_ROOT}/data/nginx/run-nginx.sh" \
"$NGINX_BASE_URL/run-nginx.sh" "nginx/run-nginx.sh" || exit 1
chmod +x "${INSTALL_ROOT}/data/nginx/run-nginx.sh"
ensure_file "${INSTALL_ROOT}/README.md" \
"${GITHUB_RAW_URL}/README.md" "README.md" || exit 1
# Create empty local directory marker (if needed)
touch "${INSTALL_ROOT}/data/nginx/local/.gitkeep"
print_success "All configuration files ready"
print_success "All configuration files downloaded successfully"
# Set up deployment configuration
print_step "Setting up deployment configs"
@@ -758,7 +513,7 @@ if [ -d "${INSTALL_ROOT}/deployment" ] && [ -f "${INSTALL_ROOT}/deployment/docke
if [ -n "$COMPOSE_CMD" ]; then
# Check if any containers are running
RUNNING_CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) ps -q 2>/dev/null | wc -l)
RUNNING_CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml ps -q 2>/dev/null | wc -l)
if [ "$RUNNING_CONTAINERS" -gt 0 ]; then
print_error "Onyx services are currently running!"
echo ""
@@ -779,7 +534,7 @@ if [ -f "$ENV_FILE" ]; then
echo "• Press Enter to restart with current configuration"
echo "• Type 'update' to update to a newer version"
echo ""
prompt_or_default "Choose an option [default: restart]: " ""
read -p "Choose an option [default: restart]: " -r
echo ""
if [ "$REPLY" = "update" ]; then
@@ -788,30 +543,26 @@ if [ -f "$ENV_FILE" ]; then
echo "• Press Enter for latest (recommended)"
echo "• Type a specific tag (e.g., v0.1.0)"
echo ""
# If --include-craft was passed, default to craft-latest
if [ "$INCLUDE_CRAFT" = true ]; then
prompt_or_default "Enter tag [default: craft-latest]: " "craft-latest"
VERSION="$REPLY"
read -p "Enter tag [default: craft-latest]: " -r VERSION
else
prompt_or_default "Enter tag [default: latest]: " "latest"
VERSION="$REPLY"
read -p "Enter tag [default: latest]: " -r VERSION
fi
echo ""
if [ "$INCLUDE_CRAFT" = true ] && [ "$VERSION" = "craft-latest" ]; then
print_info "Selected: craft-latest (Craft enabled)"
elif [ "$VERSION" = "latest" ]; then
print_info "Selected: Latest version"
if [ -z "$VERSION" ]; then
if [ "$INCLUDE_CRAFT" = true ]; then
VERSION="craft-latest"
print_info "Selected: craft-latest (Craft enabled)"
else
VERSION="latest"
print_info "Selected: Latest version"
fi
else
print_info "Selected: $VERSION"
fi
# Reject craft image tags when running in lite mode
if [[ "$LITE_MODE" = true ]] && [[ "${VERSION:-}" == craft-* ]]; then
print_error "Cannot use a craft image tag (${VERSION}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Update .env file with new version
print_info "Updating configuration for version $VERSION..."
if grep -q "^IMAGE_TAG=" "$ENV_FILE"; then
@@ -830,67 +581,13 @@ if [ -f "$ENV_FILE" ]; then
fi
print_success "Configuration updated for upgrade"
else
# Reject restarting a craft deployment in lite mode
EXISTING_TAG=$(grep "^IMAGE_TAG=" "$ENV_FILE" | head -1 | cut -d'=' -f2 | tr -d ' "'"'"'')
if [[ "$LITE_MODE" = true ]] && [[ "${EXISTING_TAG:-}" == craft-* ]]; then
print_error "Cannot restart a craft deployment (${EXISTING_TAG}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
print_info "Keeping existing configuration..."
print_success "Will restart with current settings"
fi
# Ensure COMPOSE_PROFILES is cleared when running in lite mode on an
# existing .env (the template ships with s3-filestore enabled).
if [[ "$LITE_MODE" = true ]] && grep -q "^COMPOSE_PROFILES=.*s3-filestore" "$ENV_FILE" 2>/dev/null; then
sed -i.bak 's/^COMPOSE_PROFILES=.*/COMPOSE_PROFILES=/' "$ENV_FILE" 2>/dev/null || true
print_success "Cleared COMPOSE_PROFILES for lite mode"
fi
else
print_info "No existing .env file found. Setting up new deployment..."
echo ""
# Ask for deployment mode (standard vs lite) unless already set via --lite flag
if [[ "$LITE_MODE" = false ]]; then
print_info "Which deployment mode would you like?"
echo ""
echo " 1) Standard - Full deployment with search, connectors, and RAG"
echo " 2) Lite - Minimal deployment (no Vespa, Redis, or model servers)"
echo " LLM chat, tools, file uploads, and Projects still work"
echo ""
prompt_or_default "Choose a mode (1 or 2) [default: 1]: " "1"
echo ""
case "$REPLY" in
2)
LITE_MODE=true
print_info "Selected: Lite mode"
ensure_file "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" \
"${GITHUB_RAW_URL}/${LITE_COMPOSE_FILE}" "${LITE_COMPOSE_FILE}" || exit 1
;;
*)
print_info "Selected: Standard mode"
;;
esac
else
print_info "Deployment mode: Lite (set via --lite flag)"
fi
# Validate lite + craft combination (could now be set interactively)
if [[ "$LITE_MODE" = true ]] && [[ "$INCLUDE_CRAFT" = true ]]; then
print_error "--include-craft cannot be used with Lite mode."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Adjust resource expectations for lite mode
if [[ "$LITE_MODE" = true ]]; then
EXPECTED_DOCKER_RAM_GB=4
EXPECTED_DISK_GB=16
fi
# Ask for version
print_info "Which tag would you like to deploy?"
echo ""
@@ -898,21 +595,23 @@ else
echo "• Press Enter for craft-latest (recommended for Craft)"
echo "• Type a specific tag (e.g., craft-v1.0.0)"
echo ""
prompt_or_default "Enter tag [default: craft-latest]: " "craft-latest"
VERSION="$REPLY"
read -p "Enter tag [default: craft-latest]: " -r VERSION
else
echo "• Press Enter for latest (recommended)"
echo "• Type a specific tag (e.g., v0.1.0)"
echo ""
prompt_or_default "Enter tag [default: latest]: " "latest"
VERSION="$REPLY"
read -p "Enter tag [default: latest]: " -r VERSION
fi
echo ""
if [ "$INCLUDE_CRAFT" = true ] && [ "$VERSION" = "craft-latest" ]; then
print_info "Selected: craft-latest (Craft enabled)"
elif [ "$VERSION" = "latest" ]; then
print_info "Selected: Latest tag"
if [ -z "$VERSION" ]; then
if [ "$INCLUDE_CRAFT" = true ]; then
VERSION="craft-latest"
print_info "Selected: craft-latest (Craft enabled)"
else
VERSION="latest"
print_info "Selected: Latest tag"
fi
else
print_info "Selected: $VERSION"
fi
@@ -946,13 +645,6 @@ else
# Use basic auth by default
AUTH_SCHEMA="basic"
# Reject craft image tags when running in lite mode (must check before writing .env)
if [[ "$LITE_MODE" = true ]] && [[ "${VERSION:-}" == craft-* ]]; then
print_error "Cannot use a craft image tag (${VERSION}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Create .env file from template
print_info "Creating .env file with your selections..."
cp "$ENV_TEMPLATE" "$ENV_FILE"
@@ -962,13 +654,6 @@ else
sed -i.bak "s/^IMAGE_TAG=.*/IMAGE_TAG=$VERSION/" "$ENV_FILE"
print_success "IMAGE_TAG set to $VERSION"
# In lite mode, clear COMPOSE_PROFILES so profiled services (MinIO, etc.)
# stay disabled — the template ships with s3-filestore enabled by default.
if [[ "$LITE_MODE" = true ]]; then
sed -i.bak 's/^COMPOSE_PROFILES=.*/COMPOSE_PROFILES=/' "$ENV_FILE" 2>/dev/null || true
print_success "Cleared COMPOSE_PROFILES for lite mode"
fi
# Configure basic authentication (default)
sed -i.bak 's/^AUTH_TYPE=.*/AUTH_TYPE=basic/' "$ENV_FILE" 2>/dev/null || true
print_success "Basic authentication enabled in configuration"
@@ -1089,7 +774,7 @@ print_step "Pulling Docker images"
print_info "This may take several minutes depending on your internet connection..."
echo ""
print_info "Downloading Docker images (this may take a while)..."
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) pull --quiet)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml pull --quiet)
if [ $? -eq 0 ]; then
print_success "Docker images downloaded successfully"
else
@@ -1103,9 +788,9 @@ print_info "Launching containers..."
echo ""
if [ "$USE_LATEST" = true ]; then
print_info "Force pulling latest images and recreating containers..."
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) up -d --pull always --force-recreate)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml up -d --pull always --force-recreate)
else
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) up -d)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml up -d)
fi
if [ $? -ne 0 ]; then
print_error "Failed to start Onyx services"
@@ -1127,7 +812,7 @@ echo ""
# Check for restart loops
print_info "Checking container health status..."
RESTART_ISSUES=false
CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) ps -q 2>/dev/null)
CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml ps -q 2>/dev/null)
for CONTAINER in $CONTAINERS; do
PROJECT_NAME="$(basename "$INSTALL_ROOT")_deployment_"
@@ -1156,7 +841,7 @@ if [ "$RESTART_ISSUES" = true ]; then
print_error "Some containers are experiencing issues!"
echo ""
print_info "Please check the logs for more information:"
echo " (cd \"${INSTALL_ROOT}/deployment\" && $COMPOSE_CMD $(compose_file_args) logs)"
echo " (cd \"${INSTALL_ROOT}/deployment\" && $COMPOSE_CMD -f docker-compose.yml logs)"
echo ""
print_info "If the issue persists, please contact: founders@onyx.app"
@@ -1175,12 +860,8 @@ check_onyx_health() {
echo ""
while [ $attempt -le $max_attempts ]; do
local http_code=""
if [[ "$DOWNLOADER" == "curl" ]]; then
http_code=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$port" 2>/dev/null || echo "000")
else
http_code=$(wget -q --spider -S "http://localhost:$port" 2>&1 | grep "HTTP/" | tail -1 | awk '{print $2}' || echo "000")
fi
# Check for successful HTTP responses (200, 301, 302, etc.)
local http_code=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$port")
if echo "$http_code" | grep -qE "^(200|301|302|303|307|308)$"; then
return 0
fi
@@ -1236,18 +917,6 @@ print_info "If authentication is enabled, you can create your admin account here
echo " • Visit http://localhost:${HOST_PORT}/auth/signup to create your admin account"
echo " • The first user created will automatically have admin privileges"
echo ""
if [[ "$LITE_MODE" = true ]]; then
echo ""
print_info "Running in Lite mode — the following services are NOT started:"
echo " • Vespa (vector database)"
echo " • Redis (cache)"
echo " • Model servers (embedding/inference)"
echo " • Background workers (Celery)"
echo ""
print_info "Connectors and RAG search are disabled. LLM chat, tools, user file"
print_info "uploads, Projects, Agent knowledge, and code interpreter still work."
fi
echo ""
print_info "Refer to the README in the ${INSTALL_ROOT} directory for more information."
echo ""
print_info "For help or issues, contact: founders@onyx.app"

6
uv.lock generated
View File

@@ -453,14 +453,14 @@ wheels = [
[[package]]
name = "authlib"
version = "1.6.9"
version = "1.6.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" }
sdist = { url = "https://files.pythonhosted.org/packages/49/dc/ed1681bf1339dd6ea1ce56136bad4baabc6f7ad466e375810702b0237047/authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b", size = 164950, upload-time = "2026-02-06T14:04:14.171Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" },
{ url = "https://files.pythonhosted.org/packages/f8/00/3ed12264094ec91f534fae429945efbaa9f8c666f3aa7061cc3b2a26a0cd/authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0", size = 244115, upload-time = "2026-02-06T14:04:12.141Z" },
]
[[package]]

View File

@@ -391,13 +391,11 @@ export default function SearchUI({ onDocumentClick }: SearchResultsProps) {
{/* ── Bottom row: Pagination ── */}
{!showEmpty && (
<Section height="fit">
<Pagination
currentPage={currentPage}
totalPages={totalPages}
onChange={setCurrentPage}
/>
</Section>
<Pagination
currentPage={currentPage}
totalPages={totalPages}
onChange={setCurrentPage}
/>
)}
</div>
);

View File

@@ -1,7 +1,5 @@
import { useState, useRef, useCallback, useEffect } from "react";
import { IS_DEV } from "@/lib/constants";
// Target format for OpenAI Realtime API
const TARGET_SAMPLE_RATE = 24000;
const CHUNK_INTERVAL_MS = 250;
@@ -247,8 +245,9 @@ class VoiceRecorderSession {
const { token } = await tokenResponse.json();
const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
const host = IS_DEV ? "localhost:8080" : window.location.host;
const path = IS_DEV
const isDev = window.location.port === "3000";
const host = isDev ? "localhost:8080" : window.location.host;
const path = isDev
? "/voice/transcribe/stream"
: "/api/voice/transcribe/stream";
return `${protocol}//${host}${path}?token=${encodeURIComponent(token)}`;

View File

@@ -176,10 +176,7 @@ function AttachmentItemLayout({
<Section flexDirection="row" gap={0.25} padding={0.25}>
<div className={cn("h-[2.25rem] aspect-square rounded-08")}>
<Section>
<div
className="attachment-button__icon-wrapper"
data-testid="attachment-item-icon-wrapper"
>
<div className="attachment-button__icon-wrapper">
<Icon className="attachment-button__icon" />
</div>
</Section>
@@ -190,7 +187,7 @@ function AttachmentItemLayout({
alignItems="center"
gap={1.5}
>
<div data-testid="attachment-item-title" className="flex-1 min-w-0">
<div className="flex-1 min-w-0">
<Content
title={title}
description={description}

View File

@@ -1,5 +1,3 @@
export const IS_DEV = process.env.NODE_ENV === "development";
export enum AuthType {
BASIC = "basic",
GOOGLE_OAUTH = "google_oauth",

View File

@@ -3,8 +3,6 @@
* Plays audio chunks as they arrive for smooth, low-latency playback.
*/
import { IS_DEV } from "@/lib/constants";
/**
* HTTPStreamingTTSPlayer - Uses HTTP streaming with MediaSource Extensions
* for smooth, gapless audio playback. This is the recommended approach for
@@ -384,8 +382,9 @@ export class WebSocketStreamingTTSPlayer {
const { token } = await tokenResponse.json();
const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
const host = IS_DEV ? "localhost:8080" : window.location.host;
const path = IS_DEV
const isDev = window.location.port === "3000";
const host = isDev ? "localhost:8080" : window.location.host;
const path = isDev
? "/voice/synthesize/stream"
: "/api/voice/synthesize/stream";
return `${protocol}//${host}${path}?token=${encodeURIComponent(token)}`;

View File

@@ -4,7 +4,6 @@ import { cn } from "@/lib/utils";
import { Button, Pagination } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import { useTableSize } from "@/refresh-components/table/TableSizeContext";
import type { TableSize } from "@/refresh-components/table/TableSizeContext";
import { SvgEye, SvgXCircle } from "@opal/icons";
import type { ReactNode } from "react";
@@ -41,8 +40,6 @@ interface FooterSelectionModeProps {
totalPages: number;
/** Called when the user navigates to a different page. */
onPageChange: (page: number) => void;
/** Controls overall footer sizing. `"regular"` (default) or `"small"`. */
size?: TableSize;
className?: string;
}
@@ -144,8 +141,8 @@ export default function Footer(props: FooterProps) {
currentPage={props.currentPage}
totalPages={props.totalPages}
onChange={props.onPageChange}
units="items"
size={isSmall ? "sm" : "md"}
units={props.totalItems === 1 ? "item" : "items"}
size={isSmall ? "md" : "lg"}
/>
) : (
<Pagination

View File

@@ -1,340 +0,0 @@
"use client";
import { useState } from "react";
import { Button } from "@opal/components";
import { SvgUserPlus, SvgUserX, SvgXCircle, SvgKey } from "@opal/icons";
import { Disabled } from "@opal/core";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import Text from "@/refresh-components/texts/Text";
import { toast } from "@/hooks/useToast";
import {
deactivateUser,
activateUser,
deleteUser,
cancelInvite,
resetPassword,
} from "./svc";
// ---------------------------------------------------------------------------
// Shared helper
// ---------------------------------------------------------------------------
/**
 * Execute an async admin action while managing the modal's submit state.
 *
 * Flips the submitting flag on for the duration of the call, invokes
 * `onDone` and shows a success toast on success, and surfaces a toast
 * with the error message (or a generic fallback) on failure. The
 * submitting flag is always cleared, even when the action throws.
 */
async function runAction(
  action: () => Promise<void>,
  successMessage: string,
  onDone: () => void,
  setIsSubmitting: (v: boolean) => void
) {
  setIsSubmitting(true);
  try {
    await action();
    onDone();
    toast.success(successMessage);
  } catch (err: unknown) {
    const message =
      err instanceof Error ? err.message : "An error occurred";
    toast.error(message);
  } finally {
    setIsSubmitting(false);
  }
}
// ---------------------------------------------------------------------------
// Cancel Invite Modal
// ---------------------------------------------------------------------------

interface CancelInviteModalProps {
  // Email address the pending invite was sent to.
  email: string;
  // Called when the modal should close; blocked while a request is in flight.
  onClose: () => void;
  // Called after a successful cancellation so the caller can refresh its data.
  onMutate: () => void;
}

/**
 * Confirmation modal for revoking a pending user invite.
 *
 * Submission goes through the shared `runAction` helper; on success it
 * triggers `onMutate` then `onClose`. While submitting, both the close
 * handler and the submit button are disabled.
 */
export function CancelInviteModal({
  email,
  onClose,
  onMutate,
}: CancelInviteModalProps) {
  const [isSubmitting, setIsSubmitting] = useState(false);

  return (
    <ConfirmationModalLayout
      icon={(props) => (
        <SvgUserX {...props} className="text-action-danger-05" />
      )}
      title="Cancel Invite"
      onClose={isSubmitting ? undefined : onClose}
      submit={
        <Disabled disabled={isSubmitting}>
          <Button
            variant="danger"
            onClick={() =>
              runAction(
                () => cancelInvite(email),
                "Invite cancelled",
                () => {
                  onMutate();
                  onClose();
                },
                setIsSubmitting
              )
            }
          >
            Cancel Invite
          </Button>
        </Disabled>
      }
    >
      <Text as="p" text03>
        <Text as="span" text05>
          {email}
        </Text>{" "}
        will no longer be able to join Onyx with this invite.
      </Text>
    </ConfirmationModalLayout>
  );
}
// ---------------------------------------------------------------------------
// Deactivate User Modal
// ---------------------------------------------------------------------------

interface DeactivateUserModalProps {
  // Email of the user to deactivate.
  email: string;
  // Called when the modal should close; blocked while a request is in flight.
  onClose: () => void;
  // Called after a successful deactivation so the caller can refresh its data.
  onMutate: () => void;
}

/**
 * Confirmation modal for deactivating an active user.
 *
 * Uses the shared `runAction` helper; on success it triggers `onMutate`
 * then `onClose`. Close and submit are disabled while the request runs.
 */
export function DeactivateUserModal({
  email,
  onClose,
  onMutate,
}: DeactivateUserModalProps) {
  const [isSubmitting, setIsSubmitting] = useState(false);

  return (
    <ConfirmationModalLayout
      icon={(props) => (
        <SvgUserX {...props} className="text-action-danger-05" />
      )}
      title="Deactivate User"
      onClose={isSubmitting ? undefined : onClose}
      submit={
        <Disabled disabled={isSubmitting}>
          <Button
            variant="danger"
            onClick={() =>
              runAction(
                () => deactivateUser(email),
                "User deactivated",
                () => {
                  onMutate();
                  onClose();
                },
                setIsSubmitting
              )
            }
          >
            Deactivate
          </Button>
        </Disabled>
      }
    >
      <Text as="p" text03>
        <Text as="span" text05>
          {email}
        </Text>{" "}
        will immediately lose access to Onyx. Their sessions and agents will be
        preserved. Their license seat will be freed. You can reactivate this
        account later.
      </Text>
    </ConfirmationModalLayout>
  );
}
// ---------------------------------------------------------------------------
// Activate User Modal
// ---------------------------------------------------------------------------

interface ActivateUserModalProps {
  // Email of the user to reactivate.
  email: string;
  // Called when the modal should close; blocked while a request is in flight.
  onClose: () => void;
  // Called after a successful activation so the caller can refresh its data.
  onMutate: () => void;
}

/**
 * Confirmation modal for reactivating a deactivated user.
 *
 * Uses the shared `runAction` helper; on success it triggers `onMutate`
 * then `onClose`. Close and submit are disabled while the request runs.
 */
export function ActivateUserModal({
  email,
  onClose,
  onMutate,
}: ActivateUserModalProps) {
  const [isSubmitting, setIsSubmitting] = useState(false);

  return (
    <ConfirmationModalLayout
      icon={SvgUserPlus}
      title="Activate User"
      onClose={isSubmitting ? undefined : onClose}
      submit={
        <Disabled disabled={isSubmitting}>
          <Button
            onClick={() =>
              runAction(
                () => activateUser(email),
                "User activated",
                () => {
                  onMutate();
                  onClose();
                },
                setIsSubmitting
              )
            }
          >
            Activate
          </Button>
        </Disabled>
      }
    >
      <Text as="p" text03>
        <Text as="span" text05>
          {email}
        </Text>{" "}
        will regain access to Onyx.
      </Text>
    </ConfirmationModalLayout>
  );
}
// ---------------------------------------------------------------------------
// Delete User Modal
// ---------------------------------------------------------------------------

interface DeleteUserModalProps {
  // Email of the user to permanently delete.
  email: string;
  // Called when the modal should close; blocked while a request is in flight.
  onClose: () => void;
  // Called after a successful deletion so the caller can refresh its data.
  onMutate: () => void;
}

/**
 * Confirmation modal for permanently deleting a user.
 *
 * Irreversible: the copy warns that session history is deleted too.
 * Uses the shared `runAction` helper; on success it triggers `onMutate`
 * then `onClose`. Close and submit are disabled while the request runs.
 */
export function DeleteUserModal({
  email,
  onClose,
  onMutate,
}: DeleteUserModalProps) {
  const [isSubmitting, setIsSubmitting] = useState(false);

  return (
    <ConfirmationModalLayout
      icon={(props) => (
        <SvgUserX {...props} className="text-action-danger-05" />
      )}
      title="Delete User"
      onClose={isSubmitting ? undefined : onClose}
      submit={
        <Disabled disabled={isSubmitting}>
          <Button
            variant="danger"
            onClick={() =>
              runAction(
                () => deleteUser(email),
                "User deleted",
                () => {
                  onMutate();
                  onClose();
                },
                setIsSubmitting
              )
            }
          >
            Delete
          </Button>
        </Disabled>
      }
    >
      <Text as="p" text03>
        <Text as="span" text05>
          {email}
        </Text>{" "}
        will be permanently removed from Onyx. All of their session history will
        be deleted. Deletion cannot be undone.
      </Text>
    </ConfirmationModalLayout>
  );
}
// ---------------------------------------------------------------------------
// Reset Password Modal
// ---------------------------------------------------------------------------
interface ResetPasswordModalProps {
email: string;
onClose: () => void;
}
export function ResetPasswordModal({
email,
onClose,
}: ResetPasswordModalProps) {
const [isSubmitting, setIsSubmitting] = useState(false);
const [newPassword, setNewPassword] = useState<string | null>(null);
const handleClose = () => {
onClose();
setNewPassword(null);
};
return (
<ConfirmationModalLayout
icon={SvgKey}
title={newPassword ? "Password Reset" : "Reset Password"}
onClose={isSubmitting ? undefined : handleClose}
submit={
newPassword ? (
<Button onClick={handleClose}>Done</Button>
) : (
<Disabled disabled={isSubmitting}>
<Button
variant="danger"
onClick={async () => {
setIsSubmitting(true);
try {
const result = await resetPassword(email);
setNewPassword(result.new_password);
} catch (err) {
toast.error(
err instanceof Error
? err.message
: "Failed to reset password"
);
} finally {
setIsSubmitting(false);
}
}}
>
Reset Password
</Button>
</Disabled>
)
}
>
{newPassword ? (
<div className="flex flex-col gap-2">
<Text as="p" text03>
The password for{" "}
<Text as="span" text05>
{email}
</Text>{" "}
has been reset. Copy the new password below it will not be shown
again.
</Text>
<code className="rounded-sm bg-background-neutral-02 px-3 py-2 text-sm select-all">
{newPassword}
</code>
</div>
) : (
<Text as="p" text03>
This will generate a new random password for{" "}
<Text as="span" text05>
{email}
</Text>
. Their current password will stop working immediately.
</Text>
)}
</ConfirmationModalLayout>
);
}

View File

@@ -166,7 +166,7 @@ export default function UserFilters({
<Popover>
<Popover.Trigger asChild>
<FilterButton
aria-label="Filter by role"
data-testid="filter-role"
leftIcon={SvgUsers}
active={hasRoleFilter}
onClear={() => onRolesChange([])}
@@ -214,7 +214,7 @@ export default function UserFilters({
>
<Popover.Trigger asChild>
<FilterButton
aria-label="Filter by group"
data-testid="filter-group"
leftIcon={SvgUsers}
active={hasGroupFilter}
onClear={() => onGroupsChange([])}
@@ -269,7 +269,7 @@ export default function UserFilters({
<Popover>
<Popover.Trigger asChild>
<FilterButton
aria-label="Filter by status"
data-testid="filter-status"
leftIcon={SvgUsers}
active={hasStatusFilter}
onClear={() => onStatusesChange([])}

View File

@@ -14,19 +14,20 @@ import {
import { Disabled } from "@opal/core";
import Popover from "@/refresh-components/Popover";
import Separator from "@/refresh-components/Separator";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import { Section } from "@/layouts/general-layouts";
import Text from "@/refresh-components/texts/Text";
import { UserStatus } from "@/lib/types";
import { toast } from "@/hooks/useToast";
import { approveRequest } from "./svc";
import EditUserModal from "./EditUserModal";
import {
CancelInviteModal,
DeactivateUserModal,
ActivateUserModal,
DeleteUserModal,
ResetPasswordModal,
} from "./UserActionModals";
deactivateUser,
activateUser,
deleteUser,
cancelInvite,
approveRequest,
resetPassword,
} from "./svc";
import EditUserModal from "./EditUserModal";
import type { UserRow } from "./interfaces";
// ---------------------------------------------------------------------------
@@ -57,19 +58,31 @@ export default function UserRowActions({
}: UserRowActionsProps) {
const [modal, setModal] = useState<Modal | null>(null);
const [popoverOpen, setPopoverOpen] = useState(false);
const [isSubmitting, setIsSubmitting] = useState(false);
const [newPassword, setNewPassword] = useState<string | null>(null);
async function handleAction(
action: () => Promise<void>,
successMessage: string
) {
setIsSubmitting(true);
try {
await action();
onMutate();
toast.success(successMessage);
setModal(null);
} catch (err) {
toast.error(err instanceof Error ? err.message : "An error occurred");
} finally {
setIsSubmitting(false);
}
}
const openModal = (type: Modal) => {
setPopoverOpen(false);
setModal(type);
};
const closeModal = () => setModal(null);
const closeAndMutate = () => {
setModal(null);
onMutate();
};
// Status-aware action menus
const actionButtons = (() => {
// SCIM-managed users get limited actions — most changes would be
@@ -119,17 +132,10 @@ export default function UserRowActions({
icon={SvgUserCheck}
onClick={() => {
setPopoverOpen(false);
void (async () => {
try {
await approveRequest(user.email);
onMutate();
toast.success("Request approved");
} catch (err) {
toast.error(
err instanceof Error ? err.message : "An error occurred"
);
}
})();
handleAction(
() => approveRequest(user.email),
"Request approved"
);
}}
>
Approve
@@ -170,23 +176,6 @@ export default function UserRowActions({
case UserStatus.INACTIVE:
return (
<>
{user.id && (
<Button
prominence="tertiary"
icon={SvgUsers}
onClick={() => openModal(Modal.EDIT_GROUPS)}
>
Groups &amp; Roles
</Button>
)}
<Button
prominence="tertiary"
icon={SvgKey}
onClick={() => openModal(Modal.RESET_PASSWORD)}
>
Reset Password
</Button>
<Separator paddingXRem={0.5} />
<Button
prominence="tertiary"
icon={SvgUserPlus}
@@ -234,45 +223,211 @@ export default function UserRowActions({
{modal === Modal.EDIT_GROUPS && user.id && (
<EditUserModal
user={user as UserRow & { id: string }}
onClose={closeModal}
onClose={() => setModal(null)}
onMutate={onMutate}
/>
)}
{modal === Modal.CANCEL_INVITE && (
<CancelInviteModal
email={user.email}
onClose={closeModal}
onMutate={onMutate}
/>
<ConfirmationModalLayout
icon={(props) => (
<SvgUserX {...props} className="text-action-danger-05" />
)}
title="Cancel Invite"
onClose={isSubmitting ? undefined : () => setModal(null)}
submit={
<Disabled disabled={isSubmitting}>
<Button
variant="danger"
onClick={() => {
handleAction(
() => cancelInvite(user.email),
"Invite cancelled"
);
}}
>
Cancel Invite
</Button>
</Disabled>
}
>
<Text as="p" text03>
<Text as="span" text05>
{user.email}
</Text>{" "}
will no longer be able to join Onyx with this invite.
</Text>
</ConfirmationModalLayout>
)}
{modal === Modal.DEACTIVATE && (
<DeactivateUserModal
email={user.email}
onClose={closeModal}
onMutate={onMutate}
/>
<ConfirmationModalLayout
icon={(props) => (
<SvgUserX {...props} className="text-action-danger-05" />
)}
title="Deactivate User"
onClose={isSubmitting ? undefined : () => setModal(null)}
submit={
<Disabled disabled={isSubmitting}>
<Button
variant="danger"
onClick={async () => {
await handleAction(
() => deactivateUser(user.email),
"User deactivated"
);
}}
>
Deactivate
</Button>
</Disabled>
}
>
<Text as="p" text03>
<Text as="span" text05>
{user.email}
</Text>{" "}
will immediately lose access to Onyx. Their sessions and agents will
be preserved. Their license seat will be freed. You can reactivate
this account later.
</Text>
</ConfirmationModalLayout>
)}
{modal === Modal.ACTIVATE && (
<ActivateUserModal
email={user.email}
onClose={closeModal}
onMutate={onMutate}
/>
<ConfirmationModalLayout
icon={SvgUserPlus}
title="Activate User"
onClose={isSubmitting ? undefined : () => setModal(null)}
submit={
<Disabled disabled={isSubmitting}>
<Button
onClick={async () => {
await handleAction(
() => activateUser(user.email),
"User activated"
);
}}
>
Activate
</Button>
</Disabled>
}
>
<Text as="p" text03>
<Text as="span" text05>
{user.email}
</Text>{" "}
will regain access to Onyx.
</Text>
</ConfirmationModalLayout>
)}
{modal === Modal.DELETE && (
<DeleteUserModal
email={user.email}
onClose={closeModal}
onMutate={onMutate}
/>
<ConfirmationModalLayout
icon={(props) => (
<SvgUserX {...props} className="text-action-danger-05" />
)}
title="Delete User"
onClose={isSubmitting ? undefined : () => setModal(null)}
submit={
<Disabled disabled={isSubmitting}>
<Button
variant="danger"
onClick={async () => {
await handleAction(
() => deleteUser(user.email),
"User deleted"
);
}}
>
Delete
</Button>
</Disabled>
}
>
<Text as="p" text03>
<Text as="span" text05>
{user.email}
</Text>{" "}
will be permanently removed from Onyx. All of their session history
will be deleted. Deletion cannot be undone.
</Text>
</ConfirmationModalLayout>
)}
{modal === Modal.RESET_PASSWORD && (
<ResetPasswordModal email={user.email} onClose={closeModal} />
<ConfirmationModalLayout
icon={SvgKey}
title={newPassword ? "Password Reset" : "Reset Password"}
onClose={
isSubmitting
? undefined
: () => {
setModal(null);
setNewPassword(null);
}
}
submit={
newPassword ? (
<Button
onClick={() => {
setModal(null);
setNewPassword(null);
}}
>
Done
</Button>
) : (
<Disabled disabled={isSubmitting}>
<Button
variant="danger"
onClick={async () => {
setIsSubmitting(true);
try {
const result = await resetPassword(user.email);
setNewPassword(result.new_password);
} catch (err) {
toast.error(
err instanceof Error
? err.message
: "Failed to reset password"
);
} finally {
setIsSubmitting(false);
}
}}
>
Reset Password
</Button>
</Disabled>
)
}
>
{newPassword ? (
<div className="flex flex-col gap-2">
<Text as="p" text03>
The password for{" "}
<Text as="span" text05>
{user.email}
</Text>{" "}
has been reset. Copy the new password below — it will not be
shown again.
</Text>
<code className="rounded-sm bg-background-neutral-02 px-3 py-2 text-sm select-all">
{newPassword}
</code>
</div>
) : (
<Text as="p" text03>
This will generate a new random password for{" "}
<Text as="span" text05>
{user.email}
</Text>
. Their current password will stop working immediately.
</Text>
)}
</ConfirmationModalLayout>
)}
</>
);

View File

@@ -598,12 +598,18 @@ const MemoizedAppSidebarInner = memo(
setShowIntroAnimation(true);
}, []);
const vectorDbEnabled =
combinedSettings?.settings?.vector_db_enabled !== false;
const adminDefaultHref = vectorDbEnabled
? "/admin/indexing/status"
: "/admin/agents";
const settingsButton = useMemo(
() => (
<div>
{(isAdmin || isCurator) && (
<SidebarTab
href={isCurator ? "/admin/agents" : "/admin/configuration/llm"}
href={adminDefaultHref}
icon={SvgSettings}
folded={folded}
>
@@ -618,7 +624,14 @@ const MemoizedAppSidebarInner = memo(
/>
</div>
),
[folded, isAdmin, isCurator, handleShowBuildIntro, isOnyxCraftEnabled]
[
folded,
isAdmin,
isCurator,
handleShowBuildIntro,
isOnyxCraftEnabled,
adminDefaultHref,
]
);
return (

View File

@@ -21,8 +21,8 @@ export default function SidebarBody({
scrollKey,
}: SidebarBodyProps) {
return (
<div className="flex flex-col min-h-0 h-full gap-3">
<div className="flex flex-col gap-1.5 px-2">
<div className="flex flex-col min-h-0 h-full gap-3 px-2">
<div className="flex flex-col gap-1.5">
{actionButtons &&
(Array.isArray(actionButtons)
? actionButtons.map((button, index) => (
@@ -30,10 +30,10 @@ export default function SidebarBody({
))
: actionButtons)}
</div>
<OverflowDiv className="gap-3 px-2" scrollKey={scrollKey}>
<OverflowDiv className="gap-3" scrollKey={scrollKey}>
{children}
</OverflowDiv>
{footer && <div className="px-2">{footer}</div>}
{footer}
</div>
);
}

View File

@@ -1,324 +0,0 @@
/**
* Page Object Model for the Admin Users page (/admin/users).
*
* Encapsulates all locators and interactions so specs remain declarative.
*/
import { type Page, type Locator, expect } from "@playwright/test";
/** URL pattern that matches the users data fetch. */
const USERS_API = /\/api\/manage\/users\/(accepted\/all|invited)/;
export class UsersAdminPage {
  readonly page: Page;

  // Top-level elements
  readonly inviteButton: Locator;
  readonly searchInput: Locator;

  // Filter buttons
  readonly accountTypesFilter: Locator;
  readonly groupsFilter: Locator;
  readonly statusFilter: Locator;

  // Table
  readonly table: Locator;
  readonly tableRows: Locator;

  // Pagination & footer
  readonly paginationSummary: Locator;
  readonly downloadCsvButton: Locator;

  constructor(page: Page) {
    this.page = page;
    this.inviteButton = page.getByRole("button", { name: "Invite Users" });
    this.searchInput = page.getByPlaceholder("Search users...");
    // NOTE(review): this commit replaces `aria-label` with `data-testid` on
    // FilterButton (e.g. "filter-role") — confirm these getByLabel locators
    // still resolve, or switch to page.getByTestId(...).
    this.accountTypesFilter = page.getByLabel("Filter by role");
    this.groupsFilter = page.getByLabel("Filter by group");
    this.statusFilter = page.getByLabel("Filter by status");
    this.table = page.getByRole("table");
    this.tableRows = page.getByRole("table").locator("tbody tr");
    this.paginationSummary = page.getByText(/Showing \d/);
    this.downloadCsvButton = page.getByRole("button", {
      name: "Download CSV",
    });
  }

  // ---------------------------------------------------------------------------
  // Popover helper
  // ---------------------------------------------------------------------------

  /**
   * Returns a locator for the currently open popover / filter dropdown.
   * Radix Popover renders its content with `role="dialog"`. Using
   * `getByRole("dialog").first()` targets the oldest open dialog, which is
   * always the popover during row-action or filter flows (confirmation
   * modals open later and would be `.last()`).
   */
  get popover(): Locator {
    return this.page.getByRole("dialog").first();
  }

  // ---------------------------------------------------------------------------
  // Navigation
  // ---------------------------------------------------------------------------

  /** Navigate to /admin/users and wait until the table has loaded. */
  async goto() {
    await this.page.goto("/admin/users");
    await expect(this.page.getByText("Users & Requests")).toBeVisible({
      timeout: 15000,
    });
    // Wait for the table to finish loading (pagination summary only appears
    // after the async data fetch completes).
    await expect(this.paginationSummary).toBeVisible({ timeout: 15000 });
  }

  // ---------------------------------------------------------------------------
  // Waiting helpers
  // ---------------------------------------------------------------------------

  /** Wait for the users API response that follows a table-refreshing action. */
  private async waitForTableRefresh(): Promise<void> {
    await this.page.waitForResponse(USERS_API);
  }

  // ---------------------------------------------------------------------------
  // Search
  // ---------------------------------------------------------------------------

  /** Type a term into the search input (replaces any existing value). */
  async search(term: string) {
    await this.searchInput.fill(term);
  }

  /** Empty the search input, restoring the unfiltered table. */
  async clearSearch() {
    await this.searchInput.fill("");
  }

  // ---------------------------------------------------------------------------
  // Filters
  // ---------------------------------------------------------------------------

  /** Open the account-types filter dropdown and wait for its popover. */
  async openAccountTypesFilter() {
    await this.accountTypesFilter.click();
    await expect(this.popover).toBeVisible();
  }

  /** Click an option in the open account-types popover (substring match). */
  async selectAccountType(label: string) {
    await this.popover.getByText(label, { exact: false }).first().click();
  }

  /** Open the status filter dropdown and wait for its popover. */
  async openStatusFilter() {
    await this.statusFilter.click();
    await expect(this.popover).toBeVisible();
  }

  /** Click an option in the open status popover (substring match). */
  async selectStatus(label: string) {
    await this.popover.getByText(label, { exact: false }).first().click();
  }

  /** Open the groups filter dropdown and wait for its popover. */
  async openGroupsFilter() {
    await this.groupsFilter.click();
    await expect(this.popover).toBeVisible();
  }

  /** Click an option in the open groups popover (substring match). */
  async selectGroup(label: string) {
    await this.popover.getByText(label, { exact: false }).first().click();
  }

  /** Dismiss the open popover via Escape and wait for it to disappear. */
  async closePopover() {
    await this.page.keyboard.press("Escape");
    await expect(this.page.getByRole("dialog")).not.toBeVisible();
  }

  // ---------------------------------------------------------------------------
  // Table interactions
  // ---------------------------------------------------------------------------

  /** Number of rows currently rendered in the table body. */
  async getVisibleRowCount(): Promise<number> {
    return await this.tableRows.count();
  }

  /**
   * Returns the text content of a specific column across all visible rows.
   * Column indices: 0=Name, 1=Groups, 2=Account Type, 3=Status, 4=Last Updated.
   */
  async getColumnTexts(columnIndex: number): Promise<string[]> {
    // +2 because nth-child is 1-based and the first <td> is not a data column
    // (the index-to-cell offset below encodes that layout).
    const cells = this.tableRows.locator(`td:nth-child(${columnIndex + 2})`);
    const count = await cells.count();
    const texts: string[] = [];
    for (let i = 0; i < count; i++) {
      const text = await cells.nth(i).textContent();
      if (text) texts.push(text.trim());
    }
    return texts;
  }

  /** Locator for the table row containing the given email. */
  getRowByEmail(email: string): Locator {
    return this.table.getByRole("row").filter({ hasText: email });
  }

  /** Click the sort button on a column header. */
  async sortByColumn(columnName: string) {
    // Column headers are <th> elements. The sort button is a child <button>
    // that only appears on hover — hover first to reveal it.
    const header = this.table.locator("th").filter({ hasText: columnName });
    await header.hover();
    await header.locator("button").first().click();
  }

  // ---------------------------------------------------------------------------
  // Pagination
  // ---------------------------------------------------------------------------

  /** Click a numbered page button in the table footer. */
  async goToPage(pageNumber: number) {
    const footer = this.page.locator(".table-footer");
    await footer
      .getByRole("button")
      .filter({ hasText: String(pageNumber) })
      .click();
  }

  // ---------------------------------------------------------------------------
  // Row actions
  // ---------------------------------------------------------------------------

  /** Open the actions popover for the row matching the given email. */
  async openRowActions(email: string) {
    const row = this.getRowByEmail(email);
    // The actions trigger is assumed to be the last button in the row.
    const actionsButton = row.getByRole("button").last();
    await actionsButton.click();
    await expect(this.popover).toBeVisible();
  }

  /** Click an action entry inside the open row-actions popover. */
  async clickRowAction(actionName: string) {
    await this.popover.getByText(actionName).first().click();
  }

  // ---------------------------------------------------------------------------
  // Confirmation modals
  // ---------------------------------------------------------------------------

  /**
   * Returns the most recently opened dialog (modal).
   * Uses `.last()` because confirmation modals are portaled after row-action
   * popovers, and a closing popover (role="dialog") may briefly remain in the
   * DOM during its exit animation.
   */
  get dialog(): Locator {
    return this.page.getByRole("dialog").last();
  }

  /** Click the named confirm button in the open modal. */
  async confirmModalAction(buttonName: string) {
    await this.dialog.getByRole("button", { name: buttonName }).first().click();
  }

  /** Click the Cancel button in the open modal. */
  async cancelModal() {
    await this.dialog.getByRole("button", { name: "Cancel" }).first().click();
  }

  /** Assert that a toast with the given message is visible. */
  async expectToast(message: string | RegExp) {
    await expect(this.page.getByText(message)).toBeVisible();
  }

  // ---------------------------------------------------------------------------
  // Invite modal
  // ---------------------------------------------------------------------------

  /** The email input inside the invite modal. */
  get inviteEmailInput(): Locator {
    return this.dialog.getByPlaceholder("Add an email and press enter");
  }

  /** Open the invite modal and wait for its title to render. */
  async openInviteModal() {
    await this.inviteButton.click();
    await expect(this.dialog.getByText("Invite Users")).toBeVisible();
  }

  /** Type an email, press Enter, and wait for its chip to appear. */
  async addInviteEmail(email: string) {
    await this.inviteEmailInput.pressSequentially(email, { delay: 20 });
    await this.inviteEmailInput.press("Enter");
    // Wait for the chip to appear in the dialog
    await expect(this.dialog.getByText(email)).toBeVisible();
  }

  /** Submit the invite modal. */
  async submitInvite() {
    await this.dialog.getByRole("button", { name: "Invite" }).click();
  }

  // ---------------------------------------------------------------------------
  // Inline role editing (Popover + OpenButton + LineItem)
  // ---------------------------------------------------------------------------

  /** Open the inline role dropdown for the row matching the given email. */
  async openRoleDropdown(email: string) {
    const row = this.getRowByEmail(email);
    // The role trigger is identified by its current role label.
    const roleButton = row
      .locator("button")
      .filter({ hasText: /Basic|Admin|Global Curator|Slack User/ });
    await roleButton.click();
    await expect(this.popover).toBeVisible();
  }

  /** Pick a role in the open dropdown, then wait for the table to refresh. */
  async selectRole(roleName: string) {
    await this.popover.getByText(roleName).first().click();
    await this.waitForTableRefresh();
  }

  // ---------------------------------------------------------------------------
  // Edit groups modal
  // ---------------------------------------------------------------------------

  /**
   * Stable locator for the edit-groups modal.
   *
   * We can't use the generic `dialog` getter (`.last()`) here because the
   * groups search opens a Radix Popover (also `role="dialog"`) inside the
   * modal, which shifts what `.last()` resolves to. Targeting by accessible
   * name keeps the reference pinned to the modal itself.
   */
  get editGroupsDialog(): Locator {
    return this.page.getByRole("dialog", { name: /Edit User/ });
  }

  /** The search input inside the edit groups modal. */
  get groupSearchInput(): Locator {
    return this.editGroupsDialog.getByPlaceholder("Search groups to join...");
  }

  /** Open the edit-groups modal via the row-actions popover. */
  async openEditGroupsModal(email: string) {
    await this.openRowActions(email);
    await this.clickRowAction("Groups");
    await expect(
      this.editGroupsDialog.getByText("Edit User's Groups & Roles")
    ).toBeVisible();
  }

  /** Search for a group inside the modal and wait for a matching result. */
  async searchGroupsInModal(term: string) {
    // Click the input first to open the popover (Radix Popover.Trigger
    // wraps the input — fill() alone bypasses the trigger's click handler).
    await this.groupSearchInput.click();
    await this.groupSearchInput.fill(term);
    // The group name appears in the popover dropdown (nested dialog).
    // Use page-level search since the popover may be portaled.
    await expect(this.page.getByText(term).first()).toBeVisible();
  }

  /** Toggle a group entry in the modal's dropdown. */
  async toggleGroupInModal(groupName: string) {
    // LineItem renders as a <div>, not <button>.
    // The popover dropdown is a nested dialog inside the modal.
    await this.page
      .getByRole("dialog")
      .last()
      .getByText(groupName)
      .first()
      .click();
  }

  /** Click "Save Changes" in the edit-groups modal. */
  async saveGroupsModal() {
    await this.editGroupsDialog
      .getByRole("button", { name: "Save Changes" })
      .click();
  }
}

View File

@@ -1,37 +0,0 @@
/**
* Playwright fixtures for Admin Users page tests.
*
* Provides:
* - Authenticated admin page
* - OnyxApiClient for API-level setup/teardown
* - UsersAdminPage page object
*/
import { test as base, expect, type Page } from "@playwright/test";
import { loginAs } from "@tests/e2e/utils/auth";
import { OnyxApiClient } from "@tests/e2e/utils/onyxApiClient";
import { UsersAdminPage } from "./UsersAdminPage";
// Extends the base Playwright test with three fixtures. Fixture dependency
// chain: adminPage -> api, adminPage -> usersPage.
export const test = base.extend<{
  adminPage: Page;
  api: OnyxApiClient;
  usersPage: UsersAdminPage;
}>({
  // Fresh admin session per test: clear cookies, then log in as admin.
  adminPage: async ({ page }, use) => {
    await page.context().clearCookies();
    await loginAs(page, "admin");
    await use(page);
  },
  // API client bound to the authenticated admin page's request context.
  api: async ({ adminPage }, use) => {
    const client = new OnyxApiClient(adminPage.request);
    await use(client);
  },
  // Page object wrapping the authenticated page.
  usersPage: async ({ adminPage }, use) => {
    const usersPage = new UsersAdminPage(adminPage);
    await use(usersPage);
  },
});

export { expect };

View File

@@ -1,620 +0,0 @@
/**
* E2E Tests: Admin Users Page
*
* Tests the full users management page — search, filters, sorting,
* inline role editing, row actions, invite modal, and group management.
*
* Read-only tests (layout, search, filters, sorting, pagination) run against
* whatever users already exist in the database (at minimum 10 from global-setup:
* 2 admins + 8 workers). Mutation tests create their own ephemeral users.
*/
import { test, expect } from "./fixtures";
import { TEST_ADMIN_CREDENTIALS } from "@tests/e2e/constants";
import type { Browser } from "@playwright/test";
import type { OnyxApiClient } from "@tests/e2e/utils/onyxApiClient";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

/**
 * Builds a throwaway email for mutation tests; uniqueness comes from the
 * current epoch-millis timestamp baked into the local part.
 */
function uniqueEmail(prefix: string): string {
  const timestamp = Date.now();
  return `e2e-${prefix}-${timestamp}@test.onyx`;
}

const TEST_PASSWORD = "TestPassword123!";
/**
 * Best-effort cleanup — failures are logged as warnings rather than
 * rethrown, so teardown can never fail a spec.
 */
async function softCleanup(fn: () => Promise<unknown>): Promise<void> {
  try {
    await fn();
  } catch (e) {
    console.warn("cleanup:", e);
  }
}
/**
 * Creates an authenticated API context for beforeAll/afterAll hooks.
 * Handles browser context lifecycle so callers only write the setup logic.
 *
 * @param browser Playwright Browser used to spawn a temporary context.
 * @param fn      Callback receiving an OnyxApiClient bound to that context.
 */
async function withApiContext(
  browser: Browser,
  fn: (api: OnyxApiClient) => Promise<void>
): Promise<void> {
  // Reuse the stored admin auth state so API calls are authenticated.
  const context = await browser.newContext({
    storageState: "admin_auth.json",
  });
  try {
    // Dynamic import is needed at runtime: the top-level OnyxApiClient
    // import in this file is type-only (`import type`).
    const { OnyxApiClient } = await import("@tests/e2e/utils/onyxApiClient");
    const api = new OnyxApiClient(context.request);
    await fn(api);
  } finally {
    // Always dispose the context, even if the callback throws.
    await context.close();
  }
}
// ---------------------------------------------------------------------------
// Page load & layout
// ---------------------------------------------------------------------------

// Read-only smoke tests: verify static page chrome renders against whatever
// users already exist in the database.
test.describe("Users page — layout", () => {
  test("renders page title, invite button, search, and stats bar", async ({
    usersPage,
  }) => {
    await usersPage.goto();
    await expect(usersPage.page.getByText("Users & Requests")).toBeVisible();
    await expect(usersPage.inviteButton).toBeVisible();
    await expect(usersPage.searchInput).toBeVisible();
    // Stats bar renders number and label as separate elements
    await expect(usersPage.page.getByText("active users")).toBeVisible();
  });

  test("table renders with correct column headers", async ({ usersPage }) => {
    await usersPage.goto();
    for (const header of [
      "Name",
      "Groups",
      "Account Type",
      "Status",
      "Last Updated",
    ]) {
      await expect(
        usersPage.table.locator("th").filter({ hasText: header })
      ).toBeVisible();
    }
  });

  test("pagination shows summary and controls", async ({ usersPage }) => {
    await usersPage.goto();
    await expect(usersPage.paginationSummary).toBeVisible();
    await expect(usersPage.paginationSummary).toContainText("Showing");
  });

  test("CSV download button is visible in footer", async ({ usersPage }) => {
    await usersPage.goto();
    await expect(usersPage.downloadCsvButton).toBeVisible();
  });
});
// ---------------------------------------------------------------------------
// Search (uses existing DB users — at least admin_user@example.com)
// ---------------------------------------------------------------------------

test.describe("Users page — search", () => {
  test("search filters table rows by email", async ({ usersPage }) => {
    await usersPage.goto();
    await usersPage.search(TEST_ADMIN_CREDENTIALS.email);
    const row = usersPage.getRowByEmail(TEST_ADMIN_CREDENTIALS.email);
    await expect(row).toBeVisible();
    // Other rows may legitimately match; only assert the lower bound.
    const rowCount = await usersPage.getVisibleRowCount();
    expect(rowCount).toBeGreaterThanOrEqual(1);
  });

  test("search with no results shows empty state", async ({ usersPage }) => {
    await usersPage.goto();
    await usersPage.search("zzz-no-match-exists-xyz@nowhere.invalid");
    await expect(usersPage.page.getByText("No users found")).toBeVisible();
  });

  test("clearing search restores all results", async ({ usersPage }) => {
    await usersPage.goto();
    // First drive the table into the empty state…
    await usersPage.search("zzz-no-match-exists-xyz@nowhere.invalid");
    await expect(usersPage.page.getByText("No users found")).toBeVisible();
    // …then clear and confirm rows come back.
    await usersPage.clearSearch();
    await expect(usersPage.table).toBeVisible();
    const rowCount = await usersPage.getVisibleRowCount();
    expect(rowCount).toBeGreaterThan(0);
  });
});
// ---------------------------------------------------------------------------
// Filters (uses existing DB users)
// ---------------------------------------------------------------------------

test.describe("Users page — filters", () => {
  test("account types filter shows expected roles", async ({ usersPage }) => {
    await usersPage.goto();
    await usersPage.openAccountTypesFilter();
    await expect(
      usersPage.popover.getByText("All Account Types").first()
    ).toBeVisible();
    await expect(usersPage.popover.getByText("Admin").first()).toBeVisible();
    await expect(usersPage.popover.getByText("Basic").first()).toBeVisible();
    await usersPage.closePopover();
  });

  test("filtering by Admin role shows only admin users", async ({
    usersPage,
  }) => {
    await usersPage.goto();
    await usersPage.openAccountTypesFilter();
    await usersPage.selectAccountType("Admin");
    await usersPage.closePopover();
    await expect(usersPage.accountTypesFilter).toContainText("Admin");
    const rowCount = await usersPage.getVisibleRowCount();
    expect(rowCount).toBeGreaterThan(0);
    // Every visible row's Account Type column must say "Admin"
    const roleTexts = await usersPage.getColumnTexts(2);
    for (const role of roleTexts) {
      expect(role).toBe("Admin");
    }
  });

  test("status filter for Active shows only active users", async ({
    usersPage,
  }) => {
    await usersPage.goto();
    await usersPage.openStatusFilter();
    await usersPage.selectStatus("Active");
    await usersPage.closePopover();
    await expect(usersPage.statusFilter).toContainText("Active");
    const rowCount = await usersPage.getVisibleRowCount();
    expect(rowCount).toBeGreaterThan(0);
    // Every visible row's Status column must say "Active"
    const statusTexts = await usersPage.getColumnTexts(3);
    for (const status of statusTexts) {
      expect(status).toBe("Active");
    }
  });

  test("resetting filter shows all users again", async ({ usersPage }) => {
    await usersPage.goto();
    await usersPage.openStatusFilter();
    await usersPage.selectStatus("Active");
    await usersPage.closePopover();
    const filteredCount = await usersPage.getVisibleRowCount();
    // Reset back to "All Status" and expect at least as many rows.
    await usersPage.openStatusFilter();
    await usersPage.selectStatus("All Status");
    await usersPage.closePopover();
    const allCount = await usersPage.getVisibleRowCount();
    expect(allCount).toBeGreaterThanOrEqual(filteredCount);
  });
});
// ---------------------------------------------------------------------------
// Sorting (uses existing DB users)
// ---------------------------------------------------------------------------
// Column sorting. These tests avoid asserting a specific sort order (it
// depends on DB contents); they only assert that sorting changes the order.
test.describe("Users page — sorting", () => {
test("clicking Name sort twice reverses row order", async ({ usersPage }) => {
await usersPage.goto();
const firstRowBefore = await usersPage.tableRows.first().textContent();
// Click twice — first click may match default order; second guarantees reversal
await usersPage.sortByColumn("Name");
await usersPage.sortByColumn("Name");
const firstRowAfter = await usersPage.tableRows.first().textContent();
expect(firstRowAfter).not.toBe(firstRowBefore);
});
test("clicking Account Type sort twice reorders rows", async ({
usersPage,
}) => {
await usersPage.goto();
// Column index 2 is the Account Type column (see the filter tests).
const rolesBefore = await usersPage.getColumnTexts(2);
// Click twice to guarantee a different order from default
await usersPage.sortByColumn("Account Type");
await usersPage.sortByColumn("Account Type");
const rolesAfter = await usersPage.getColumnTexts(2);
expect(rolesAfter.length).toBeGreaterThan(0);
expect(rolesAfter).not.toEqual(rolesBefore);
});
});
// ---------------------------------------------------------------------------
// Pagination (uses existing DB users — need > 8 for multi-page)
// ---------------------------------------------------------------------------
// Pagination round-trip. Requires more users in the DB than one page
// (page size 8) so that a second page exists to navigate to.
test.describe("Users page — pagination", () => {
  test("clicking page 2 navigates to second page", async ({ usersPage }) => {
    await usersPage.goto();
    const summaryBefore = await usersPage.paginationSummary.textContent();
    // textContent() is null when the summary element is missing; fail loudly
    // here instead of relying on scattered non-null assertions below.
    if (summaryBefore === null) {
      throw new Error("Pagination summary not rendered before navigation");
    }
    // With 10+ users and page size 8, page 2 should exist
    await usersPage.goToPage(2);
    await expect(usersPage.paginationSummary).not.toHaveText(summaryBefore);
    // Go back to page 1 — the summary should match the original text exactly.
    await usersPage.goToPage(1);
    await expect(usersPage.paginationSummary).toHaveText(summaryBefore);
  });
});
// ---------------------------------------------------------------------------
// Invite users (creates ephemeral data)
// ---------------------------------------------------------------------------
// Invite-user flows. These create ephemeral invites; cleanup now runs in
// try/finally so a failed assertion no longer leaks invited users into the
// shared DB (which would pollute later tests). Cleanup is best-effort: a
// cancelInvite failure must not mask the original test error.
test.describe("Users page — invite users", () => {
  // Smoke-check the modal chrome before exercising real invites.
  test("invite modal opens with correct structure", async ({ usersPage }) => {
    await usersPage.goto();
    await usersPage.openInviteModal();
    await expect(usersPage.dialog.getByText("Invite Users")).toBeVisible();
    await expect(usersPage.inviteEmailInput).toBeVisible();
    await usersPage.cancelModal();
    await expect(usersPage.dialog).not.toBeVisible();
  });
  test("invite a user and verify Invite Pending status", async ({
    usersPage,
    api,
  }) => {
    const email = uniqueEmail("invite");
    try {
      await usersPage.goto();
      await usersPage.openInviteModal();
      await usersPage.addInviteEmail(email);
      await usersPage.submitInvite();
      await usersPage.expectToast(/Invited 1 user/);
      // Reload and search
      await usersPage.goto();
      await usersPage.search(email);
      const row = usersPage.getRowByEmail(email);
      await expect(row).toBeVisible();
      await expect(row).toContainText("Invite Pending");
    } finally {
      // Cleanup — best-effort; ignore failure if the invite never landed.
      await api.cancelInvite(email).catch(() => undefined);
    }
  });
  test("invite multiple users at once", async ({ usersPage, api }) => {
    const email1 = uniqueEmail("multi1");
    const email2 = uniqueEmail("multi2");
    try {
      await usersPage.goto();
      await usersPage.openInviteModal();
      await usersPage.addInviteEmail(email1);
      await usersPage.addInviteEmail(email2);
      await usersPage.submitInvite();
      await usersPage.expectToast(/Invited 2 users/);
    } finally {
      // Cleanup — best-effort for both invites.
      await api.cancelInvite(email1).catch(() => undefined);
      await api.cancelInvite(email2).catch(() => undefined);
    }
  });
  test("invite modal shows error icon for invalid emails", async ({
    usersPage,
  }) => {
    await usersPage.goto();
    await usersPage.openInviteModal();
    await usersPage.addInviteEmail("not-an-email");
    // The chip should be rendered with an error state
    await expect(usersPage.dialog.getByText("not-an-email")).toBeVisible();
    await usersPage.cancelModal();
  });
});
// ---------------------------------------------------------------------------
// Row actions — deactivate / activate (creates ephemeral data)
// ---------------------------------------------------------------------------
// Deactivate/reactivate lifecycle against an ephemeral user created via the
// API in beforeAll and best-effort-removed in afterAll.
test.describe("Users page — deactivate & activate", () => {
let testUserEmail: string;
test.beforeAll(async ({ browser }) => {
testUserEmail = uniqueEmail("deact");
await withApiContext(browser, async (api) => {
await api.registerUser(testUserEmail, TEST_PASSWORD);
});
});
// Single test covers the full round trip so state transitions stay ordered:
// Active -> Inactive -> Active.
test("deactivate and then reactivate a user", async ({ usersPage }) => {
await usersPage.goto();
await usersPage.search(testUserEmail);
const row = usersPage.getRowByEmail(testUserEmail);
await expect(row).toBeVisible();
await expect(row).toContainText("Active");
// Deactivate
await usersPage.openRowActions(testUserEmail);
await usersPage.clickRowAction("Deactivate User");
await expect(usersPage.dialog.getByText("Deactivate User")).toBeVisible();
await expect(usersPage.dialog.getByText(testUserEmail)).toBeVisible();
await expect(
usersPage.dialog.getByText("will immediately lose access")
).toBeVisible();
await usersPage.confirmModalAction("Deactivate");
await usersPage.expectToast("User deactivated");
// Verify Inactive
await usersPage.goto();
await usersPage.search(testUserEmail);
const inactiveRow = usersPage.getRowByEmail(testUserEmail);
await expect(inactiveRow).toContainText("Inactive");
// Reactivate
await usersPage.openRowActions(testUserEmail);
await usersPage.clickRowAction("Activate User");
await expect(usersPage.dialog.getByText("Activate User")).toBeVisible();
await usersPage.confirmModalAction("Activate");
await usersPage.expectToast("User activated");
// Verify Active again
await usersPage.goto();
await usersPage.search(testUserEmail);
const reactivatedRow = usersPage.getRowByEmail(testUserEmail);
await expect(reactivatedRow).toContainText("Active");
});
// Cleanup: deactivate first because deletion requires an inactive user
// (see the "delete an inactive user" test below); softCleanup swallows
// failures so teardown never fails the suite.
test.afterAll(async ({ browser }) => {
await withApiContext(browser, async (api) => {
await softCleanup(() => api.deactivateUser(testUserEmail));
await softCleanup(() => api.deleteUser(testUserEmail));
});
});
});
// ---------------------------------------------------------------------------
// Row actions — delete user (creates ephemeral data)
// ---------------------------------------------------------------------------
// Permanent deletion of an inactive user. The user is created and
// deactivated via the API; no afterAll cleanup is needed because the test
// itself deletes the user.
test.describe("Users page — delete user", () => {
test("delete an inactive user", async ({ usersPage, api }) => {
const email = uniqueEmail("delete");
await api.registerUser(email, TEST_PASSWORD);
// Users must be inactive before the Delete action is offered.
await api.deactivateUser(email);
await usersPage.goto();
await usersPage.search(email);
const row = usersPage.getRowByEmail(email);
await expect(row).toBeVisible();
await expect(row).toContainText("Inactive");
await usersPage.openRowActions(email);
await usersPage.clickRowAction("Delete User");
await expect(usersPage.dialog.getByText("Delete User")).toBeVisible();
await expect(
usersPage.dialog.getByText("will be permanently removed")
).toBeVisible();
await usersPage.confirmModalAction("Delete");
await usersPage.expectToast("User deleted");
// User gone
await usersPage.goto();
await usersPage.search(email);
await expect(usersPage.page.getByText("No users found")).toBeVisible();
});
});
// ---------------------------------------------------------------------------
// Row actions — cancel invite (creates ephemeral data)
// ---------------------------------------------------------------------------
// Cancelling a pending invite from the row actions menu. The invite is
// created via the API; the test itself removes it, so no teardown needed.
test.describe("Users page — cancel invite", () => {
test("cancel a pending invite", async ({ usersPage, api }) => {
const email = uniqueEmail("cancel-inv");
await api.inviteUsers([email]);
await usersPage.goto();
await usersPage.search(email);
const row = usersPage.getRowByEmail(email);
await expect(row).toBeVisible();
await expect(row).toContainText("Invite Pending");
await usersPage.openRowActions(email);
await usersPage.clickRowAction("Cancel Invite");
// "Cancel Invite" appears both as the clicked menu entry and as the
// dialog title; .first() avoids a strict-mode violation.
await expect(
usersPage.dialog.getByText("Cancel Invite").first()
).toBeVisible();
await usersPage.confirmModalAction("Cancel Invite");
await usersPage.expectToast("Invite cancelled");
// User gone
await usersPage.goto();
await usersPage.search(email);
await expect(usersPage.page.getByText("No users found")).toBeVisible();
});
});
// ---------------------------------------------------------------------------
// Inline role editing (creates ephemeral data)
// ---------------------------------------------------------------------------
// Inline role dropdown editing against an ephemeral user; round-trips the
// role so the user ends in its original state even on success.
test.describe("Users page — inline role editing", () => {
let testUserEmail: string;
test.beforeAll(async ({ browser }) => {
testUserEmail = uniqueEmail("role");
await withApiContext(browser, async (api) => {
await api.registerUser(testUserEmail, TEST_PASSWORD);
});
});
test("change user role from Basic to Admin and back", async ({
usersPage,
}) => {
await usersPage.goto();
await usersPage.search(testUserEmail);
const row = usersPage.getRowByEmail(testUserEmail);
await expect(row).toBeVisible();
// Initially Basic — newly registered users default to this role.
await expect(row.getByText("Basic")).toBeVisible();
// Change to Admin
await usersPage.openRoleDropdown(testUserEmail);
await usersPage.selectRole("Admin");
await expect(row.getByText("Admin")).toBeVisible();
// Change back to Basic
await usersPage.openRoleDropdown(testUserEmail);
await usersPage.selectRole("Basic");
await expect(row.getByText("Basic")).toBeVisible();
});
// Best-effort teardown: deactivate then delete the ephemeral user.
test.afterAll(async ({ browser }) => {
await withApiContext(browser, async (api) => {
await softCleanup(() => api.deactivateUser(testUserEmail));
await softCleanup(() => api.deleteUser(testUserEmail));
});
});
});
// ---------------------------------------------------------------------------
// Group management (creates ephemeral data)
// ---------------------------------------------------------------------------
// Group membership editing via the "edit groups" modal. An ephemeral user
// and an ephemeral group are created via the API; the two tests are
// order-dependent (add first, then remove the same membership).
test.describe("Users page — group management", () => {
let testUserEmail: string;
let testGroupId: number;
// Timestamped so parallel/failed runs never collide on the group name.
const groupName = `E2E-UsersTest-${Date.now()}`;
test.beforeAll(async ({ browser }) => {
testUserEmail = uniqueEmail("grp");
await withApiContext(browser, async (api) => {
await api.registerUser(testUserEmail, TEST_PASSWORD);
testGroupId = await api.createUserGroup(groupName);
// Mutations against a group are rejected while it is still syncing,
// so block until the sync completes.
await api.waitForGroupSync(testGroupId);
});
});
test("add user to group via edit groups modal", async ({ usersPage }) => {
await usersPage.goto();
await usersPage.search(testUserEmail);
const row = usersPage.getRowByEmail(testUserEmail);
await expect(row).toBeVisible();
await usersPage.openEditGroupsModal(testUserEmail);
await usersPage.searchGroupsInModal(groupName);
await usersPage.toggleGroupInModal(groupName);
await usersPage.saveGroupsModal();
await usersPage.expectToast("User updated");
// Verify group shows in the row
await usersPage.goto();
await usersPage.search(testUserEmail);
const rowWithGroup = usersPage.getRowByEmail(testUserEmail);
await expect(rowWithGroup).toContainText(groupName);
});
// Depends on the previous test having added the membership.
test("remove user from group via edit groups modal", async ({
usersPage,
}) => {
await usersPage.goto();
await usersPage.search(testUserEmail);
const row = usersPage.getRowByEmail(testUserEmail);
await expect(row).toBeVisible();
await usersPage.openEditGroupsModal(testUserEmail);
// Group shows as joined — click to remove
await usersPage.toggleGroupInModal(groupName);
await usersPage.saveGroupsModal();
await usersPage.expectToast("User updated");
// Verify group removed
await usersPage.goto();
await usersPage.search(testUserEmail);
await expect(usersPage.getRowByEmail(testUserEmail)).not.toContainText(
groupName
);
});
// Best-effort teardown: group first, then the user (deactivate before
// delete, since deletion requires an inactive user).
test.afterAll(async ({ browser }) => {
await withApiContext(browser, async (api) => {
await softCleanup(() => api.deleteUserGroup(testGroupId));
await softCleanup(() => api.deactivateUser(testUserEmail));
await softCleanup(() => api.deleteUser(testUserEmail));
});
});
});
// ---------------------------------------------------------------------------
// Stats bar
// ---------------------------------------------------------------------------
// Stats bar above the users table. The second test creates an ephemeral
// invite; cleanup now runs in try/finally so a failed assertion no longer
// leaks the invite into the shared DB. Cleanup is best-effort so a
// cancelInvite failure cannot mask the original test error.
test.describe("Users page — stats bar", () => {
  test("stats bar shows active users count", async ({ usersPage }) => {
    await usersPage.goto();
    // Number and label are separate elements; check for the label
    await expect(usersPage.page.getByText("active users")).toBeVisible();
  });
  test("stats bar updates after inviting a user", async ({
    usersPage,
    api,
  }) => {
    const email = uniqueEmail("stats");
    try {
      await usersPage.goto();
      await usersPage.openInviteModal();
      await usersPage.addInviteEmail(email);
      await usersPage.submitInvite();
      await usersPage.expectToast(/Invited 1 user/);
      // Stats bar should reflect the new invite
      await usersPage.goto();
      await expect(usersPage.page.getByText("pending invites")).toBeVisible();
    } finally {
      // Cleanup — best-effort; ignore failure if the invite never landed.
      await api.cancelInvite(email).catch(() => undefined);
    }
  });
});

View File

@@ -1,181 +0,0 @@
import { expect, test, type Locator, type Page } from "@playwright/test";
import { loginAsWorkerUser } from "@tests/e2e/utils/auth";
import { OnyxApiClient } from "@tests/e2e/utils/onyxApiClient";
import { expectElementScreenshot } from "@tests/e2e/utils/visualRegression";
// Fixture constants for the long-filename visual regression test.
const PROJECT_NAME = "E2E-PROJECT-FILES-VISUAL";
const ATTACHMENT_ITEM_TITLE_TEST_ID = "attachment-item-title";
const ATTACHMENT_ITEM_ICON_WRAPPER_TEST_ID = "attachment-item-icon-wrapper";
// Deliberately long, underscore-separated name to exercise filename
// truncation/containment inside the file card.
const LONG_FILE_NAME =
"CSE_202_Final_Project_Solution_Regression_Check_Long_Name.txt";
const FILE_CONTENT = "Visual regression test content for long filename cards.";
// Shared across beforeAll/afterAll/beforeEach; stays null if setup fails.
let projectId: number | null = null;
// Bounding-box edges (viewport CSS pixels) of a measured element and of its
// enclosing "card" ancestor, as captured by getElementGeometryInCard.
type Geometry = {
elementLeft: number;
elementRight: number;
elementTop: number;
elementBottom: number;
cardLeft: number;
cardRight: number;
cardTop: number;
cardBottom: number;
};
/**
 * Locates the Files section of the project page: the first div that contains
 * both the "Add Files" button and the files-access explainer text.
 */
function getFilesSection(page: Page): Locator {
  const addFilesButton = page.getByRole("button", { name: "Add Files" });
  return page
    .locator("div")
    .filter({ hasText: "Chats in this project can access these files." })
    .filter({ has: addFilesButton })
    .first();
}
/**
 * Uploads a plain-text file into the given project through the REST upload
 * endpoint and asserts the request succeeded.
 *
 * @param page - Authenticated page whose request context carries the session.
 * @param targetProjectId - Project to attach the file to.
 * @param fileName - Name the uploaded file should carry.
 * @param content - UTF-8 file body.
 */
async function uploadFileToProject(
  page: Page,
  targetProjectId: number,
  fileName: string,
  content: string
): Promise<void> {
  // The endpoint expects a multipart form: project id as a string field plus
  // the file payload itself.
  const multipart = {
    project_id: String(targetProjectId),
    files: {
      name: fileName,
      mimeType: "text/plain",
      buffer: Buffer.from(content, "utf-8"),
    },
  };
  const response = await page.request.post("/api/user/projects/file/upload", {
    multipart,
  });
  expect(response.ok()).toBeTruthy();
}
/**
 * Measures, inside the browser, the bounding rects of an element and of its
 * nearest "card" ancestor — the first ancestor that has a visible border
 * (top or left) and a rounded top-left corner. Returns null when no such
 * ancestor exists.
 */
async function getElementGeometryInCard(
  element: Locator
): Promise<Geometry | null> {
  // Everything below runs in the page context via evaluate().
  return element.evaluate((targetEl) => {
    const looksLikeCard = (el: HTMLElement): boolean => {
      const style = window.getComputedStyle(el);
      const hasBorder =
        parseFloat(style.borderTopWidth) > 0 ||
        parseFloat(style.borderLeftWidth) > 0;
      return hasBorder && parseFloat(style.borderTopLeftRadius) > 0;
    };
    // Walk up the ancestor chain until a card-like container is found.
    let ancestor: HTMLElement | null = targetEl.parentElement;
    while (ancestor !== null && !looksLikeCard(ancestor)) {
      ancestor = ancestor.parentElement;
    }
    if (ancestor === null) {
      return null;
    }
    const elementRect = targetEl.getBoundingClientRect();
    const cardRect = ancestor.getBoundingClientRect();
    return {
      elementLeft: elementRect.left,
      elementRight: elementRect.right,
      elementTop: elementRect.top,
      elementBottom: elementRect.bottom,
      cardLeft: cardRect.left,
      cardRight: cardRect.right,
      cardTop: cardRect.top,
      cardBottom: cardRect.bottom,
    };
  });
}
/**
 * Asserts that an element's bounding box lies inside its card's bounding box,
 * with a 1px tolerance on every edge for sub-pixel rendering.
 *
 * @param geometry - Measurement from getElementGeometryInCard; null means no
 *   card ancestor was found, which is itself a failure.
 */
function expectGeometryWithinCard(geometry: Geometry | null): void {
  expect(geometry).not.toBeNull();
  // expect() above throws on null, but TypeScript cannot narrow from it; this
  // explicit guard replaces the scattered non-null (!) assertions and keeps
  // the function type-safe under strict mode.
  if (geometry === null) {
    return;
  }
  const tolerance = 1; // px slack per edge for sub-pixel layout rounding
  expect(geometry.elementLeft).toBeGreaterThanOrEqual(
    geometry.cardLeft - tolerance
  );
  expect(geometry.elementRight).toBeLessThanOrEqual(
    geometry.cardRight + tolerance
  );
  expect(geometry.elementTop).toBeGreaterThanOrEqual(
    geometry.cardTop - tolerance
  );
  expect(geometry.elementBottom).toBeLessThanOrEqual(
    geometry.cardBottom + tolerance
  );
}
// End-to-end visual regression for the Files section: a project with one
// long-named file is created once per suite, screenshotted, and the title
// and icon are asserted to stay inside their card's bounding box.
test.describe("Project Files visual regression", () => {
// One-time setup: create the project and upload the long-named file via a
// throwaway browser context authenticated as the worker user.
test.beforeAll(async ({ browser }, workerInfo) => {
const context = await browser.newContext();
const page = await context.newPage();
await loginAsWorkerUser(page, workerInfo.workerIndex);
const client = new OnyxApiClient(page.request);
projectId = await client.createProject(PROJECT_NAME);
await uploadFileToProject(page, projectId, LONG_FILE_NAME, FILE_CONTENT);
await context.close();
});
// Teardown mirrors setup; skipped entirely when setup never produced a
// project id.
test.afterAll(async ({ browser }, workerInfo) => {
if (!projectId) {
return;
}
const context = await browser.newContext();
const page = await context.newPage();
await loginAsWorkerUser(page, workerInfo.workerIndex);
const client = new OnyxApiClient(page.request);
await client.deleteProject(projectId);
await context.close();
});
// Fresh login per test, then navigate to the project and wait for the
// Files section to render before any geometry/screenshot work.
test.beforeEach(async ({ page }, workerInfo) => {
if (projectId === null) {
throw new Error(
"Project setup failed in beforeAll; cannot run visual regression test"
);
}
await page.context().clearCookies();
await loginAsWorkerUser(page, workerInfo.workerIndex);
await page.goto(`/app?projectId=${projectId}`);
await page.waitForLoadState("networkidle");
await expect(
page.getByText("Chats in this project can access these files.")
).toBeVisible();
});
test("long underscore filename stays visually contained in file card", async ({
page,
}) => {
const filesSection = getFilesSection(page);
await expect(filesSection).toBeVisible();
const fileTitle = filesSection
.locator(`[data-testid="${ATTACHMENT_ITEM_TITLE_TEST_ID}"]`)
.filter({ hasText: LONG_FILE_NAME })
.first();
await expect(fileTitle).toBeVisible();
// Wait for deterministic post-processing state before geometry checks/screenshot.
await expect(fileTitle).not.toContainText("Processing...", {
timeout: 30_000,
});
await expect(fileTitle).not.toContainText("Uploading...", {
timeout: 30_000,
});
await expect(fileTitle).toContainText("TXT", { timeout: 30_000 });
const iconWrapper = filesSection
.locator(`[data-testid="${ATTACHMENT_ITEM_ICON_WRAPPER_TEST_ID}"]`)
.first();
await expect(iconWrapper).toBeVisible();
// Screenshot first (stable state guaranteed above), then numeric
// containment checks as a second, tolerance-based safety net.
await expectElementScreenshot(filesSection, {
name: "project-files-long-underscore-filename",
});
const iconGeometry = await getElementGeometryInCard(iconWrapper);
const titleGeometry = await getElementGeometryInCard(fileTitle);
expectGeometryWithinCard(iconGeometry);
expectGeometryWithinCard(titleGeometry);
});
});

View File

@@ -588,34 +588,6 @@ export class OnyxApiClient {
return responseData.id;
}
/**
 * Polls until a user group has finished syncing (is_up_to_date === true).
 * Newly created groups start syncing immediately; many mutation endpoints
 * reject requests while the group is still syncing.
 *
 * @param groupId - ID of the group to wait for.
 * @param timeout - Maximum time to poll, in milliseconds (default 30s).
 */
async waitForGroupSync(
groupId: number,
timeout: number = 30000
): Promise<void> {
await expect
.poll(
async () => {
// The list endpoint returns every group; find ours by id. A group
// that is missing (not yet listed) counts as "not synced".
const res = await this.get("/manage/admin/user-group");
const groups = await res.json();
const group = groups.find(
(g: { id: number; is_up_to_date: boolean }) => g.id === groupId
);
return group?.is_up_to_date ?? false;
},
{
message: `User group ${groupId} did not finish syncing`,
timeout,
}
)
.toBe(true);
this.log(`User group ${groupId} finished syncing`);
}
/**
* Deletes a user group.
*
@@ -1101,62 +1073,6 @@ export class OnyxApiClient {
);
}
// === User Management Methods ===
/** Deactivates the user with the given email via the admin API. */
async deactivateUser(email: string): Promise<void> {
  const url = `${this.baseUrl}/manage/admin/deactivate-user`;
  const res = await this.request.patch(url, {
    data: { user_email: email },
  });
  await this.handleResponse(res, `Failed to deactivate user ${email}`);
  this.log(`Deactivated user: ${email}`);
}
/** Re-activates a previously deactivated user via the admin API. */
async activateUser(email: string): Promise<void> {
  const url = `${this.baseUrl}/manage/admin/activate-user`;
  const res = await this.request.patch(url, {
    data: { user_email: email },
  });
  await this.handleResponse(res, `Failed to activate user ${email}`);
  this.log(`Activated user: ${email}`);
}
/** Permanently deletes a user via the admin API. */
async deleteUser(email: string): Promise<void> {
  const url = `${this.baseUrl}/manage/admin/delete-user`;
  // This endpoint takes the email in the DELETE request body.
  const res = await this.request.delete(url, {
    data: { user_email: email },
  });
  await this.handleResponse(res, `Failed to delete user ${email}`);
  this.log(`Deleted user: ${email}`);
}
/** Revokes a pending invite for the given email via the admin API. */
async cancelInvite(email: string): Promise<void> {
  const url = `${this.baseUrl}/manage/admin/remove-invited-user`;
  const res = await this.request.patch(url, {
    data: { user_email: email },
  });
  await this.handleResponse(res, `Failed to cancel invite for ${email}`);
  this.log(`Cancelled invite for: ${email}`);
}
/** Invites every email in the list as a new user in one request. */
async inviteUsers(emails: string[]): Promise<void> {
  const res = await this.put("/manage/admin/users", { emails });
  await this.handleResponse(res, `Failed to invite users`);
  this.log(`Invited users: ${emails.join(", ")}`);
}
/** Sets the current user's display name via the personalization endpoint. */
async setPersonalName(name: string): Promise<void> {
  const url = `${this.baseUrl}/user/personalization`;
  const res = await this.request.patch(url, { data: { name } });
  await this.handleResponse(
    res,
    `Failed to set personal name to ${name}`
  );
  this.log(`Set personal name: ${name}`);
}
// === Chat Session Methods ===
/**