Compare commits

..

31 Commits

Author SHA1 Message Date
Jessica Singh
89593b353f chore(auth): backend cleanup (#8558)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-03-10 01:05:29 +00:00
Jamison Lahman
91e24ae63a fix(code-interpreter): set default CODE_INTERPRETER_BASE_URL w/ docke… (#9215) 2026-03-10 00:50:10 +00:00
Jamison Lahman
d2b37724d1 fix(fe): fix chat content padding (#9216) 2026-03-10 00:48:53 +00:00
acaprau
87f0849330 feat(opensearch): Enable by default (#9211) 2026-03-09 17:30:32 -07:00
Bo-Onyx
2ec7526772 fix(api memory): replace glibc with jemalloc for memory allocating (#9196) 2026-03-10 00:02:31 +00:00
Wenxi
bbd68e2795 fix: improper kv store strings (#9213) 2026-03-09 23:48:44 +00:00
Nikolas Garza
e74c36001a feat(storybook): add Storybook infrastructure - 1/3 (#9195) 2026-03-09 15:55:05 -07:00
Jamison Lahman
fe593a15da fix(safari): Search results don't shrink (#9126) 2026-03-09 21:04:20 +00:00
Wenxi
27df690a8d fix: discord connector async resource cleanup (#9203) 2026-03-09 20:46:58 +00:00
Wenxi
edbe569edd fix: don't fetch mcp tools when no llms are configured (#9173) 2026-03-09 20:45:55 +00:00
Jamison Lahman
5118193d16 fix(fe): move app padding inside overflow container (#9206) 2026-03-09 20:38:47 +00:00
Raunak Bhagat
63d3efd380 refactor: default width from w-autow-fit (#9146)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-09 19:58:16 +00:00
Wenxi
ec978d9a3f fix(mcp): use CE-compatible chat endpoint for search_indexed_documents (#9193)
Co-authored-by: Fizza-Mukhtar <fizzamukhtar01@gmail.com>
2026-03-09 19:44:08 +00:00
dependabot[bot]
d4d98a6cd0 chore(deps): bump hashicorp/setup-terraform from 3.1.2 to 4.0.0 (#9198)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-09 12:42:30 -07:00
dependabot[bot]
dc40e86dac chore(deps): bump actions/download-artifact from 7.0.0 to 8.0.0 (#9199)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-09 12:41:21 -07:00
dependabot[bot]
e495f7a13e chore(deps): bump astral-sh/setup-uv from 7.2.0 to 7.3.1 (#9200)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-09 12:40:58 -07:00
Wenxi
4761e4b132 fix: fallback doc access when drive item is externally owned (#9053) 2026-03-09 17:58:14 +00:00
Raunak Bhagat
6b5ab54b85 feat: add LineItemButton component (#9137)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-09 17:49:37 +00:00
Wenxi
959cf444f8 fix: set event hook for wrapping values into SensitiveValue (#9177) 2026-03-09 17:37:33 +00:00
Wenxi
2ebccea6d6 fix: move available context tokens to useChatController and remove arbitrary 50% cap (#9174) 2026-03-09 16:32:28 +00:00
Wenxi
5fe7a474db chore: update decryption utility (#9176) 2026-03-09 16:32:14 +00:00
Wenxi
9d7dc3da21 fix: ph ssl upgrade on redirect for local development (#9175) 2026-03-08 23:35:59 +00:00
Wenxi
2899be4c5e fix: remove unnecessary multitenant check in migration (#9172)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-08 20:53:11 +00:00
Nikolas Garza
64ee7fc23f fix(fe): fix broken slack bot admin pages (#9168) 2026-03-08 20:11:17 +00:00
Justin Tahara
e07764285d chore(llm): Adding Integration test for Model state cache 2/2 (#9142) 2026-03-08 19:07:11 +00:00
Justin Tahara
cc2e6ffa8a fix(user files): Add frontend precheck for oversized user uploads 3/3 (#9159) 2026-03-08 18:47:25 +00:00
Justin Tahara
d3ee5c9b59 fix(user files): Enforce user upload file size limit in projects/chat upload path 2/3 (#9158) 2026-03-08 17:42:44 +00:00
Justin Tahara
dfa0efc093 fix(user files): Add configurable user file max upload size setting 1/3 (#9157) 2026-03-08 17:01:55 +00:00
Danelegend
9aad4077f1 feat: Tool call arg streaming (#9095) 2026-03-07 09:02:39 +00:00
Wenxi
29d9ebf7b3 feat: rotate encryption key utility (#9162) 2026-03-07 06:17:21 +00:00
Jamison Lahman
f1df36e306 feat(cli): package as docker image (#9167)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-07 03:18:47 +00:00
127 changed files with 5399 additions and 729 deletions

View File

@@ -151,7 +151,7 @@ jobs:
fetch-depth: 0
- name: Setup uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
version: "0.9.9"
# NOTE: This isn't caching much and zizmor suggests this could be poisoned, so disable.

View File

@@ -70,7 +70,7 @@ jobs:
- name: Install the latest version of uv
if: steps.gate.outputs.should_cherrypick == 'true'
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"

View File

@@ -316,6 +316,7 @@ jobs:
# Base config shared by both editions
cat <<EOF > deployment/docker_compose/.env
COMPOSE_PROFILES=s3-filestore
OPENSEARCH_FOR_ONYX_ENABLED=false
AUTH_TYPE=basic
POSTGRES_POOL_PRE_PING=true
POSTGRES_USE_NULL_POOL=true
@@ -418,6 +419,7 @@ jobs:
-e POSTGRES_POOL_PRE_PING=true \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=false \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
@@ -637,6 +639,7 @@ jobs:
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} \
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID} \
DEV_MODE=true \
OPENSEARCH_FOR_ONYX_ENABLED=false \
docker compose -f docker-compose.multitenant-dev.yml up \
relational_db \
index \
@@ -691,6 +694,7 @@ jobs:
-e POSTGRES_DB=postgres \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=false \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \

View File

@@ -468,7 +468,7 @@ jobs:
- name: Install the latest version of uv
if: always()
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"
@@ -707,7 +707,7 @@ jobs:
pull-requests: write
steps:
- name: Download visual diff summaries
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3
with:
pattern: screenshot-diff-summary-*
path: summaries/

View File

@@ -28,7 +28,7 @@ jobs:
with:
python-version: "3.11"
- name: Setup Terraform
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # ratchet:hashicorp/setup-terraform@v3
uses: hashicorp/setup-terraform@5e8dbf3c6d9deaf4193ca7a8fb23f2ac83bb6c85 # ratchet:hashicorp/setup-terraform@v4.0.0
- name: Setup node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v6
with: # zizmor: ignore[cache-poisoning]

View File

@@ -26,7 +26,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
- uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"
@@ -37,3 +37,178 @@ jobs:
working-directory: cli
- run: uv publish
working-directory: cli
docker-amd64:
runs-on:
- runs-on
- runner=2cpu-linux-x64
- run-id=${{ github.run_id }}-cli-amd64
- extras=ecr-cache
environment: deploy
permissions:
id-token: write
timeout-minutes: 30
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
REGISTRY_IMAGE: onyxdotapp/onyx-cli
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # ratchet:aws-actions/configure-aws-credentials@v6.0.0
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 # ratchet:aws-actions/aws-secretsmanager-get-secrets@v2.0.10
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # ratchet:docker/setup-buildx-action@v4
- name: Login to Docker Hub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # ratchet:docker/login-action@v4
with:
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push AMD64
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # ratchet:docker/build-push-action@v7
with:
context: ./cli
file: ./cli/Dockerfile
platforms: linux/amd64
cache-from: type=registry,ref=${{ env.REGISTRY_IMAGE }}:latest
cache-to: type=inline
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
docker-arm64:
runs-on:
- runs-on
- runner=2cpu-linux-arm64
- run-id=${{ github.run_id }}-cli-arm64
- extras=ecr-cache
environment: deploy
permissions:
id-token: write
timeout-minutes: 30
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
REGISTRY_IMAGE: onyxdotapp/onyx-cli
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # ratchet:aws-actions/configure-aws-credentials@v6.0.0
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 # ratchet:aws-actions/aws-secretsmanager-get-secrets@v2.0.10
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # ratchet:docker/setup-buildx-action@v4
- name: Login to Docker Hub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # ratchet:docker/login-action@v4
with:
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push ARM64
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # ratchet:docker/build-push-action@v7
with:
context: ./cli
file: ./cli/Dockerfile
platforms: linux/arm64
cache-from: type=registry,ref=${{ env.REGISTRY_IMAGE }}:latest
cache-to: type=inline
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
merge-docker:
needs:
- docker-amd64
- docker-arm64
runs-on:
- runs-on
- runner=2cpu-linux-x64
- run-id=${{ github.run_id }}-cli-merge
environment: deploy
permissions:
id-token: write
timeout-minutes: 10
env:
REGISTRY_IMAGE: onyxdotapp/onyx-cli
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # ratchet:aws-actions/configure-aws-credentials@v6.0.0
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 # ratchet:aws-actions/aws-secretsmanager-get-secrets@v2.0.10
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # ratchet:docker/setup-buildx-action@v4
- name: Login to Docker Hub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # ratchet:docker/login-action@v4
with:
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Create and push manifest
env:
AMD64_DIGEST: ${{ needs.docker-amd64.outputs.digest }}
ARM64_DIGEST: ${{ needs.docker-arm64.outputs.digest }}
TAG: ${{ github.ref_name }}
run: |
SANITIZED_TAG="${TAG#cli/}"
IMAGES=(
"${REGISTRY_IMAGE}@${AMD64_DIGEST}"
"${REGISTRY_IMAGE}@${ARM64_DIGEST}"
)
if [[ "$TAG" =~ ^cli/v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
docker buildx imagetools create \
-t "${REGISTRY_IMAGE}:${SANITIZED_TAG}" \
-t "${REGISTRY_IMAGE}:latest" \
"${IMAGES[@]}"
else
docker buildx imagetools create \
-t "${REGISTRY_IMAGE}:${SANITIZED_TAG}" \
"${IMAGES[@]}"
fi

View File

@@ -26,7 +26,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
- uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"

View File

@@ -24,7 +24,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"

View File

@@ -46,7 +46,9 @@ RUN apt-get update && \
pkg-config \
gcc \
nano \
vim && \
vim \
libjemalloc2 \
&& \
rm -rf /var/lib/apt/lists/* && \
apt-get clean
@@ -141,6 +143,7 @@ COPY --chown=onyx:onyx ./scripts/debugging /app/scripts/debugging
COPY --chown=onyx:onyx ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
COPY --chown=onyx:onyx ./scripts/supervisord_entrypoint.sh /app/scripts/supervisord_entrypoint.sh
COPY --chown=onyx:onyx ./scripts/setup_craft_templates.sh /app/scripts/setup_craft_templates.sh
COPY --chown=onyx:onyx ./scripts/reencrypt_secrets.py /app/scripts/reencrypt_secrets.py
RUN chmod +x /app/scripts/supervisord_entrypoint.sh /app/scripts/setup_craft_templates.sh
# Run Craft template setup at build time when ENABLE_CRAFT=true
@@ -164,6 +167,13 @@ ENV PYTHONPATH=/app
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION}
# Use jemalloc instead of glibc malloc to reduce memory fragmentation
# in long-running Python processes (API server, Celery workers).
# The soname is architecture-independent; the dynamic linker resolves
# the correct path from standard library directories.
# Placed after all RUN steps so build-time processes are unaffected.
ENV LD_PRELOAD=libjemalloc.so.2
# Default command which does nothing
# This container is used by api server and background which specify their own CMD
CMD ["tail", "-f", "/dev/null"]

View File

@@ -11,7 +11,6 @@ from sqlalchemy import text
from alembic import op
from onyx.configs.app_configs import DB_READONLY_PASSWORD
from onyx.configs.app_configs import DB_READONLY_USER
from shared_configs.configs import MULTI_TENANT
# revision identifiers, used by Alembic.
@@ -22,59 +21,52 @@ depends_on = None
def upgrade() -> None:
if MULTI_TENANT:
# Enable pg_trgm extension if not already enabled
op.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
# Enable pg_trgm extension if not already enabled
op.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
# Create the read-only db user if it does not already exist.
if not (DB_READONLY_USER and DB_READONLY_PASSWORD):
raise Exception("DB_READONLY_USER or DB_READONLY_PASSWORD is not set")
# Create read-only db user here only in multi-tenant mode. For single-tenant mode,
# the user is created in the standard migration.
if not (DB_READONLY_USER and DB_READONLY_PASSWORD):
raise Exception("DB_READONLY_USER or DB_READONLY_PASSWORD is not set")
op.execute(
text(
f"""
DO $$
BEGIN
-- Check if the read-only user already exists
IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '{DB_READONLY_USER}') THEN
-- Create the read-only user with the specified password
EXECUTE format('CREATE USER %I WITH PASSWORD %L', '{DB_READONLY_USER}', '{DB_READONLY_PASSWORD}');
-- First revoke all privileges to ensure a clean slate
EXECUTE format('REVOKE ALL ON DATABASE %I FROM %I', current_database(), '{DB_READONLY_USER}');
-- Grant only the CONNECT privilege to allow the user to connect to the database
-- but not perform any operations without additional specific grants
EXECUTE format('GRANT CONNECT ON DATABASE %I TO %I', current_database(), '{DB_READONLY_USER}');
END IF;
END
$$;
"""
)
)
def downgrade() -> None:
if MULTI_TENANT:
# Drop read-only db user here only in single tenant mode. For multi-tenant mode,
# the user is dropped in the alembic_tenants migration.
op.execute(
text(
f"""
op.execute(
text(
f"""
DO $$
BEGIN
IF EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '{DB_READONLY_USER}') THEN
-- First revoke all privileges from the database
-- Check if the read-only user already exists
IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '{DB_READONLY_USER}') THEN
-- Create the read-only user with the specified password
EXECUTE format('CREATE USER %I WITH PASSWORD %L', '{DB_READONLY_USER}', '{DB_READONLY_PASSWORD}');
-- First revoke all privileges to ensure a clean slate
EXECUTE format('REVOKE ALL ON DATABASE %I FROM %I', current_database(), '{DB_READONLY_USER}');
-- Then revoke all privileges from the public schema
EXECUTE format('REVOKE ALL ON SCHEMA public FROM %I', '{DB_READONLY_USER}');
-- Then drop the user
EXECUTE format('DROP USER %I', '{DB_READONLY_USER}');
-- Grant only the CONNECT privilege to allow the user to connect to the database
-- but not perform any operations without additional specific grants
EXECUTE format('GRANT CONNECT ON DATABASE %I TO %I', current_database(), '{DB_READONLY_USER}');
END IF;
END
$$;
"""
)
"""
)
op.execute(text("DROP EXTENSION IF EXISTS pg_trgm"))
)
def downgrade() -> None:
op.execute(
text(
f"""
DO $$
BEGIN
IF EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '{DB_READONLY_USER}') THEN
-- First revoke all privileges from the database
EXECUTE format('REVOKE ALL ON DATABASE %I FROM %I', current_database(), '{DB_READONLY_USER}');
-- Then revoke all privileges from the public schema
EXECUTE format('REVOKE ALL ON SCHEMA public FROM %I', '{DB_READONLY_USER}');
-- Then drop the user
EXECUTE format('DROP USER %I', '{DB_READONLY_USER}');
END IF;
END
$$;
"""
)
)
op.execute(text("DROP EXTENSION IF EXISTS pg_trgm"))

View File

@@ -1,3 +1,4 @@
import os
from datetime import datetime
import jwt
@@ -20,7 +21,13 @@ logger = setup_logger()
def verify_auth_setting() -> None:
# All the Auth flows are valid for EE version
# All the Auth flows are valid for EE version, but warn about deprecated 'disabled'
raw_auth_type = (os.environ.get("AUTH_TYPE") or "").lower()
if raw_auth_type == "disabled":
logger.warning(
"AUTH_TYPE='disabled' is no longer supported. "
"Using 'basic' instead. Please update your configuration."
)
logger.notice(f"Using Auth Type: {AUTH_TYPE.value}")

View File

@@ -18,7 +18,7 @@ from onyx.db.models import HierarchyNode
def _build_hierarchy_access_filter(
user_email: str | None,
user_email: str,
external_group_ids: list[str],
) -> ColumnElement[bool]:
"""Build SQLAlchemy filter for hierarchy node access.
@@ -43,7 +43,7 @@ def _build_hierarchy_access_filter(
def _get_accessible_hierarchy_nodes_for_source(
db_session: Session,
source: DocumentSource,
user_email: str | None,
user_email: str,
external_group_ids: list[str],
) -> list[HierarchyNode]:
"""

View File

@@ -68,6 +68,7 @@ def get_external_access_for_raw_gdrive_file(
company_domain: str,
retriever_drive_service: GoogleDriveService | None,
admin_drive_service: GoogleDriveService,
fallback_user_email: str,
add_prefix: bool = False,
) -> ExternalAccess:
"""
@@ -79,6 +80,11 @@ def get_external_access_for_raw_gdrive_file(
set add_prefix to True so group IDs are prefixed with the source type.
When invoked from doc_sync (permission sync), use the default (False)
since upsert_document_external_perms handles prefixing.
fallback_user_email: When we cannot retrieve any permission info for a file
(e.g. externally-owned files where the API returns no permissions
and permissions.list returns 403), fall back to granting access
to this user. This is typically the impersonated org user whose
drive contained the file.
"""
doc_id = file.get("id")
if not doc_id:
@@ -117,6 +123,26 @@ def get_external_access_for_raw_gdrive_file(
[permissions_list, backup_permissions_list]
)
# For externally-owned files, the Drive API may return no permissions
# and permissions.list may return 403. In this case, fall back to
# granting access to the user who found the file in their drive.
# Note, even if other users also have access to this file,
# they will not be granted access in Onyx.
# We check permissions_list (the final result after all fetch attempts)
# rather than the raw fields, because permission_ids may be present
# but the actual fetch can still return empty due to a 403.
if not permissions_list:
logger.info(
f"No permission info available for file {doc_id} "
f"(likely owned by a user outside of your organization). "
f"Falling back to granting access to retriever user: {fallback_user_email}"
)
return ExternalAccess(
external_user_emails={fallback_user_email},
external_user_group_ids=set(),
is_public=False,
)
folder_ids_to_inherit_permissions_from: set[str] = set()
user_emails: set[str] = set()
group_emails: set[str] = set()

View File

@@ -27,6 +27,8 @@ def _get_trimmed_key(key: str) -> bytes:
if key_length >= size:
return encoded_key[:size]
raise AssertionError("unreachable")
def _encrypt_string(input_str: str, key: str | None = None) -> bytes:
effective_key = key if key is not None else ENCRYPTION_KEY_SECRET
@@ -69,13 +71,20 @@ def _decrypt_bytes(input_bytes: bytes, key: str | None = None) -> str:
if key is not None:
# Explicit key was provided — don't fall back silently
raise
# Read path: fall back to raw decode for key rotation compatibility.
# Run the re-encrypt-secrets script to rotate to the current key.
# Read path: attempt raw UTF-8 decode as a fallback for legacy data.
# Does NOT handle data encrypted with a different key — that
# ciphertext is not valid UTF-8 and will raise below.
logger.warning(
"AES decryption failed — falling back to raw decode. "
"Run the re-encrypt secrets script to rotate to the current key."
)
return input_bytes.decode()
try:
return input_bytes.decode()
except UnicodeDecodeError:
raise ValueError(
"Data is not valid UTF-8 — likely encrypted with a different key. "
"Run the re-encrypt secrets script to rotate to the current key."
) from None
def encrypt_string_to_bytes(input_str: str, key: str | None = None) -> bytes:

View File

@@ -1,4 +1,5 @@
import json
import os
import random
import secrets
import string
@@ -145,10 +146,22 @@ def is_user_admin(user: User) -> bool:
def verify_auth_setting() -> None:
if AUTH_TYPE == AuthType.CLOUD:
"""Log warnings for AUTH_TYPE issues.
This only runs on app startup, not during migrations/scripts.
"""
raw_auth_type = (os.environ.get("AUTH_TYPE") or "").lower()
if raw_auth_type == "cloud":
raise ValueError(
f"{AUTH_TYPE.value} is not a valid auth type for self-hosted deployments."
"'cloud' is not a valid auth type for self-hosted deployments."
)
if raw_auth_type == "disabled":
logger.warning(
"AUTH_TYPE='disabled' is no longer supported. "
"Using 'basic' instead. Please update your configuration."
)
logger.notice(f"Using Auth Type: {AUTH_TYPE.value}")

View File

@@ -15,6 +15,7 @@ from onyx.chat.citation_processor import DynamicCitationProcessor
from onyx.chat.emitter import Emitter
from onyx.chat.models import ChatMessageSimple
from onyx.chat.models import LlmStepResult
from onyx.chat.tool_call_args_streaming import maybe_emit_argument_delta
from onyx.configs.app_configs import LOG_ONYX_MODEL_INTERACTIONS
from onyx.configs.app_configs import PROMPT_CACHE_CHAT_HISTORY
from onyx.configs.constants import MessageType
@@ -54,6 +55,7 @@ from onyx.server.query_and_chat.streaming_models import ReasoningStart
from onyx.tools.models import ToolCallKickoff
from onyx.tracing.framework.create import generation_span
from onyx.utils.b64 import get_image_type_from_bytes
from onyx.utils.jsonriver import Parser
from onyx.utils.logger import setup_logger
from onyx.utils.postgres_sanitization import sanitize_string
from onyx.utils.text_processing import find_all_json_objects
@@ -1009,6 +1011,7 @@ def run_llm_step_pkt_generator(
)
id_to_tool_call_map: dict[int, dict[str, Any]] = {}
arg_parsers: dict[int, Parser] = {}
reasoning_start = False
answer_start = False
accumulated_reasoning = ""
@@ -1215,7 +1218,14 @@ def run_llm_step_pkt_generator(
yield from _close_reasoning_if_active()
for tool_call_delta in delta.tool_calls:
# maybe_emit depends on update being called first and attaching the delta
_update_tool_call_with_delta(id_to_tool_call_map, tool_call_delta)
yield from maybe_emit_argument_delta(
tool_calls_in_progress=id_to_tool_call_map,
tool_call_delta=tool_call_delta,
placement=_current_placement(),
parsers=arg_parsers,
)
# Flush any tail text buffered while checking for split "<function_calls" markers.
filtered_content_tail = xml_tool_call_content_filter.flush()

View File

@@ -0,0 +1,77 @@
from collections.abc import Generator
from collections.abc import Mapping
from typing import Any
from typing import Type
from onyx.llm.model_response import ChatCompletionDeltaToolCall
from onyx.server.query_and_chat.placement import Placement
from onyx.server.query_and_chat.streaming_models import Packet
from onyx.server.query_and_chat.streaming_models import ToolCallArgumentDelta
from onyx.tools.built_in_tools import TOOL_NAME_TO_CLASS
from onyx.tools.interface import Tool
from onyx.utils.jsonriver import Parser
def _get_tool_class(
tool_calls_in_progress: Mapping[int, Mapping[str, Any]],
tool_call_delta: ChatCompletionDeltaToolCall,
) -> Type[Tool] | None:
"""Look up the Tool subclass for a streaming tool call delta."""
tool_name = tool_calls_in_progress.get(tool_call_delta.index, {}).get("name")
if not tool_name:
return None
return TOOL_NAME_TO_CLASS.get(tool_name)
def maybe_emit_argument_delta(
tool_calls_in_progress: Mapping[int, Mapping[str, Any]],
tool_call_delta: ChatCompletionDeltaToolCall,
placement: Placement,
parsers: dict[int, Parser],
) -> Generator[Packet, None, None]:
"""Emit decoded tool-call argument deltas to the frontend.
Uses a ``jsonriver.Parser`` per tool-call index to incrementally parse
the JSON argument string and extract only the newly-appended content
for each string-valued argument.
NOTE: Non-string arguments (numbers, booleans, null, arrays, objects)
are skipped — they are available in the final tool-call kickoff packet.
``parsers`` is a mutable dict keyed by tool-call index. A new
``Parser`` is created automatically for each new index.
"""
tool_cls = _get_tool_class(tool_calls_in_progress, tool_call_delta)
if not tool_cls or not tool_cls.should_emit_argument_deltas():
return
fn = tool_call_delta.function
delta_fragment = fn.arguments if fn else None
if not delta_fragment:
return
idx = tool_call_delta.index
if idx not in parsers:
parsers[idx] = Parser()
parser = parsers[idx]
deltas = parser.feed(delta_fragment)
argument_deltas: dict[str, str] = {}
for delta in deltas:
if isinstance(delta, dict):
for key, value in delta.items():
if isinstance(value, str):
argument_deltas[key] = argument_deltas.get(key, "") + value
if not argument_deltas:
return
tc_data = tool_calls_in_progress[tool_call_delta.index]
yield Packet(
placement=placement,
obj=ToolCallArgumentDelta(
tool_type=tc_data.get("name", ""),
argument_deltas=argument_deltas,
),
)

View File

@@ -68,6 +68,10 @@ FILE_TOKEN_COUNT_THRESHOLD = int(
os.environ.get("FILE_TOKEN_COUNT_THRESHOLD", str(_DEFAULT_FILE_TOKEN_LIMIT))
)
# Maximum upload size for a single user file (chat/projects) in MB.
USER_FILE_MAX_UPLOAD_SIZE_MB = int(os.environ.get("USER_FILE_MAX_UPLOAD_SIZE_MB") or 50)
USER_FILE_MAX_UPLOAD_SIZE_BYTES = USER_FILE_MAX_UPLOAD_SIZE_MB * 1024 * 1024
# If set to true, will show extra/uncommon connectors in the "Other" category
SHOW_EXTRA_CONNECTORS = os.environ.get("SHOW_EXTRA_CONNECTORS", "").lower() == "true"
@@ -92,19 +96,12 @@ WEB_DOMAIN = os.environ.get("WEB_DOMAIN") or "http://localhost:3000"
#####
# Auth Configs
#####
# Upgrades users from disabled auth to basic auth and shows warning.
_auth_type_str = (os.environ.get("AUTH_TYPE") or "basic").lower()
if _auth_type_str == "disabled":
logger.warning(
"AUTH_TYPE='disabled' is no longer supported. "
"Defaulting to 'basic'. Please update your configuration. "
"Your existing data will be migrated automatically."
)
_auth_type_str = AuthType.BASIC.value
try:
# Silently default to basic - warnings/errors logged in verify_auth_setting()
# which only runs on app startup, not during migrations/scripts
_auth_type_str = (os.environ.get("AUTH_TYPE") or "").lower()
if _auth_type_str in [auth_type.value for auth_type in AuthType]:
AUTH_TYPE = AuthType(_auth_type_str)
except ValueError:
logger.error(f"Invalid AUTH_TYPE: {_auth_type_str}. Defaulting to 'basic'.")
else:
AUTH_TYPE = AuthType.BASIC
PASSWORD_MIN_LENGTH = int(os.getenv("PASSWORD_MIN_LENGTH", 8))
@@ -288,8 +285,9 @@ OPENSEARCH_TEXT_ANALYZER = os.environ.get("OPENSEARCH_TEXT_ANALYZER") or "englis
# environments we always want to be dual indexing into both OpenSearch and Vespa
# to stress test the new codepaths. Only enable this if there is some instance
# of OpenSearch running for the relevant Onyx instance.
# NOTE: Now enabled by default, unless the env indicates otherwise.
ENABLE_OPENSEARCH_INDEXING_FOR_ONYX = (
os.environ.get("ENABLE_OPENSEARCH_INDEXING_FOR_ONYX", "").lower() == "true"
os.environ.get("ENABLE_OPENSEARCH_INDEXING_FOR_ONYX", "true").lower() == "true"
)
# NOTE: This effectively does nothing anymore, admins can now toggle whether
# retrieval is through OpenSearch. This value is only used as a final fallback

View File

@@ -1,4 +1,5 @@
import asyncio
from collections.abc import AsyncGenerator
from collections.abc import AsyncIterable
from collections.abc import Iterable
from datetime import datetime
@@ -204,7 +205,7 @@ def _manage_async_retrieval(
end_time: datetime | None = end
async def _async_fetch() -> AsyncIterable[Document]:
async def _async_fetch() -> AsyncGenerator[Document, None]:
intents = Intents.default()
intents.message_content = True
async with Client(intents=intents) as discord_client:
@@ -227,22 +228,23 @@ def _manage_async_retrieval(
def run_and_yield() -> Iterable[Document]:
loop = asyncio.new_event_loop()
async_gen = _async_fetch()
try:
# Get the async generator
async_gen = _async_fetch()
# Convert to AsyncIterator
async_iter = async_gen.__aiter__()
while True:
try:
# Create a coroutine by calling anext with the async iterator
next_coro = anext(async_iter)
# Run the coroutine to get the next document
doc = loop.run_until_complete(next_coro)
doc = loop.run_until_complete(anext(async_gen))
yield doc
except StopAsyncIteration:
break
finally:
loop.close()
# Must close the async generator before the loop so the Discord
# client's `async with` block can await its shutdown coroutine.
# The nested try/finally ensures the loop always closes even if
# aclose() raises (same pattern as cursor.close() before conn.close()).
try:
loop.run_until_complete(async_gen.aclose())
finally:
loop.close()
return run_and_yield()

View File

@@ -1722,6 +1722,7 @@ class GoogleDriveConnector(
primary_admin_email=self.primary_admin_email,
google_domain=self.google_domain,
),
retriever_email=file.user_email,
):
slim_batch.append(doc)

View File

@@ -476,6 +476,7 @@ def _get_external_access_for_raw_gdrive_file(
company_domain: str,
retriever_drive_service: GoogleDriveService | None,
admin_drive_service: GoogleDriveService,
fallback_user_email: str,
add_prefix: bool = False,
) -> ExternalAccess:
"""
@@ -484,6 +485,8 @@ def _get_external_access_for_raw_gdrive_file(
add_prefix: When True, prefix group IDs with source type (for indexing path).
When False (default), leave unprefixed (for permission sync path
where upsert_document_external_perms handles prefixing).
fallback_user_email: When permission info can't be retrieved (e.g. externally-owned
files), fall back to granting access to this user.
"""
external_access_fn = cast(
Callable[
@@ -492,6 +495,7 @@ def _get_external_access_for_raw_gdrive_file(
str,
GoogleDriveService | None,
GoogleDriveService,
str,
bool,
],
ExternalAccess,
@@ -507,6 +511,7 @@ def _get_external_access_for_raw_gdrive_file(
company_domain,
retriever_drive_service,
admin_drive_service,
fallback_user_email,
add_prefix,
)
@@ -672,6 +677,7 @@ def _convert_drive_item_to_document(
creds, user_email=permission_sync_context.primary_admin_email
),
add_prefix=True, # Indexing path - prefix here
fallback_user_email=retriever_email,
)
if permission_sync_context
else None
@@ -753,6 +759,7 @@ def build_slim_document(
# if not specified, we will not sync permissions
# will also be a no-op if EE is not enabled
permission_sync_context: PermissionSyncContext | None,
retriever_email: str,
) -> SlimDocument | None:
if file.get("mimeType") in [DRIVE_FOLDER_TYPE, DRIVE_SHORTCUT_TYPE]:
return None
@@ -774,6 +781,7 @@ def build_slim_document(
creds,
user_email=permission_sync_context.primary_admin_email,
),
fallback_user_email=retriever_email,
)
if permission_sync_context
else None

View File

@@ -44,6 +44,7 @@ from onyx.connectors.google_utils.shared_constants import (
from onyx.db.credentials import update_credential_json
from onyx.db.models import User
from onyx.key_value_store.factory import get_kv_store
from onyx.key_value_store.interface import unwrap_str
from onyx.server.documents.models import CredentialBase
from onyx.server.documents.models import GoogleAppCredentials
from onyx.server.documents.models import GoogleServiceAccountKey
@@ -89,7 +90,7 @@ def _get_current_oauth_user(creds: OAuthCredentials, source: DocumentSource) ->
def verify_csrf(credential_id: int, state: str) -> None:
csrf = get_kv_store().load(KV_CRED_KEY.format(str(credential_id)))
csrf = unwrap_str(get_kv_store().load(KV_CRED_KEY.format(str(credential_id))))
if csrf != state:
raise PermissionError(
"State from Google Drive Connector callback does not match expected"
@@ -178,7 +179,9 @@ def get_auth_url(credential_id: int, source: DocumentSource) -> str:
params = parse_qs(parsed_url.query)
get_kv_store().store(
KV_CRED_KEY.format(credential_id), params.get("state", [None])[0], encrypt=True
KV_CRED_KEY.format(credential_id),
{"value": params.get("state", [None])[0]},
encrypt=True,
)
return str(auth_url)

View File

@@ -458,7 +458,7 @@ def get_all_hierarchy_nodes_for_source(
def _get_accessible_hierarchy_nodes_for_source(
db_session: Session,
source: DocumentSource,
user_email: str | None, # noqa: ARG001
user_email: str, # noqa: ARG001
external_group_ids: list[str], # noqa: ARG001
) -> list[HierarchyNode]:
"""
@@ -485,7 +485,7 @@ def _get_accessible_hierarchy_nodes_for_source(
def get_accessible_hierarchy_nodes_for_source(
db_session: Session,
source: DocumentSource,
user_email: str | None,
user_email: str,
external_group_ids: list[str],
) -> list[HierarchyNode]:
"""

View File

@@ -36,9 +36,11 @@ from sqlalchemy import Text
from sqlalchemy import text
from sqlalchemy import UniqueConstraint
from sqlalchemy.dialects import postgresql
from sqlalchemy import event
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import Mapper
from sqlalchemy.orm import mapped_column
from sqlalchemy.orm import relationship
from sqlalchemy.types import LargeBinary
@@ -117,10 +119,50 @@ class Base(DeclarativeBase):
__abstract__ = True
class EncryptedString(TypeDecorator):
class _EncryptedBase(TypeDecorator):
"""Base for encrypted column types that wrap values in SensitiveValue."""
impl = LargeBinary
# This type's behavior is fully deterministic and doesn't depend on any external factors.
cache_ok = True
_is_json: bool = False
def wrap_raw(self, value: Any) -> SensitiveValue:
"""Encrypt a raw value and wrap it in SensitiveValue.
Called by the attribute set event so the Python-side type is always
SensitiveValue, regardless of whether the value was loaded from the DB
or assigned in application code.
"""
if self._is_json:
if not isinstance(value, dict):
raise TypeError(
f"EncryptedJson column expected dict, got {type(value).__name__}"
)
raw_str = json.dumps(value)
else:
if not isinstance(value, str):
raise TypeError(
f"EncryptedString column expected str, got {type(value).__name__}"
)
raw_str = value
return SensitiveValue(
encrypted_bytes=encrypt_string_to_bytes(raw_str),
decrypt_fn=decrypt_bytes_to_string,
is_json=self._is_json,
)
def compare_values(self, x: Any, y: Any) -> bool:
if x is None or y is None:
return x == y
if isinstance(x, SensitiveValue):
x = x.get_value(apply_mask=False)
if isinstance(y, SensitiveValue):
y = y.get_value(apply_mask=False)
return x == y
class EncryptedString(_EncryptedBase):
_is_json: bool = False
def process_bind_param(
self, value: str | SensitiveValue[str] | None, dialect: Dialect # noqa: ARG002
@@ -144,20 +186,9 @@ class EncryptedString(TypeDecorator):
)
return None
def compare_values(self, x: Any, y: Any) -> bool:
if x is None or y is None:
return x == y
if isinstance(x, SensitiveValue):
x = x.get_value(apply_mask=False)
if isinstance(y, SensitiveValue):
y = y.get_value(apply_mask=False)
return x == y
class EncryptedJson(TypeDecorator):
impl = LargeBinary
# This type's behavior is fully deterministic and doesn't depend on any external factors.
cache_ok = True
class EncryptedJson(_EncryptedBase):
_is_json: bool = True
def process_bind_param(
self,
@@ -165,9 +196,7 @@ class EncryptedJson(TypeDecorator):
dialect: Dialect, # noqa: ARG002
) -> bytes | None:
if value is not None:
# Handle both raw dicts and SensitiveValue wrappers
if isinstance(value, SensitiveValue):
# Get raw value for storage
value = value.get_value(apply_mask=False)
json_str = json.dumps(value)
return encrypt_string_to_bytes(json_str)
@@ -184,14 +213,40 @@ class EncryptedJson(TypeDecorator):
)
return None
def compare_values(self, x: Any, y: Any) -> bool:
if x is None or y is None:
return x == y
if isinstance(x, SensitiveValue):
x = x.get_value(apply_mask=False)
if isinstance(y, SensitiveValue):
y = y.get_value(apply_mask=False)
return x == y
_REGISTERED_ATTRS: set[str] = set()
@event.listens_for(Mapper, "mapper_configured")
def _register_sensitive_value_set_events(
mapper: Mapper,
class_: type,
) -> None:
"""Auto-wrap raw values in SensitiveValue when assigned to encrypted columns."""
for prop in mapper.column_attrs:
for col in prop.columns:
if isinstance(col.type, _EncryptedBase):
col_type = col.type
attr = getattr(class_, prop.key)
# Guard against double-registration (e.g. if mapper is
# re-configured in test setups)
attr_key = f"{class_.__qualname__}.{prop.key}"
if attr_key in _REGISTERED_ATTRS:
continue
_REGISTERED_ATTRS.add(attr_key)
@event.listens_for(attr, "set", retval=True)
def _wrap_value(
target: Any, # noqa: ARG001
value: Any,
oldvalue: Any, # noqa: ARG001
initiator: Any, # noqa: ARG001
_col_type: _EncryptedBase = col_type,
) -> Any:
if value is not None and not isinstance(value, SensitiveValue):
return _col_type.wrap_raw(value)
return value
class NullFilteredString(TypeDecorator):

View File

@@ -23,6 +23,7 @@ from onyx.db.models import EncryptedJson
from onyx.db.models import EncryptedString
from onyx.utils.encryption import decrypt_bytes_to_string
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import global_version
logger = setup_logger()
@@ -83,6 +84,9 @@ def rotate_encryption_key(
is preserved on crash. Already-rotated rows are detected and skipped,
making the operation safe to re-run.
"""
if not global_version.is_ee_version():
raise RuntimeError("EE mode is not enabled — rotation requires EE encryption.")
if not ENCRYPTION_KEY_SECRET:
raise RuntimeError(
"ENCRYPTION_KEY_SECRET is not set — cannot rotate. "
@@ -118,19 +122,19 @@ def rotate_encryption_key(
decrypted_str = raw_bytes.decode("utf-8")
else:
decrypted_str = decrypt_bytes_to_string(raw_bytes, key=old_key)
# For EncryptedJson, parse back to dict so the TypeDecorator
# can json.dumps() it cleanly (avoids double-encoding).
value: Any = json.loads(decrypted_str) if is_json else decrypted_str
except (ValueError, UnicodeDecodeError) as e:
pk_vals = [row[i] for i in range(len(pk_names))]
logger.warning(
f"Could not decrypt {table_name}.{col_name} "
f"Could not decrypt/parse {table_name}.{col_name} "
f"row {pk_vals} — skipping: {e}"
)
continue
if not dry_run:
# For EncryptedJson, parse back to dict so the TypeDecorator
# can json.dumps() it cleanly (avoids double-encoding).
value: Any = json.loads(decrypted_str) if is_json else decrypted_str
pk_filters = [pk_attr == row[i] for i, pk_attr in enumerate(pk_attrs)]
update_stmt = (
update(model_cls).where(*pk_filters).values({col_name: value})

View File

@@ -1,4 +1,5 @@
import abc
from typing import cast
from onyx.utils.special_types import JSON_ro
@@ -7,6 +8,19 @@ class KvKeyNotFoundError(Exception):
pass
def unwrap_str(val: JSON_ro) -> str:
    """Unwrap a string stored as {"value": str} in the encrypted KV store.

    Also handles legacy plain-string values cached in Redis.
    """
    if not isinstance(val, dict):
        # Legacy path: value was cached as a bare string rather than wrapped.
        return cast(str, val)
    if "value" not in val:
        raise ValueError(
            f"Expected dict with 'value' key, got keys: {list(val.keys())}"
        )
    return cast(str, val["value"])
class KeyValueStore:
# In the Multi Tenant case, the tenant context is picked up automatically, it does not need to be passed in
# It's read from the global thread level variable

View File

@@ -10,6 +10,7 @@ from onyx.mcp_server.utils import get_indexed_sources
from onyx.mcp_server.utils import require_access_token
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import build_api_server_url_for_http_requests
from onyx.utils.variable_functionality import global_version
logger = setup_logger()
@@ -26,6 +27,14 @@ async def search_indexed_documents(
Use this tool for information that is not public knowledge and specific to the user,
their team, their work, or their organization/company.
Note: In CE mode, this tool uses the chat endpoint internally which invokes an LLM
on every call, consuming tokens and adding latency.
Additionally, CE callers receive a truncated snippet (blurb) instead of a full document chunk,
but this should still be sufficient for most use cases. CE mode functionality should be swapped
when a dedicated CE search endpoint is implemented.
In EE mode, the dedicated search endpoint is used instead.
To find a list of available sources, use the `indexed_sources` resource.
Returns chunks of text as search results with snippets, scores, and metadata.
@@ -111,48 +120,73 @@ async def search_indexed_documents(
if time_cutoff_dt:
filters["time_cutoff"] = time_cutoff_dt.isoformat()
# Build the search request using the new SendSearchQueryRequest format
search_request = {
"search_query": query,
"filters": filters,
"num_docs_fed_to_llm_selection": limit,
"run_query_expansion": False,
"include_content": True,
"stream": False,
}
is_ee = global_version.is_ee_version()
base_url = build_api_server_url_for_http_requests(respect_env_override_if_set=True)
auth_headers = {"Authorization": f"Bearer {access_token.token}"}
search_request: dict[str, Any]
if is_ee:
# EE: use the dedicated search endpoint (no LLM invocation)
search_request = {
"search_query": query,
"filters": filters,
"num_docs_fed_to_llm_selection": limit,
"run_query_expansion": False,
"include_content": True,
"stream": False,
}
endpoint = f"{base_url}/search/send-search-message"
error_key = "error"
docs_key = "search_docs"
content_field = "content"
else:
# CE: fall back to the chat endpoint (invokes LLM, consumes tokens)
search_request = {
"message": query,
"stream": False,
"chat_session_info": {},
}
if filters:
search_request["internal_search_filters"] = filters
endpoint = f"{base_url}/chat/send-chat-message"
error_key = "error_msg"
docs_key = "top_documents"
content_field = "blurb"
# Call the API server using the new send-search-message route
try:
response = await get_http_client().post(
f"{build_api_server_url_for_http_requests(respect_env_override_if_set=True)}/search/send-search-message",
endpoint,
json=search_request,
headers={"Authorization": f"Bearer {access_token.token}"},
headers=auth_headers,
)
response.raise_for_status()
result = response.json()
# Check for error in response
if result.get("error"):
if result.get(error_key):
return {
"documents": [],
"total_results": 0,
"query": query,
"error": result.get("error"),
"error": result.get(error_key),
}
# Return simplified format for MCP clients
fields_to_return = [
"semantic_identifier",
"content",
"source_type",
"link",
"score",
]
documents = [
{key: doc.get(key) for key in fields_to_return}
for doc in result.get("search_docs", [])
{
"semantic_identifier": doc.get("semantic_identifier"),
"content": doc.get(content_field),
"source_type": doc.get("source_type"),
"link": doc.get("link"),
"score": doc.get("score"),
}
for doc in result.get(docs_key, [])
]
# NOTE: search depth is controlled by the backend persona defaults, not `limit`.
# `limit` only caps the returned list; fewer results may be returned if the
# backend retrieves fewer documents than requested.
documents = documents[:limit]
logger.info(
f"Onyx MCP Server: Internal search returned {len(documents)} results"
)
@@ -160,7 +194,6 @@ async def search_indexed_documents(
"documents": documents,
"total_results": len(documents),
"query": query,
"executed_queries": result.get("all_executed_queries", [query]),
}
except Exception as e:
logger.error(f"Onyx MCP Server: Document search error: {e}", exc_info=True)

View File

@@ -1905,7 +1905,7 @@ def get_connector_by_id(
@router.post("/connector-request")
def submit_connector_request(
request_data: ConnectorRequestSubmission,
user: User | None = Depends(current_user),
user: User = Depends(current_user),
) -> StatusResponse:
"""
Submit a connector request for Cloud deployments.
@@ -1918,7 +1918,7 @@ def submit_connector_request(
raise HTTPException(status_code=400, detail="Connector name cannot be empty")
# Get user identifier for telemetry
user_email = user.email if user else None
user_email = user.email
distinct_id = user_email or tenant_id
# Track connector request via PostHog telemetry (Cloud only)

View File

@@ -57,9 +57,6 @@ def list_messages(
db_session: Session = Depends(get_session),
) -> MessageListResponse:
"""Get all messages for a build session."""
if user is None:
raise HTTPException(status_code=401, detail="Authentication required")
session_manager = SessionManager(db_session)
messages = session_manager.list_messages(session_id, user.id)

View File

@@ -54,18 +54,14 @@ def _require_opensearch(db_session: Session) -> None:
)
def _get_user_access_info(
user: User | None, db_session: Session
) -> tuple[str | None, list[str]]:
if not user:
return None, []
def _get_user_access_info(user: User, db_session: Session) -> tuple[str, list[str]]:
return user.email, get_user_external_group_ids(db_session, user)
@router.get(HIERARCHY_NODES_LIST_PATH)
def list_accessible_hierarchy_nodes(
source: DocumentSource,
user: User | None = Depends(current_user),
user: User = Depends(current_user),
db_session: Session = Depends(get_session),
) -> HierarchyNodesResponse:
_require_opensearch(db_session)
@@ -92,7 +88,7 @@ def list_accessible_hierarchy_nodes(
@router.post(HIERARCHY_NODE_DOCUMENTS_PATH)
def list_accessible_hierarchy_node_documents(
documents_request: HierarchyNodeDocumentsRequest,
user: User | None = Depends(current_user),
user: User = Depends(current_user),
db_session: Session = Depends(get_session),
) -> HierarchyNodeDocumentsResponse:
_require_opensearch(db_session)

View File

@@ -1013,7 +1013,7 @@ def get_mcp_servers_for_assistant(
@router.get("/servers", response_model=MCPServersResponse)
def get_mcp_servers_for_user(
db: Session = Depends(get_session),
user: User | None = Depends(current_user),
user: User = Depends(current_user),
) -> MCPServersResponse:
"""List all MCP servers for use in agent configuration and chat UI.

View File

@@ -10,6 +10,8 @@ from pydantic import Field
from sqlalchemy.orm import Session
from onyx.configs.app_configs import FILE_TOKEN_COUNT_THRESHOLD
from onyx.configs.app_configs import USER_FILE_MAX_UPLOAD_SIZE_BYTES
from onyx.configs.app_configs import USER_FILE_MAX_UPLOAD_SIZE_MB
from onyx.db.llm import fetch_default_llm_model
from onyx.file_processing.extract_file_text import extract_file_text
from onyx.file_processing.extract_file_text import get_file_ext
@@ -35,6 +37,38 @@ def get_safe_filename(upload: UploadFile) -> str:
return upload.filename
def get_upload_size_bytes(upload: UploadFile) -> int | None:
    """Best-effort file size in bytes without consuming the stream."""
    declared = upload.size
    if declared is not None:
        # Framework already knows the size; trust it and avoid any seeking.
        return declared
    # Fall back to measuring the underlying stream: remember the current
    # position, seek to the end to read the size, then restore the position.
    try:
        stream = upload.file
        saved_pos = stream.tell()
        stream.seek(0, 2)  # whence=2 → seek relative to end of stream
        end_pos = stream.tell()
        stream.seek(saved_pos)
        return end_pos
    except Exception as e:
        # Non-seekable stream (or other I/O failure) — report and give up.
        logger.warning(
            "Could not determine upload size via stream seek "
            f"(filename='{get_safe_filename(upload)}', "
            f"error_type={type(e).__name__}, error={e})"
        )
        return None
def is_upload_too_large(upload: UploadFile, max_bytes: int) -> bool:
    """Return True when upload size is known and exceeds max_bytes."""
    known_size = get_upload_size_bytes(upload)
    if known_size is not None:
        return known_size > max_bytes
    # Unknown size: log and let the upload through rather than reject it.
    logger.warning(
        "Could not determine upload size; skipping size-limit check for "
        f"'{get_safe_filename(upload)}'"
    )
    return False
# Guard against extremely large images
Image.MAX_IMAGE_PIXELS = 12000 * 12000
@@ -159,6 +193,18 @@ def categorize_uploaded_files(
for upload in files:
try:
filename = get_safe_filename(upload)
# Size limit is a hard safety cap and is enforced even when token
# threshold checks are skipped via SKIP_USERFILE_THRESHOLD settings.
if is_upload_too_large(upload, USER_FILE_MAX_UPLOAD_SIZE_BYTES):
results.rejected.append(
RejectedFile(
filename=filename,
reason=f"Exceeds {USER_FILE_MAX_UPLOAD_SIZE_MB} MB file size limit",
)
)
continue
extension = get_file_ext(filename)
# If image, estimate tokens via dedicated method first

View File

@@ -41,6 +41,7 @@ class StreamingType(Enum):
REASONING_DONE = "reasoning_done"
CITATION_INFO = "citation_info"
TOOL_CALL_DEBUG = "tool_call_debug"
TOOL_CALL_ARGUMENT_DELTA = "tool_call_argument_delta"
MEMORY_TOOL_START = "memory_tool_start"
MEMORY_TOOL_DELTA = "memory_tool_delta"
@@ -259,6 +260,15 @@ class CustomToolDelta(BaseObj):
file_ids: list[str] | None = None
class ToolCallArgumentDelta(BaseObj):
type: Literal["tool_call_argument_delta"] = (
StreamingType.TOOL_CALL_ARGUMENT_DELTA.value
)
tool_type: str
argument_deltas: dict[str, Any]
################################################
# File Reader Packets
################################################
@@ -379,6 +389,7 @@ PacketObj = Union[
# Citation Packets
CitationInfo,
ToolCallDebug,
ToolCallArgumentDelta,
# Deep Research Packets
DeepResearchPlanStart,
DeepResearchPlanDelta,

View File

@@ -78,6 +78,7 @@ class Settings(BaseModel):
# User Knowledge settings
user_knowledge_enabled: bool | None = True
user_file_max_upload_size_mb: int | None = None
# Connector settings
show_extra_connectors: bool | None = True

View File

@@ -3,6 +3,7 @@ from onyx.configs.app_configs import DISABLE_USER_KNOWLEDGE
from onyx.configs.app_configs import ENABLE_OPENSEARCH_INDEXING_FOR_ONYX
from onyx.configs.app_configs import ONYX_QUERY_HISTORY_TYPE
from onyx.configs.app_configs import SHOW_EXTRA_CONNECTORS
from onyx.configs.app_configs import USER_FILE_MAX_UPLOAD_SIZE_MB
from onyx.configs.constants import KV_SETTINGS_KEY
from onyx.configs.constants import OnyxRedisLocks
from onyx.key_value_store.factory import get_kv_store
@@ -50,6 +51,7 @@ def load_settings() -> Settings:
if DISABLE_USER_KNOWLEDGE:
settings.user_knowledge_enabled = False
settings.user_file_max_upload_size_mb = USER_FILE_MAX_UPLOAD_SIZE_MB
settings.show_extra_connectors = SHOW_EXTRA_CONNECTORS
settings.opensearch_indexing_enabled = ENABLE_OPENSEARCH_INDEXING_FOR_ONYX
return settings

View File

@@ -56,3 +56,23 @@ def get_built_in_tool_ids() -> list[str]:
def get_built_in_tool_by_id(in_code_tool_id: str) -> Type[BUILT_IN_TOOL_TYPES]:
return BUILT_IN_TOOL_MAP[in_code_tool_id]
def _build_tool_name_to_class() -> dict[str, Type[BUILT_IN_TOOL_TYPES]]:
    """Build a mapping from LLM-facing tool name to tool class."""

    def _resolve_tool_name(tool_cls: Type[BUILT_IN_TOOL_TYPES]) -> str:
        # `name` may be declared on the class either as a property or as a
        # plain string class attribute; look it up on the class dict so we
        # don't trigger descriptor protocol on an instance.
        declared = tool_cls.__dict__.get("name")
        if isinstance(declared, property) and declared.fget is not None:
            # Invoke the property getter with the class itself as receiver.
            return declared.fget(tool_cls)
        if isinstance(declared, str):
            return declared
        raise ValueError(
            f"Built-in tool {tool_cls.__name__} must define a valid LLM-facing tool name"
        )

    return {
        _resolve_tool_name(tool_cls): tool_cls
        for tool_cls in BUILT_IN_TOOL_MAP.values()
    }


TOOL_NAME_TO_CLASS: dict[str, Type[BUILT_IN_TOOL_TYPES]] = _build_tool_name_to_class()

View File

@@ -92,3 +92,7 @@ class Tool(abc.ABC, Generic[TOverride]):
**llm_kwargs: Any,
) -> ToolResponse:
raise NotImplementedError
@classmethod
def should_emit_argument_deltas(cls) -> bool:
    """Whether streaming tool-call argument deltas should be emitted for this tool.

    Defaults to False; subclasses opt in by overriding this classmethod.
    """
    return False

View File

@@ -376,3 +376,8 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
rich_response=None,
llm_facing_response=llm_response,
)
@classmethod
@override
def should_emit_argument_deltas(cls) -> bool:
    """PythonTool opts in to streaming tool-call argument deltas."""
    return True

View File

@@ -11,18 +11,20 @@ logger = setup_logger()
# IMPORTANT DO NOT DELETE, THIS IS USED BY fetch_versioned_implementation
def _encrypt_string(input_str: str, key: str | None = None) -> bytes:
if ENCRYPTION_KEY_SECRET or key:
def _encrypt_string(input_str: str, key: str | None = None) -> bytes: # noqa: ARG001
if ENCRYPTION_KEY_SECRET:
logger.warning("MIT version of Onyx does not support encryption of secrets.")
elif key is not None:
logger.debug("MIT encrypt called with explicit key — key ignored.")
return input_str.encode()
# IMPORTANT DO NOT DELETE, THIS IS USED BY fetch_versioned_implementation
def _decrypt_bytes(input_bytes: bytes, key: str | None = None) -> str: # noqa: ARG001
if key is not None:
logger.warning(
"MIT version of Onyx does not support decryption with a provided key."
)
if ENCRYPTION_KEY_SECRET:
logger.warning("MIT version of Onyx does not support decryption of secrets.")
elif key is not None:
logger.debug("MIT decrypt called with explicit key — key ignored.")
return input_bytes.decode()

View File

@@ -128,6 +128,8 @@ class SensitiveValue(Generic[T]):
value = self._decrypt()
if not apply_mask:
# Callers must not mutate the returned dict — doing so would
# desync the cache from the encrypted bytes and the DB.
return value
# Apply masking
@@ -174,18 +176,20 @@ class SensitiveValue(Generic[T]):
)
def __eq__(self, other: Any) -> bool:
"""Prevent direct comparison which might expose value."""
if isinstance(other, SensitiveValue):
# Compare encrypted bytes for equality check
return self._encrypted_bytes == other._encrypted_bytes
raise SensitiveAccessError(
"Cannot compare SensitiveValue with non-SensitiveValue. "
"Use .get_value(apply_mask=True/False) to access the value for comparison."
)
"""Compare SensitiveValues by their decrypted content."""
# NOTE: if you attempt to compare a string/dict to a SensitiveValue,
# this comparison will return NotImplemented, which then evaluates to False.
# This is the convention and required for SQLAlchemy's attribute tracking.
if not isinstance(other, SensitiveValue):
return NotImplemented
return self._decrypt() == other._decrypt()
def __hash__(self) -> int:
"""Allow hashing based on encrypted bytes."""
return hash(self._encrypted_bytes)
"""Hash based on decrypted content."""
value = self._decrypt()
if isinstance(value, dict):
return hash(json.dumps(value, sort_keys=True))
return hash(value)
# Prevent JSON serialization
def __json__(self) -> Any:

View File

@@ -2,7 +2,6 @@ import contextvars
import threading
import uuid
from enum import Enum
from typing import cast
import requests
@@ -15,6 +14,7 @@ from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.models import User
from onyx.key_value_store.factory import get_kv_store
from onyx.key_value_store.interface import KvKeyNotFoundError
from onyx.key_value_store.interface import unwrap_str
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import (
fetch_versioned_implementation_with_fallback,
@@ -25,6 +25,7 @@ from shared_configs.contextvars import get_current_tenant_id
logger = setup_logger()
_DANSWER_TELEMETRY_ENDPOINT = "https://telemetry.onyx.app/anonymous_telemetry"
_CACHED_UUID: str | None = None
_CACHED_INSTANCE_DOMAIN: str | None = None
@@ -62,10 +63,10 @@ def get_or_generate_uuid() -> str:
kv_store = get_kv_store()
try:
_CACHED_UUID = cast(str, kv_store.load(KV_CUSTOMER_UUID_KEY))
_CACHED_UUID = unwrap_str(kv_store.load(KV_CUSTOMER_UUID_KEY))
except KvKeyNotFoundError:
_CACHED_UUID = str(uuid.uuid4())
kv_store.store(KV_CUSTOMER_UUID_KEY, _CACHED_UUID, encrypt=True)
kv_store.store(KV_CUSTOMER_UUID_KEY, {"value": _CACHED_UUID}, encrypt=True)
return _CACHED_UUID
@@ -79,14 +80,16 @@ def _get_or_generate_instance_domain() -> str | None: #
kv_store = get_kv_store()
try:
_CACHED_INSTANCE_DOMAIN = cast(str, kv_store.load(KV_INSTANCE_DOMAIN_KEY))
_CACHED_INSTANCE_DOMAIN = unwrap_str(kv_store.load(KV_INSTANCE_DOMAIN_KEY))
except KvKeyNotFoundError:
with get_session_with_current_tenant() as db_session:
first_user = db_session.query(User).first()
if first_user:
_CACHED_INSTANCE_DOMAIN = first_user.email.split("@")[-1]
kv_store.store(
KV_INSTANCE_DOMAIN_KEY, _CACHED_INSTANCE_DOMAIN, encrypt=True
KV_INSTANCE_DOMAIN_KEY,
{"value": _CACHED_INSTANCE_DOMAIN},
encrypt=True,
)
return _CACHED_INSTANCE_DOMAIN

View File

@@ -1,48 +1,93 @@
"""Decrypt a raw hex-encoded credential value.
Usage:
python -m scripts.decrypt <hex_value>
python -m scripts.decrypt <hex_value> --key "my-encryption-key"
python -m scripts.decrypt <hex_value> --key ""
Pass --key "" to skip decryption and just decode the raw bytes as UTF-8.
Omit --key to use the current ENCRYPTION_KEY_SECRET from the environment.
"""
import argparse
import binascii
import json
import os
import sys
from onyx.utils.encryption import decrypt_bytes_to_string
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(parent_dir)
from onyx.utils.encryption import decrypt_bytes_to_string # noqa: E402
from onyx.utils.variable_functionality import global_version # noqa: E402
def decrypt_raw_credential(encrypted_value: str) -> None:
"""Decrypt and display a raw encrypted credential value
def decrypt_raw_credential(encrypted_value: str, key: str | None = None) -> None:
"""Decrypt and display a raw encrypted credential value.
Args:
encrypted_value: The hex encoded encrypted credential value
encrypted_value: The hex-encoded encrypted credential value.
key: Encryption key to use. None means use ENCRYPTION_KEY_SECRET,
empty string means just decode as UTF-8.
"""
# Strip common hex prefixes
if encrypted_value.startswith("\\x"):
encrypted_value = encrypted_value[2:]
elif encrypted_value.startswith("x"):
encrypted_value = encrypted_value[1:]
print(encrypted_value)
try:
# If string starts with 'x', remove it as it's just a prefix indicating hex
if encrypted_value.startswith("x"):
encrypted_value = encrypted_value[1:]
elif encrypted_value.startswith("\\x"):
encrypted_value = encrypted_value[2:]
# Convert hex string to bytes
encrypted_bytes = binascii.unhexlify(encrypted_value)
# Decrypt the bytes
decrypted_str = decrypt_bytes_to_string(encrypted_bytes)
# Parse and pretty print the decrypted JSON
decrypted_json = json.loads(decrypted_str)
print("Decrypted credential value:")
print(json.dumps(decrypted_json, indent=2))
raw_bytes = binascii.unhexlify(encrypted_value)
except binascii.Error:
print("Error: Invalid hex encoded string")
print("Error: Invalid hex-encoded string")
sys.exit(1)
except json.JSONDecodeError as e:
print(f"Decrypted raw value (not JSON): {e}")
if key == "":
# Empty key → just decode as UTF-8, no decryption
try:
decrypted_str = raw_bytes.decode("utf-8")
except UnicodeDecodeError as e:
print(f"Error decoding bytes as UTF-8: {e}")
sys.exit(1)
else:
print(key)
try:
decrypted_str = decrypt_bytes_to_string(raw_bytes, key=key)
except Exception as e:
print(f"Error decrypting value: {e}")
sys.exit(1)
except Exception as e:
print(f"Error decrypting value: {e}")
# Try to pretty-print as JSON, otherwise print raw
try:
parsed = json.loads(decrypted_str)
print(json.dumps(parsed, indent=2))
except json.JSONDecodeError:
print(decrypted_str)
def main() -> None:
parser = argparse.ArgumentParser(
description="Decrypt a hex-encoded credential value."
)
parser.add_argument(
"value",
help="Hex-encoded encrypted value to decrypt.",
)
parser.add_argument(
"--key",
default=None,
help=(
"Encryption key. Omit to use ENCRYPTION_KEY_SECRET from env. "
'Pass "" (empty) to just decode as UTF-8 without decryption.'
),
)
args = parser.parse_args()
global_version.set_ee()
decrypt_raw_credential(args.value, key=args.key)
global_version.unset_ee()
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: python decrypt.py <hex_encoded_encrypted_value>")
sys.exit(1)
encrypted_value = sys.argv[1]
decrypt_raw_credential(encrypted_value)
main()

View File

@@ -86,8 +86,16 @@ def main() -> None:
if args.all_tenants:
tenant_ids = get_all_tenant_ids()
print(f"Found {len(tenant_ids)} tenant(s)")
failed_tenants: list[str] = []
for tid in tenant_ids:
_run_for_tenant(tid, old_key, dry_run=args.dry_run)
try:
_run_for_tenant(tid, old_key, dry_run=args.dry_run)
except Exception as e:
print(f" ERROR for tenant {tid}: {e}")
failed_tenants.append(tid)
if failed_tenants:
print(f"FAILED tenants ({len(failed_tenants)}): {failed_tenants}")
sys.exit(1)
else:
tenant_id = args.tenant_id or POSTGRES_DEFAULT_SCHEMA
_run_for_tenant(tenant_id, old_key, dry_run=args.dry_run)

View File

@@ -0,0 +1,90 @@
"""Test that Credential with nested JSON round-trips through SensitiveValue correctly.
Exercises the full encrypt → store → read → decrypt → SensitiveValue path
with realistic nested OAuth credential data, and verifies SQLAlchemy dirty
tracking works with nested dict comparison.
Requires a running Postgres instance.
"""
from sqlalchemy.orm import Session
from onyx.configs.constants import DocumentSource
from onyx.db.models import Credential
from onyx.utils.sensitive import SensitiveValue
# NOTE: this is not the real shape of a Drive credential,
# but it is intended to test nested JSON credential handling
_NESTED_CRED_JSON = {
"oauth_tokens": {
"access_token": "ya29.abc123",
"refresh_token": "1//xEg-def456",
},
"scopes": ["read", "write", "admin"],
"client_config": {
"client_id": "123.apps.googleusercontent.com",
"client_secret": "GOCSPX-secret",
},
}
def test_nested_credential_json_round_trip(db_session: Session) -> None:
    """Nested OAuth credential survives encrypt → store → read → decrypt."""
    cred = Credential(
        source=DocumentSource.GOOGLE_DRIVE,
        credential_json=_NESTED_CRED_JSON,
    )
    db_session.add(cred)
    db_session.flush()

    # Before any DB round-trip: the attribute "set" event should already
    # have wrapped the raw dict in a SensitiveValue.
    wrapped = cred.credential_json
    assert isinstance(wrapped, SensitiveValue)
    assert wrapped.get_value(apply_mask=False) == _NESTED_CRED_JSON

    # Expire and re-read to exercise process_result_value on the DB path.
    db_session.expire(cred)
    from_db = cred.credential_json
    assert isinstance(from_db, SensitiveValue)
    assert from_db.get_value(apply_mask=False) == _NESTED_CRED_JSON

    db_session.rollback()
def test_reassign_same_nested_json_not_dirty(db_session: Session) -> None:
    """Re-assigning the same nested dict should not mark the session dirty."""
    credential = Credential(
        source=DocumentSource.GOOGLE_DRIVE,
        credential_json=_NESTED_CRED_JSON,
    )
    db_session.add(credential)
    db_session.flush()
    # Clear dirty state from the insert
    db_session.expire(credential)
    _ = credential.credential_json  # force reload
    # Re-assign identical value — dirty tracking must compare nested dicts by value
    credential.credential_json = _NESTED_CRED_JSON  # type: ignore[assignment]
    assert not db_session.is_modified(credential)
    db_session.rollback()
def test_assign_different_nested_json_is_dirty(db_session: Session) -> None:
    """Assigning a different nested dict should mark the session dirty."""
    credential = Credential(
        source=DocumentSource.GOOGLE_DRIVE,
        credential_json=_NESTED_CRED_JSON,
    )
    db_session.add(credential)
    db_session.flush()
    db_session.expire(credential)
    _ = credential.credential_json  # force reload
    # Same top-level keys, but one changed nested value must register as modified
    modified_cred = {**_NESTED_CRED_JSON, "scopes": ["read"]}
    credential.credential_json = modified_cred  # type: ignore[assignment]
    assert db_session.is_modified(credential)
    db_session.rollback()

View File

@@ -85,7 +85,7 @@ def test_group_overlap_filter(
results = _get_accessible_hierarchy_nodes_for_source(
db_session,
source=DocumentSource.GOOGLE_DRIVE,
user_email=None,
user_email="",
external_group_ids=["group_engineering"],
)
result_ids = {n.raw_node_id for n in results}
@@ -124,7 +124,7 @@ def test_no_credentials_returns_only_public(
results = _get_accessible_hierarchy_nodes_for_source(
db_session,
source=DocumentSource.GOOGLE_DRIVE,
user_email=None,
user_email="",
external_group_ids=[],
)
result_ids = {n.raw_node_id for n in results}

View File

@@ -0,0 +1,85 @@
"""Tests that SlackBot CRUD operations return properly typed SensitiveValue fields.
Regression test for the bug where insert_slack_bot/update_slack_bot returned
objects with raw string tokens instead of SensitiveValue wrappers, causing
'str object has no attribute get_value' errors in SlackBot.from_model().
"""
from uuid import uuid4
from sqlalchemy.orm import Session
from onyx.db.slack_bot import insert_slack_bot
from onyx.db.slack_bot import update_slack_bot
from onyx.server.manage.models import SlackBot
from onyx.utils.sensitive import SensitiveValue
def _unique(prefix: str) -> str:
return f"{prefix}-{uuid4().hex[:8]}"
def test_insert_slack_bot_returns_sensitive_values(db_session: Session) -> None:
    """insert_slack_bot must return tokens wrapped as SensitiveValue, not raw str."""
    bot_token = _unique("xoxb-insert")
    app_token = _unique("xapp-insert")
    user_token = _unique("xoxp-insert")
    slack_bot = insert_slack_bot(
        db_session=db_session,
        name=_unique("test-bot-insert"),
        enabled=True,
        bot_token=bot_token,
        app_token=app_token,
        user_token=user_token,
    )
    # Each token attribute must come back as a SensitiveValue wrapper...
    assert isinstance(slack_bot.bot_token, SensitiveValue)
    assert isinstance(slack_bot.app_token, SensitiveValue)
    assert isinstance(slack_bot.user_token, SensitiveValue)
    # ...that still unwraps to the original plaintext.
    assert slack_bot.bot_token.get_value(apply_mask=False) == bot_token
    assert slack_bot.app_token.get_value(apply_mask=False) == app_token
    assert slack_bot.user_token.get_value(apply_mask=False) == user_token
    # Verify from_model works without error (the original regression)
    pydantic_bot = SlackBot.from_model(slack_bot)
    assert pydantic_bot.bot_token  # masked, but not empty
    assert pydantic_bot.app_token
    # Consistency with test_update_slack_bot_returns_sensitive_values: a
    # supplied user_token should survive the pydantic conversion as well.
    assert pydantic_bot.user_token is not None
def test_update_slack_bot_returns_sensitive_values(db_session: Session) -> None:
    """update_slack_bot must return tokens wrapped as SensitiveValue, not raw str."""
    slack_bot = insert_slack_bot(
        db_session=db_session,
        name=_unique("test-bot-update"),
        enabled=True,
        bot_token=_unique("xoxb-update"),
        app_token=_unique("xapp-update"),
    )
    new_bot_token = _unique("xoxb-update-new")
    new_app_token = _unique("xapp-update-new")
    new_user_token = _unique("xoxp-update-new")
    updated = update_slack_bot(
        db_session=db_session,
        slack_bot_id=slack_bot.id,
        name=_unique("test-bot-updated"),
        enabled=False,
        bot_token=new_bot_token,
        app_token=new_app_token,
        user_token=new_user_token,
    )
    # The returned object must expose the *new* tokens as SensitiveValue wrappers
    assert isinstance(updated.bot_token, SensitiveValue)
    assert isinstance(updated.app_token, SensitiveValue)
    assert isinstance(updated.user_token, SensitiveValue)
    assert updated.bot_token.get_value(apply_mask=False) == new_bot_token
    assert updated.app_token.get_value(apply_mask=False) == new_app_token
    assert updated.user_token.get_value(apply_mask=False) == new_user_token
    # Verify from_model works without error
    pydantic_bot = SlackBot.from_model(updated)
    assert pydantic_bot.bot_token
    assert pydantic_bot.app_token
    assert pydantic_bot.user_token is not None

View File

@@ -148,8 +148,16 @@ class TestOAuthConfigCRUD:
)
# Secrets should be preserved
assert updated_config.client_id == original_client_id
assert updated_config.client_secret == original_client_secret
assert updated_config.client_id is not None
assert original_client_id is not None
assert updated_config.client_id.get_value(
apply_mask=False
) == original_client_id.get_value(apply_mask=False)
assert updated_config.client_secret is not None
assert original_client_secret is not None
assert updated_config.client_secret.get_value(
apply_mask=False
) == original_client_secret.get_value(apply_mask=False)
# But name should be updated
assert updated_config.name == new_name
@@ -173,9 +181,14 @@ class TestOAuthConfigCRUD:
)
# client_id should be cleared (empty string)
assert updated_config.client_id == ""
assert updated_config.client_id is not None
assert updated_config.client_id.get_value(apply_mask=False) == ""
# client_secret should be preserved
assert updated_config.client_secret == original_client_secret
assert updated_config.client_secret is not None
assert original_client_secret is not None
assert updated_config.client_secret.get_value(
apply_mask=False
) == original_client_secret.get_value(apply_mask=False)
def test_update_oauth_config_clear_client_secret(self, db_session: Session) -> None:
"""Test clearing client_secret while preserving client_id"""
@@ -190,9 +203,14 @@ class TestOAuthConfigCRUD:
)
# client_secret should be cleared (empty string)
assert updated_config.client_secret == ""
assert updated_config.client_secret is not None
assert updated_config.client_secret.get_value(apply_mask=False) == ""
# client_id should be preserved
assert updated_config.client_id == original_client_id
assert updated_config.client_id is not None
assert original_client_id is not None
assert updated_config.client_id.get_value(
apply_mask=False
) == original_client_id.get_value(apply_mask=False)
def test_update_oauth_config_clear_both_secrets(self, db_session: Session) -> None:
"""Test clearing both client_id and client_secret"""
@@ -207,8 +225,10 @@ class TestOAuthConfigCRUD:
)
# Both should be cleared (empty strings)
assert updated_config.client_id == ""
assert updated_config.client_secret == ""
assert updated_config.client_id is not None
assert updated_config.client_id.get_value(apply_mask=False) == ""
assert updated_config.client_secret is not None
assert updated_config.client_secret.get_value(apply_mask=False) == ""
def test_update_oauth_config_authorization_url(self, db_session: Session) -> None:
"""Test updating authorization_url"""
@@ -275,7 +295,8 @@ class TestOAuthConfigCRUD:
assert updated_config.token_url == new_token_url
assert updated_config.scopes == new_scopes
assert updated_config.additional_params == new_params
assert updated_config.client_id == new_client_id
assert updated_config.client_id is not None
assert updated_config.client_id.get_value(apply_mask=False) == new_client_id
def test_delete_oauth_config(self, db_session: Session) -> None:
"""Test deleting an OAuth configuration"""
@@ -416,7 +437,8 @@ class TestOAuthUserTokenCRUD:
assert user_token.id is not None
assert user_token.oauth_config_id == oauth_config.id
assert user_token.user_id == user.id
assert user_token.token_data == token_data
assert user_token.token_data is not None
assert user_token.token_data.get_value(apply_mask=False) == token_data
assert user_token.created_at is not None
assert user_token.updated_at is not None
@@ -446,8 +468,13 @@ class TestOAuthUserTokenCRUD:
# Should be the same token record (updated, not inserted)
assert updated_token.id == initial_token_id
assert updated_token.token_data == updated_token_data
assert updated_token.token_data != initial_token_data
assert updated_token.token_data is not None
assert (
updated_token.token_data.get_value(apply_mask=False) == updated_token_data
)
assert (
updated_token.token_data.get_value(apply_mask=False) != initial_token_data
)
def test_get_user_oauth_token(self, db_session: Session) -> None:
"""Test retrieving a user's OAuth token"""
@@ -463,7 +490,8 @@ class TestOAuthUserTokenCRUD:
assert retrieved_token is not None
assert retrieved_token.id == created_token.id
assert retrieved_token.token_data == token_data
assert retrieved_token.token_data is not None
assert retrieved_token.token_data.get_value(apply_mask=False) == token_data
def test_get_user_oauth_token_not_found(self, db_session: Session) -> None:
"""Test retrieving a non-existent user token returns None"""
@@ -519,7 +547,8 @@ class TestOAuthUserTokenCRUD:
retrieved_token = get_user_oauth_token(oauth_config.id, user.id, db_session)
assert retrieved_token is not None
assert retrieved_token.id == updated_token.id
assert retrieved_token.token_data == token_data2
assert retrieved_token.token_data is not None
assert retrieved_token.token_data.get_value(apply_mask=False) == token_data2
def test_cascade_delete_user_tokens_on_config_deletion(
self, db_session: Session

View File

@@ -374,8 +374,14 @@ class TestOAuthTokenManagerCodeExchange:
assert call_args[0][0] == oauth_config.token_url
assert call_args[1]["data"]["grant_type"] == "authorization_code"
assert call_args[1]["data"]["code"] == "auth_code_123"
assert call_args[1]["data"]["client_id"] == oauth_config.client_id
assert call_args[1]["data"]["client_secret"] == oauth_config.client_secret
assert oauth_config.client_id is not None
assert oauth_config.client_secret is not None
assert call_args[1]["data"]["client_id"] == oauth_config.client_id.get_value(
apply_mask=False
)
assert call_args[1]["data"][
"client_secret"
] == oauth_config.client_secret.get_value(apply_mask=False)
assert call_args[1]["data"]["redirect_uri"] == "https://example.com/callback"
@patch("onyx.auth.oauth_token_manager.requests.post")

View File

@@ -950,6 +950,7 @@ from onyx.server.query_and_chat.streaming_models import Packet
from onyx.server.query_and_chat.streaming_models import PythonToolDelta
from onyx.server.query_and_chat.streaming_models import PythonToolStart
from onyx.server.query_and_chat.streaming_models import SectionEnd
from onyx.server.query_and_chat.streaming_models import ToolCallArgumentDelta
from onyx.tools.tool_implementations.python.python_tool import PythonTool
from tests.external_dependency_unit.answer.stream_test_builder import StreamTestBuilder
from tests.external_dependency_unit.answer.stream_test_utils import create_chat_session
@@ -1294,9 +1295,18 @@ def test_code_interpreter_replay_packets_include_code_and_output(
).expect(
Packet(
placement=create_placement(0),
obj=PythonToolStart(code=code),
obj=ToolCallArgumentDelta(
tool_type="python",
argument_deltas={"code": code},
),
),
forward=2,
).expect(
Packet(
placement=create_placement(0),
obj=PythonToolStart(code=code),
),
forward=False,
).expect(
Packet(
placement=create_placement(0),

View File

@@ -64,7 +64,8 @@ class TestBotConfigAPI:
db_session.commit()
assert config is not None
assert config.bot_token == "test_token_123"
assert config.bot_token is not None
assert config.bot_token.get_value(apply_mask=False) == "test_token_123"
# Cleanup
delete_discord_bot_config(db_session)

View File

@@ -0,0 +1,54 @@
from unittest.mock import MagicMock
import pytest
import onyx.auth.users as users
from onyx.auth.users import verify_auth_setting
from onyx.configs.constants import AuthType
def test_verify_auth_setting_raises_for_cloud(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Cloud auth type is not valid for self-hosted deployments."""
    # NOTE(review): only the env var is patched here (the other tests also
    # patch users.AUTH_TYPE) — presumably parsing "cloud" from the env fails
    # before the module constant is consulted; confirm against verify_auth_setting.
    monkeypatch.setenv("AUTH_TYPE", "cloud")
    with pytest.raises(ValueError, match="'cloud' is not a valid auth type"):
        verify_auth_setting()
def test_verify_auth_setting_warns_for_disabled(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Disabled auth type logs a deprecation warning."""
    monkeypatch.setenv("AUTH_TYPE", "disabled")
    mock_logger = MagicMock()
    monkeypatch.setattr(users, "logger", mock_logger)
    # presumably "disabled" resolves to BASIC at import time, so the resolved
    # module constant is patched to match — TODO confirm against onyx.auth.users
    monkeypatch.setattr(users, "AUTH_TYPE", AuthType.BASIC)
    verify_auth_setting()
    mock_logger.warning.assert_called_once()
    assert "no longer supported" in mock_logger.warning.call_args[0][0]
@pytest.mark.parametrize(
    "auth_type",
    [AuthType.BASIC, AuthType.GOOGLE_OAUTH, AuthType.OIDC, AuthType.SAML],
)
def test_verify_auth_setting_valid_auth_types(
    monkeypatch: pytest.MonkeyPatch,
    auth_type: AuthType,
) -> None:
    """Valid auth types work without errors or warnings."""
    monkeypatch.setenv("AUTH_TYPE", auth_type.value)
    mock_logger = MagicMock()
    monkeypatch.setattr(users, "logger", mock_logger)
    monkeypatch.setattr(users, "AUTH_TYPE", auth_type)
    verify_auth_setting()
    # Success path: no warnings, exactly one notice announcing the auth type
    mock_logger.warning.assert_not_called()
    mock_logger.notice.assert_called_once_with(f"Using Auth Type: {auth_type.value}")

View File

@@ -0,0 +1,630 @@
from typing import Any
from unittest.mock import MagicMock
from unittest.mock import patch
from onyx.chat.tool_call_args_streaming import maybe_emit_argument_delta
from onyx.server.query_and_chat.placement import Placement
from onyx.server.query_and_chat.streaming_models import ToolCallArgumentDelta
from onyx.utils.jsonriver import Parser
def _make_tool_call_delta(
index: int = 0,
name: str | None = None,
arguments: str | None = None,
function_is_none: bool = False,
) -> MagicMock:
"""Create a mock tool_call_delta matching the LiteLLM streaming shape."""
delta = MagicMock()
delta.index = index
if function_is_none:
delta.function = None
else:
delta.function = MagicMock()
delta.function.name = name
delta.function.arguments = arguments
return delta
def _make_placement() -> Placement:
    # Arbitrary fixed placement; these tests only need a consistent value.
    return Placement(turn_index=0, tab_index=0)
def _mock_tool_class(emit: bool = True) -> MagicMock:
cls = MagicMock()
cls.should_emit_argument_deltas.return_value = emit
return cls
def _collect(
    tc_map: dict[int, dict[str, Any]],
    delta: MagicMock,
    placement: Placement | None = None,
    parsers: dict[int, Parser] | None = None,
) -> list[Any]:
    """Run maybe_emit_argument_delta and return the yielded packets.

    Defaults to a fresh placement and an empty parser map, so each call is
    independent unless the caller threads its own `parsers` dict through
    repeated calls to simulate a continuing stream.
    """
    return list(
        maybe_emit_argument_delta(
            tc_map,
            delta,
            placement or _make_placement(),
            parsers if parsers is not None else {},
        )
    )
def _stream_fragments(
    fragments: list[str],
    tc_map: dict[int, dict[str, Any]],
    placement: Placement | None = None,
) -> list[str]:
    """Feed fragments into maybe_emit_argument_delta one by one, returning
    all emitted content values concatenated per-key as a flat list."""
    pl = placement or _make_placement()
    # One Parser map shared across all fragments — mirrors a real stream.
    parsers: dict[int, Parser] = {}
    emitted: list[str] = []
    for frag in fragments:
        # Accumulate the raw arguments string exactly as the LLM client would.
        tc_map[0]["arguments"] += frag
        delta = _make_tool_call_delta(arguments=frag)
        for packet in maybe_emit_argument_delta(tc_map, delta, pl, parsers=parsers):
            obj = packet.obj
            assert isinstance(obj, ToolCallArgumentDelta)
            for value in obj.argument_deltas.values():
                emitted.append(value)
    return emitted
class TestMaybeEmitArgumentDeltaGuards:
    """Tests for conditions that cause no packet to be emitted."""

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_when_tool_does_not_opt_in(
        self, mock_get_tool: MagicMock
    ) -> None:
        """Tools that return False from should_emit_argument_deltas emit nothing."""
        mock_get_tool.return_value = _mock_tool_class(emit=False)
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": '{"code": "x'}
        }
        assert _collect(tc_map, _make_tool_call_delta(arguments="x")) == []

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_when_tool_class_unknown(
        self, mock_get_tool: MagicMock
    ) -> None:
        """An unrecognized tool name (no resolvable tool class) emits nothing."""
        mock_get_tool.return_value = None
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "unknown", "arguments": '{"code": "x'}
        }
        assert _collect(tc_map, _make_tool_call_delta(arguments="x")) == []

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_when_no_argument_fragment(
        self, mock_get_tool: MagicMock
    ) -> None:
        """A delta with arguments=None carries no new content to stream."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": '{"code": "x'}
        }
        assert _collect(tc_map, _make_tool_call_delta(arguments=None)) == []

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_when_key_value_not_started(
        self, mock_get_tool: MagicMock
    ) -> None:
        """Key exists in JSON but its string value hasn't begun yet."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": '{"code":'}
        }
        assert _collect(tc_map, _make_tool_call_delta(arguments=":")) == []

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_before_any_key(self, mock_get_tool: MagicMock) -> None:
        """Only the opening brace has arrived — no key to stream yet."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": "{"}
        }
        assert _collect(tc_map, _make_tool_call_delta(arguments="{")) == []
class TestMaybeEmitArgumentDeltaBasic:
    """Tests for correct packet content and incremental emission."""

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_emits_packet_with_correct_fields(self, mock_get_tool: MagicMock) -> None:
        """Packets carry the tool_type, and deltas reconstruct the full value."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "', "print(1)", '"}']
        pl = _make_placement()
        parsers: dict[int, Parser] = {}
        all_packets = []
        for frag in fragments:
            tc_map[0]["arguments"] += frag
            packets = _collect(
                tc_map, _make_tool_call_delta(arguments=frag), pl, parsers
            )
            all_packets.extend(packets)
        assert len(all_packets) >= 1
        # Verify packet structure
        obj = all_packets[0].obj
        assert isinstance(obj, ToolCallArgumentDelta)
        assert obj.tool_type == "python"
        # All emitted content should reconstruct the value
        full_code = ""
        for p in all_packets:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            if "code" in p.obj.argument_deltas:
                full_code += p.obj.argument_deltas["code"]
        assert full_code == "print(1)"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_emits_only_new_content_on_subsequent_call(
        self, mock_get_tool: MagicMock
    ) -> None:
        """After a first emission, subsequent calls emit only the diff."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        # Shared parser map so the second call continues the first's state
        parsers: dict[int, Parser] = {}
        pl = _make_placement()
        # First fragment opens the string
        tc_map[0]["arguments"] = '{"code": "abc'
        packets_1 = _collect(
            tc_map, _make_tool_call_delta(arguments='{"code": "abc'), pl, parsers
        )
        code_1 = ""
        for p in packets_1:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            code_1 += p.obj.argument_deltas.get("code", "")
        assert code_1 == "abc"
        # Second fragment appends more
        tc_map[0]["arguments"] = '{"code": "abcdef'
        packets_2 = _collect(
            tc_map, _make_tool_call_delta(arguments="def"), pl, parsers
        )
        code_2 = ""
        for p in packets_2:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            code_2 += p.obj.argument_deltas.get("code", "")
        assert code_2 == "def"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_handles_multiple_keys_sequentially(self, mock_get_tool: MagicMock) -> None:
        """When a second key starts, emissions switch to that key."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "x',
            '", "output": "hello',
            '"}',
        ]
        emitted = _stream_fragments(fragments, tc_map)
        full = "".join(emitted)
        assert "x" in full
        assert "hello" in full

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_delta_spans_key_boundary(self, mock_get_tool: MagicMock) -> None:
        """A single delta contains the end of one value and the start of the next key."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "x',
            'y", "lang": "py',
            '"}',
        ]
        emitted = _stream_fragments(fragments, tc_map)
        full = "".join(emitted)
        assert "xy" in full
        assert "py" in full

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_empty_value_emits_nothing(self, mock_get_tool: MagicMock) -> None:
        """An empty string value has nothing to emit."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        # Opening quote just arrived, value is empty
        tc_map[0]["arguments"] = '{"code": "'
        packets = _collect(tc_map, _make_tool_call_delta(arguments='{"code": "'))
        # No string content yet, so either no packet or empty deltas
        for p in packets:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            assert p.obj.argument_deltas.get("code", "") == ""
class TestMaybeEmitArgumentDeltaDecoding:
    """Tests verifying that JSON escape sequences are properly decoded."""

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_decodes_newlines(self, mock_get_tool: MagicMock) -> None:
        """\\n in the JSON payload becomes a real newline in the emitted value."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "line1\\nline2"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "line1\nline2"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_decodes_tabs(self, mock_get_tool: MagicMock) -> None:
        """\\t decodes to a literal tab character."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "\\tindented"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "\tindented"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_decodes_escaped_quotes(self, mock_get_tool: MagicMock) -> None:
        """\\\" decodes to a double quote without terminating the value."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "say \\"hi\\""}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == 'say "hi"'

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_decodes_escaped_backslashes(self, mock_get_tool: MagicMock) -> None:
        """\\\\ decodes to a single backslash."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "path\\\\dir"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "path\\dir"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_decodes_unicode_escape(self, mock_get_tool: MagicMock) -> None:
        """\\uXXXX decodes to the corresponding character."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "\\u0041"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "A"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_incomplete_escape_at_end_decoded_on_next_chunk(
        self, mock_get_tool: MagicMock
    ) -> None:
        """A trailing backslash (incomplete escape) is completed in the next chunk."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "hello\\', 'n"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "hello\n"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_incomplete_unicode_escape_completed_on_next_chunk(
        self, mock_get_tool: MagicMock
    ) -> None:
        """A partial \\uXX sequence is completed in the next chunk."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"code": "hello\\u00', '41"}']
        emitted = _stream_fragments(fragments, tc_map)
        assert "".join(emitted) == "helloA"
class TestArgumentDeltaStreamingE2E:
    """Simulates realistic sequences of LLM argument deltas to verify
    the full pipeline produces correct decoded output."""

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_realistic_python_code_streaming(self, mock_get_tool: MagicMock) -> None:
        """Streams: {"code": "print('hello')\\nprint('world')"}"""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        # Fragments split mid-key and mid-value, as real LLM streams do
        fragments = [
            '{"',
            "code",
            '": "',
            "print(",
            "'hello')",
            "\\n",
            "print(",
            "'world')",
            '"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == "print('hello')\nprint('world')"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_streaming_with_tabs_and_newlines(self, mock_get_tool: MagicMock) -> None:
        """Streams code with tabs and newlines."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "',
            "if True:",
            "\\n",
            "\\t",
            "pass",
            '"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == "if True:\n\tpass"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_split_escape_sequence(self, mock_get_tool: MagicMock) -> None:
        """An escape sequence split across two fragments (backslash in one,
        'n' in the next) should still decode correctly."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "hello',
            "\\",
            "n",
            'world"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == "hello\nworld"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_multiple_newlines_and_indentation(self, mock_get_tool: MagicMock) -> None:
        """Streams a multi-line function with multiple escape sequences."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "',
            "def foo():",
            "\\n",
            "\\t",
            "x = 1",
            "\\n",
            "\\t",
            "return x",
            '"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == "def foo():\n\tx = 1\n\treturn x"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_two_keys_streamed_sequentially(self, mock_get_tool: MagicMock) -> None:
        """Streams code first, then a second key (language) — both decoded."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "',
            "x = 1",
            '", "language": "',
            "python",
            '"}',
        ]
        emitted = _stream_fragments(fragments, tc_map)
        # Should have emissions for both keys
        full = "".join(emitted)
        assert "x = 1" in full
        assert "python" in full

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_code_containing_dict_literal(self, mock_get_tool: MagicMock) -> None:
        """Python code like `x = {"key": "val"}` contains JSON-like patterns.
        The escaped quotes inside the *outer* JSON value should prevent the
        inner `"key":` from being mistaken for a top-level JSON key."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        # The LLM sends: {"code": "x = {\"key\": \"val\"}"}
        # The inner quotes are escaped as \" in the JSON value.
        fragments = [
            '{"code": "',
            "x = {",
            '\\"key\\"',
            ": ",
            '\\"val\\"',
            "}",
            '"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == 'x = {"key": "val"}'

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_code_with_colon_in_value(self, mock_get_tool: MagicMock) -> None:
        """Colons inside the string value should not confuse key detection."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "',
            "url = ",
            '\\"https://example.com\\"',
            '"}',
        ]
        full = "".join(_stream_fragments(fragments, tc_map))
        assert full == 'url = "https://example.com"'
class TestMaybeEmitArgumentDeltaEdgeCases:
    """Edge cases not covered by the standard test classes."""

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_no_emission_when_function_is_none(self, mock_get_tool: MagicMock) -> None:
        """Some delta chunks have function=None (e.g. role-only deltas)."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": '{"code": "x'}
        }
        delta = _make_tool_call_delta(arguments=None, function_is_none=True)
        assert _collect(tc_map, delta) == []

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_multiple_concurrent_tool_calls(self, mock_get_tool: MagicMock) -> None:
        """Two tool calls streaming at different indices in parallel."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""},
            1: {"id": "tc_2", "name": "python", "arguments": ""},
        }
        # One shared parser map — each index must get independent parser state
        parsers: dict[int, Parser] = {}
        pl = _make_placement()
        # Feed full JSON to index 0
        tc_map[0]["arguments"] = '{"code": "aaa"}'
        packets_0 = _collect(
            tc_map,
            _make_tool_call_delta(index=0, arguments='{"code": "aaa"}'),
            pl,
            parsers,
        )
        code_0 = ""
        for p in packets_0:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            code_0 += p.obj.argument_deltas.get("code", "")
        assert code_0 == "aaa"
        # Feed full JSON to index 1
        tc_map[1]["arguments"] = '{"code": "bbb"}'
        packets_1 = _collect(
            tc_map,
            _make_tool_call_delta(index=1, arguments='{"code": "bbb"}'),
            pl,
            parsers,
        )
        code_1 = ""
        for p in packets_1:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            code_1 += p.obj.argument_deltas.get("code", "")
        assert code_1 == "bbb"

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_delta_with_four_arguments(self, mock_get_tool: MagicMock) -> None:
        """A single delta contains four complete key-value pairs."""
        mock_get_tool.return_value = _mock_tool_class()
        full = '{"a": "one", "b": "two", "c": "three", "d": "four"}'
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        tc_map[0]["arguments"] = full
        parsers: dict[int, Parser] = {}
        packets = _collect(
            tc_map, _make_tool_call_delta(arguments=full), parsers=parsers
        )
        # Collect all argument deltas across packets
        all_deltas: dict[str, str] = {}
        for p in packets:
            assert isinstance(p.obj, ToolCallArgumentDelta)
            for k, v in p.obj.argument_deltas.items():
                all_deltas[k] = all_deltas.get(k, "") + v
        assert all_deltas == {
            "a": "one",
            "b": "two",
            "c": "three",
            "d": "four",
        }

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_delta_on_second_arg_after_first_complete(
        self, mock_get_tool: MagicMock
    ) -> None:
        """First argument is fully complete; delta only adds to the second."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = [
            '{"code": "print(1)", "lang": "py',
            '"}',
        ]
        emitted = _stream_fragments(fragments, tc_map)
        full = "".join(emitted)
        assert "print(1)" in full
        assert "py" in full

    @patch("onyx.chat.tool_call_args_streaming._get_tool_class")
    def test_non_string_values_skipped(self, mock_get_tool: MagicMock) -> None:
        """Non-string values (numbers, booleans, null) are skipped — they are
        available in the final tool-call kickoff packet. String arguments
        following them are still emitted."""
        mock_get_tool.return_value = _mock_tool_class()
        tc_map: dict[int, dict[str, Any]] = {
            0: {"id": "tc_1", "name": "python", "arguments": ""}
        }
        fragments = ['{"timeout": 30, "code": "hello"}']
        emitted = _stream_fragments(fragments, tc_map)
        full = "".join(emitted)
        assert full == "hello"

View File

@@ -0,0 +1,43 @@
"""Unit tests for _get_user_access_info helper function.
These tests mock all database operations and don't require a real database.
"""
from unittest.mock import MagicMock
from unittest.mock import patch
from sqlalchemy.orm import Session
from onyx.server.features.hierarchy.api import _get_user_access_info
def test_get_user_access_info_returns_email_and_groups() -> None:
    """_get_user_access_info returns the user's email and external group IDs."""
    user = MagicMock()
    user.email = "test@example.com"
    db_session = MagicMock(spec=Session)
    # Stub the external-group lookup so no database access happens.
    group_lookup = patch(
        "onyx.server.features.hierarchy.api.get_user_external_group_ids",
        return_value=["group1", "group2"],
    )
    with group_lookup:
        email, group_ids = _get_user_access_info(user, db_session)
    assert (email, group_ids) == ("test@example.com", ["group1", "group2"])
def test_get_user_access_info_with_no_groups() -> None:
    """User with no external groups returns an empty list."""
    user = MagicMock()
    user.email = "solo@example.com"
    db_session = MagicMock(spec=Session)
    # Stub the external-group lookup to simulate a user in no groups.
    group_lookup = patch(
        "onyx.server.features.hierarchy.api.get_user_external_group_ids",
        return_value=[],
    )
    with group_lookup:
        email, group_ids = _get_user_access_info(user, db_session)
    assert email == "solo@example.com"
    assert group_ids == []

View File

@@ -0,0 +1,188 @@
from io import BytesIO
from unittest.mock import MagicMock
import pytest
from fastapi import UploadFile
from onyx.server.features.projects import projects_file_utils as utils
class _Tokenizer:
def encode(self, text: str) -> list[int]:
return [1] * len(text)
class _NonSeekableFile(BytesIO):
def tell(self) -> int:
raise OSError("tell not supported")
def seek(self, *_args: object, **_kwargs: object) -> int:
raise OSError("seek not supported")
def _make_upload(filename: str, size: int, content: bytes | None = None) -> UploadFile:
    """Build an UploadFile with explicit size metadata.

    When `content` is omitted, the body is `size` filler bytes.
    """
    body = b"x" * size if content is None else content
    return UploadFile(filename=filename, file=BytesIO(body), size=size)
def _make_upload_no_size(filename: str, content: bytes) -> UploadFile:
    """Build an UploadFile whose size metadata is missing (size=None)."""
    return UploadFile(file=BytesIO(content), filename=filename, size=None)
def _patch_common_dependencies(monkeypatch: pytest.MonkeyPatch) -> None:
    """Stub the LLM lookup, tokenizer, and password-protection check so
    categorization tests run without external services."""
    monkeypatch.setattr(utils, "is_file_password_protected", lambda **_kw: False)
    monkeypatch.setattr(utils, "get_tokenizer", lambda **_kw: _Tokenizer())
    monkeypatch.setattr(utils, "fetch_default_llm_model", lambda _db: None)
def test_get_upload_size_bytes_falls_back_to_stream_size() -> None:
    """With size metadata absent, the size comes from seeking the stream,
    and the original read position is restored afterwards."""
    data = b"abcdef"
    upload = UploadFile(filename="example.txt", file=BytesIO(data), size=None)
    upload.file.seek(2)  # move off the start so position restoration is observable
    assert utils.get_upload_size_bytes(upload) == len(data)
    assert upload.file.tell() == 2
def test_get_upload_size_bytes_logs_warning_when_stream_size_unavailable(
    caplog: pytest.LogCaptureFixture,
) -> None:
    """A non-seekable stream yields size None and logs a warning naming the file."""
    upload = UploadFile(filename="non_seekable.txt", file=_NonSeekableFile(), size=None)
    caplog.set_level("WARNING")
    assert utils.get_upload_size_bytes(upload) is None
    assert "Could not determine upload size via stream seek" in caplog.text
    assert "non_seekable.txt" in caplog.text
def test_is_upload_too_large_logs_warning_when_size_unknown(
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Unknown size is treated as not-too-large, with a warning that the
    size-limit check was skipped."""
    unknown_size = _make_upload("size_unknown.txt", size=1)
    monkeypatch.setattr(utils, "get_upload_size_bytes", lambda _u: None)
    caplog.set_level("WARNING")
    assert utils.is_upload_too_large(unknown_size, max_bytes=100) is False
    assert "Could not determine upload size; skipping size-limit check" in caplog.text
    assert "size_unknown.txt" in caplog.text
def test_categorize_uploaded_files_accepts_size_under_limit(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A file strictly below the byte limit is categorized as acceptable."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    monkeypatch.setattr(utils, "estimate_image_tokens_for_upload", lambda _u: 10)
    small_file = _make_upload("small.png", size=99)
    outcome = utils.categorize_uploaded_files([small_file], MagicMock())
    assert len(outcome.acceptable) == 1
    assert len(outcome.rejected) == 0
def test_categorize_uploaded_files_uses_seek_fallback_when_upload_size_missing(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """When UploadFile.size is None, the seek fallback determines the size."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    monkeypatch.setattr(utils, "estimate_image_tokens_for_upload", lambda _u: 10)
    sized_by_content = _make_upload_no_size("small.png", content=b"x" * 99)
    outcome = utils.categorize_uploaded_files([sized_by_content], MagicMock())
    assert len(outcome.acceptable) == 1
    assert len(outcome.rejected) == 0
def test_categorize_uploaded_files_accepts_size_at_limit(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A file exactly at the byte limit (boundary case) is still accepted."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    monkeypatch.setattr(utils, "estimate_image_tokens_for_upload", lambda _u: 10)
    boundary_file = _make_upload("edge.png", size=100)
    outcome = utils.categorize_uploaded_files([boundary_file], MagicMock())
    assert len(outcome.acceptable) == 1
    assert len(outcome.rejected) == 0
def test_categorize_uploaded_files_rejects_size_over_limit_with_reason(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """A file one byte over the limit is rejected with a human-readable reason."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    monkeypatch.setattr(utils, "estimate_image_tokens_for_upload", lambda _u: 10)
    oversized = _make_upload("large.png", size=101)
    outcome = utils.categorize_uploaded_files([oversized], MagicMock())
    assert len(outcome.acceptable) == 0
    assert len(outcome.rejected) == 1
    assert outcome.rejected[0].reason == "Exceeds 1 MB file size limit"
def test_categorize_uploaded_files_mixed_batch_keeps_valid_and_rejects_oversized(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """In a mixed batch, valid files are kept while oversized ones are rejected."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    monkeypatch.setattr(utils, "estimate_image_tokens_for_upload", lambda _u: 10)
    within_limit = _make_upload("small.png", size=50)
    over_limit = _make_upload("large.png", size=101)
    outcome = utils.categorize_uploaded_files([within_limit, over_limit], MagicMock())
    assert [f.filename for f in outcome.acceptable] == ["small.png"]
    assert len(outcome.rejected) == 1
    assert outcome.rejected[0].filename == "large.png"
    assert outcome.rejected[0].reason == "Exceeds 1 MB file size limit"
def test_categorize_uploaded_files_enforces_size_limit_even_when_threshold_is_skipped(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """The byte limit still applies when the userfile threshold is skipped."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "SKIP_USERFILE_THRESHOLD", True)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    too_big = _make_upload("oversized.pdf", size=101)
    outcome = utils.categorize_uploaded_files([too_big], MagicMock())
    assert len(outcome.acceptable) == 0
    assert len(outcome.rejected) == 1
    assert outcome.rejected[0].reason == "Exceeds 1 MB file size limit"
def test_categorize_uploaded_files_checks_size_before_text_extraction(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Oversized files are rejected before any text extraction is attempted."""
    _patch_common_dependencies(monkeypatch)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_BYTES", 100)
    monkeypatch.setattr(utils, "USER_FILE_MAX_UPLOAD_SIZE_MB", 1)
    # Spy on the extractor; it must never run for a rejected file.
    extraction_spy = MagicMock(return_value="this should not run")
    monkeypatch.setattr(utils, "extract_file_text", extraction_spy)
    too_big = _make_upload("oversized.pdf", size=101)
    outcome = utils.categorize_uploaded_files([too_big], MagicMock())
    extraction_spy.assert_not_called()
    assert len(outcome.acceptable) == 0
    assert len(outcome.rejected) == 1
    assert outcome.rejected[0].reason == "Exceeds 1 MB file size limit"

View File

@@ -0,0 +1,32 @@
import pytest
from onyx.key_value_store.interface import KvKeyNotFoundError
from onyx.server.settings import store as settings_store
class _FakeKvStore:
    """KV store stub: every load is a miss."""

    def load(self, _key: str) -> dict:
        # Simulate an empty store regardless of key.
        raise KvKeyNotFoundError()
class _FakeCache:
def __init__(self) -> None:
self._vals: dict[str, bytes] = {}
def get(self, key: str) -> bytes | None:
return self._vals.get(key)
def set(self, key: str, value: str, ex: int | None = None) -> None: # noqa: ARG002
self._vals[key] = value.encode("utf-8")
def test_load_settings_includes_user_file_max_upload_size_mb(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """load_settings surfaces USER_FILE_MAX_UPLOAD_SIZE_MB when the KV store
    holds no stored settings."""
    monkeypatch.setattr(settings_store, "USER_FILE_MAX_UPLOAD_SIZE_MB", 77)
    monkeypatch.setattr(settings_store, "get_cache_backend", lambda: _FakeCache())
    monkeypatch.setattr(settings_store, "get_kv_store", lambda: _FakeKvStore())
    loaded = settings_store.load_settings()
    assert loaded.user_file_max_upload_size_mb == 77

View File

@@ -147,15 +147,18 @@ class TestSensitiveValueString:
)
assert sensitive1 != sensitive2
def test_equality_with_non_sensitive_raises(self) -> None:
"""Test that comparing with non-SensitiveValue raises error."""
def test_equality_with_non_sensitive_returns_not_equal(self) -> None:
"""Test that comparing with non-SensitiveValue is always not-equal.
Returns NotImplemented so Python falls back to identity comparison.
This is required for compatibility with SQLAlchemy's attribute tracking.
"""
sensitive = SensitiveValue(
encrypted_bytes=_encrypt_string("secret"),
decrypt_fn=_decrypt_string,
is_json=False,
)
with pytest.raises(SensitiveAccessError):
_ = sensitive == "secret"
assert not (sensitive == "secret")
class TestSensitiveValueJson:

22
cli/Dockerfile Normal file
View File

@@ -0,0 +1,22 @@
# Build stage: compile the Onyx CLI with Go on Alpine (base image pinned by digest).
FROM golang:1.26-alpine@sha256:2389ebfa5b7f43eeafbd6be0c3700cc46690ef842ad962f6c5bd6be49ed82039 AS builder
WORKDIR /app
COPY ./ .
# TARGETARCH is supplied by BuildKit for multi-architecture builds.
ARG TARGETARCH
# CGO_ENABLED=0 yields a static binary; -s -w strips symbol tables to shrink it.
RUN CGO_ENABLED=0 GOOS=linux GOARCH=${TARGETARCH} go build -ldflags="-s -w" -o onyx-cli .
# Pre-create the config directory so it exists in the scratch image below.
RUN mkdir -p /home/onyx/.config
# Runtime stage: empty base image containing only what is copied in.
FROM scratch
# CA bundle so the CLI can establish TLS connections.
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
# HOME owned by the unprivileged uid/gid 65534 ("nobody").
COPY --from=builder --chown=65534:65534 /home/onyx /home/onyx
COPY --from=builder /app/onyx-cli /onyx-cli
ENV HOME=/home/onyx
ENV XDG_CONFIG_HOME=/home/onyx/.config
# Run as the unprivileged user.
USER 65534:65534
ENTRYPOINT ["/onyx-cli"]

View File

@@ -61,6 +61,9 @@ services:
- POSTGRES_HOST=relational_db
- POSTGRES_DEFAULT_SCHEMA=${POSTGRES_DEFAULT_SCHEMA:-}
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- WEB_DOMAIN=${WEB_DOMAIN:-}
# MinIO configuration
@@ -77,6 +80,7 @@ services:
- DISABLE_RERANK_FOR_STREAMING=${DISABLE_RERANK_FOR_STREAMING:-}
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- MODEL_SERVER_PORT=${MODEL_SERVER_PORT:-}
- CODE_INTERPRETER_BASE_URL=${CODE_INTERPRETER_BASE_URL:-http://code-interpreter:8000}
- LOG_ONYX_MODEL_INTERACTIONS=${LOG_ONYX_MODEL_INTERACTIONS:-}
- LOG_VESPA_TIMING_INFORMATION=${LOG_VESPA_TIMING_INFORMATION:-}
- LOG_ENDPOINT_LATENCY=${LOG_ENDPOINT_LATENCY:-}
@@ -168,6 +172,9 @@ services:
- POSTGRES_DB=${POSTGRES_DB:-}
- POSTGRES_DEFAULT_SCHEMA=${POSTGRES_DEFAULT_SCHEMA:-}
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- WEB_DOMAIN=${WEB_DOMAIN:-}
# MinIO configuration
@@ -424,6 +431,50 @@ services:
max-size: "50m"
max-file: "6"
opensearch:
image: opensearchproject/opensearch:3.4.0
restart: unless-stopped
# Controls whether this service runs. It is now enabled by default. To
# disable it, uncomment the "profiles" line below and ensure that
# "opensearch-enabled" is not listed in COMPOSE_PROFILES in your environment
# (or, when running docker compose, name every desired service except this
# one). Additionally set OPENSEARCH_FOR_ONYX_ENABLED=false in your env.
# profiles: ["opensearch-enabled"]
environment:
# We need discovery.type=single-node so that OpenSearch doesn't try
# forming a cluster and waiting for other nodes to become live.
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
# This and the JVM config below come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
# We do this to avoid unstable performance from page swaps.
- bootstrap.memory_lock=true # Disable JVM heap memory swapping.
# Java heap should be ~50% of memory limit. For now we assume a limit of
# 4g although in practice the container can request more than this.
# See https://opster.com/guides/opensearch/opensearch-basics/opensearch-heap-size-usage-and-jvm-garbage-collection/
# Xms is the starting size, Xmx is the maximum size. These should be the
# same.
- "OPENSEARCH_JAVA_OPTS=-Xms2g -Xmx2g"
volumes:
- opensearch-data:/usr/share/opensearch/data
# These come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
ulimits:
# Similarly to bootstrap.memory_lock, we don't want to impose limits on
# how much memory a process can lock from being swapped.
memlock:
soft: -1 # Set memlock to unlimited (no soft or hard limit).
hard: -1
nofile:
soft: 65536 # Maximum number of open files for the opensearch user - set to at least 65536.
hard: 65536
logging:
driver: json-file
options:
max-size: "50m"
max-file: "6"
nginx:
image: nginx:1.25.5-alpine
restart: unless-stopped
@@ -508,3 +559,5 @@ volumes:
model_cache_huggingface:
indexing_huggingface_model_cache:
# mcp_server_logs:
# Persistent data for OpenSearch.
opensearch-data:

View File

@@ -21,6 +21,9 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
# MinIO configuration
@@ -55,6 +58,9 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- INDEXING_MODEL_SERVER_HOST=${INDEXING_MODEL_SERVER_HOST:-indexing_model_server}
@@ -228,6 +234,50 @@ services:
max-size: "50m"
max-file: "6"
opensearch:
image: opensearchproject/opensearch:3.4.0
restart: unless-stopped
# Controls whether this service runs. It is now enabled by default. To
# disable it, uncomment the "profiles" line below and ensure that
# "opensearch-enabled" is not listed in COMPOSE_PROFILES in your environment
# (or, when running docker compose, name every desired service except this
# one). Additionally set OPENSEARCH_FOR_ONYX_ENABLED=false in your env.
# profiles: ["opensearch-enabled"]
environment:
# We need discovery.type=single-node so that OpenSearch doesn't try
# forming a cluster and waiting for other nodes to become live.
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
# This and the JVM config below come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
# We do this to avoid unstable performance from page swaps.
- bootstrap.memory_lock=true # Disable JVM heap memory swapping.
# Java heap should be ~50% of memory limit. For now we assume a limit of
# 4g although in practice the container can request more than this.
# See https://opster.com/guides/opensearch/opensearch-basics/opensearch-heap-size-usage-and-jvm-garbage-collection/
# Xms is the starting size, Xmx is the maximum size. These should be the
# same.
- "OPENSEARCH_JAVA_OPTS=-Xms2g -Xmx2g"
volumes:
- opensearch-data:/usr/share/opensearch/data
# These come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
ulimits:
# Similarly to bootstrap.memory_lock, we don't want to impose limits on
# how much memory a process can lock from being swapped.
memlock:
soft: -1 # Set memlock to unlimited (no soft or hard limit).
hard: -1
nofile:
soft: 65536 # Maximum number of open files for the opensearch user - set to at least 65536.
hard: 65536
logging:
driver: json-file
options:
max-size: "50m"
max-file: "6"
nginx:
image: nginx:1.25.5-alpine
restart: unless-stopped
@@ -315,3 +365,5 @@ volumes:
model_cache_huggingface:
indexing_huggingface_model_cache:
# mcp_server_logs:
# Persistent data for OpenSearch.
opensearch-data:

View File

@@ -21,8 +21,12 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- CODE_INTERPRETER_BASE_URL=${CODE_INTERPRETER_BASE_URL:-http://code-interpreter:8000}
- USE_IAM_AUTH=${USE_IAM_AUTH}
- AWS_REGION_NAME=${AWS_REGION_NAME-}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID-}
@@ -68,6 +72,9 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- INDEXING_MODEL_SERVER_HOST=${INDEXING_MODEL_SERVER_HOST:-indexing_model_server}
@@ -251,6 +258,50 @@ services:
max-size: "50m"
max-file: "6"
opensearch:
image: opensearchproject/opensearch:3.4.0
restart: unless-stopped
# Controls whether this service runs. It is now enabled by default. To
# disable it, uncomment the "profiles" line below and ensure that
# "opensearch-enabled" is not listed in COMPOSE_PROFILES in your environment
# (or, when running docker compose, name every desired service except this
# one). Additionally set OPENSEARCH_FOR_ONYX_ENABLED=false in your env.
# profiles: ["opensearch-enabled"]
environment:
# We need discovery.type=single-node so that OpenSearch doesn't try
# forming a cluster and waiting for other nodes to become live.
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
# This and the JVM config below come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
# We do this to avoid unstable performance from page swaps.
- bootstrap.memory_lock=true # Disable JVM heap memory swapping.
# Java heap should be ~50% of memory limit. For now we assume a limit of
# 4g although in practice the container can request more than this.
# See https://opster.com/guides/opensearch/opensearch-basics/opensearch-heap-size-usage-and-jvm-garbage-collection/
# Xms is the starting size, Xmx is the maximum size. These should be the
# same.
- "OPENSEARCH_JAVA_OPTS=-Xms2g -Xmx2g"
volumes:
- opensearch-data:/usr/share/opensearch/data
# These come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
ulimits:
# Similarly to bootstrap.memory_lock, we don't want to impose limits on
# how much memory a process can lock from being swapped.
memlock:
soft: -1 # Set memlock to unlimited (no soft or hard limit).
hard: -1
nofile:
soft: 65536 # Maximum number of open files for the opensearch user - set to at least 65536.
hard: 65536
logging:
driver: json-file
options:
max-size: "50m"
max-file: "6"
nginx:
image: nginx:1.25.5-alpine
restart: unless-stopped
@@ -343,3 +394,5 @@ volumes:
# mcp_server_logs:
# Shared volume for persistent document storage (Craft file-system mode)
file-system:
# Persistent data for OpenSearch.
opensearch-data:

View File

@@ -22,8 +22,12 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- CODE_INTERPRETER_BASE_URL=${CODE_INTERPRETER_BASE_URL:-http://code-interpreter:8000}
- USE_IAM_AUTH=${USE_IAM_AUTH}
- AWS_REGION_NAME=${AWS_REGION_NAME-}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID-}
@@ -73,6 +77,9 @@ services:
- AUTH_TYPE=${AUTH_TYPE:-oidc}
- POSTGRES_HOST=relational_db
- VESPA_HOST=index
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=cache
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- INDEXING_MODEL_SERVER_HOST=${INDEXING_MODEL_SERVER_HOST:-indexing_model_server}
@@ -270,6 +277,50 @@ services:
max-size: "50m"
max-file: "6"
opensearch:
image: opensearchproject/opensearch:3.4.0
restart: unless-stopped
# Controls whether this service runs. It is now enabled by default. To
# disable it, uncomment the "profiles" line below and ensure that
# "opensearch-enabled" is not listed in COMPOSE_PROFILES in your environment
# (or, when running docker compose, name every desired service except this
# one). Additionally set OPENSEARCH_FOR_ONYX_ENABLED=false in your env.
# profiles: ["opensearch-enabled"]
environment:
# We need discovery.type=single-node so that OpenSearch doesn't try
# forming a cluster and waiting for other nodes to become live.
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
# This and the JVM config below come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
# We do this to avoid unstable performance from page swaps.
- bootstrap.memory_lock=true # Disable JVM heap memory swapping.
# Java heap should be ~50% of memory limit. For now we assume a limit of
# 4g although in practice the container can request more than this.
# See https://opster.com/guides/opensearch/opensearch-basics/opensearch-heap-size-usage-and-jvm-garbage-collection/
# Xms is the starting size, Xmx is the maximum size. These should be the
# same.
- "OPENSEARCH_JAVA_OPTS=-Xms2g -Xmx2g"
volumes:
- opensearch-data:/usr/share/opensearch/data
# These come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/
ulimits:
# Similarly to bootstrap.memory_lock, we don't want to impose limits on
# how much memory a process can lock from being swapped.
memlock:
soft: -1 # Set memlock to unlimited (no soft or hard limit).
hard: -1
nofile:
soft: 65536 # Maximum number of open files for the opensearch user - set to at least 65536.
hard: 65536
logging:
driver: json-file
options:
max-size: "50m"
max-file: "6"
nginx:
image: nginx:1.25.5-alpine
restart: unless-stopped
@@ -380,3 +431,5 @@ volumes:
# mcp_server_logs:
# Shared volume for persistent document storage (Craft file-system mode)
file-system:
# Persistent data for OpenSearch.
opensearch-data:

View File

@@ -57,6 +57,9 @@ services:
condition: service_started
index:
condition: service_started
opensearch:
condition: service_started
required: false
cache:
condition: service_started
inference_model_server:
@@ -78,9 +81,10 @@ services:
- VESPA_HOST=${VESPA_HOST:-index}
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-false}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=${REDIS_HOST:-cache}
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- CODE_INTERPRETER_BASE_URL=${CODE_INTERPRETER_BASE_URL:-http://code-interpreter:8000}
- S3_ENDPOINT_URL=${S3_ENDPOINT_URL:-http://minio:9000}
- S3_AWS_ACCESS_KEY_ID=${S3_AWS_ACCESS_KEY_ID:-minioadmin}
- S3_AWS_SECRET_ACCESS_KEY=${S3_AWS_SECRET_ACCESS_KEY:-minioadmin}
@@ -139,11 +143,19 @@ services:
- path: .env
required: false
depends_on:
- relational_db
- index
- cache
- inference_model_server
- indexing_model_server
relational_db:
condition: service_started
index:
condition: service_started
opensearch:
condition: service_started
required: false
cache:
condition: service_started
inference_model_server:
condition: service_started
indexing_model_server:
condition: service_started
restart: unless-stopped
environment:
- FILE_STORE_BACKEND=${FILE_STORE_BACKEND:-s3}
@@ -151,7 +163,7 @@ services:
- VESPA_HOST=${VESPA_HOST:-index}
- OPENSEARCH_HOST=${OPENSEARCH_HOST:-opensearch}
- OPENSEARCH_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD:-StrongPassword123!}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-false}
- ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=${OPENSEARCH_FOR_ONYX_ENABLED:-true}
- REDIS_HOST=${REDIS_HOST:-cache}
- MODEL_SERVER_HOST=${MODEL_SERVER_HOST:-inference_model_server}
- INDEXING_MODEL_SERVER_HOST=${INDEXING_MODEL_SERVER_HOST:-indexing_model_server}
@@ -406,7 +418,12 @@ services:
# Controls whether this service runs. In order to enable it, add
# opensearch-enabled to COMPOSE_PROFILES in the environment for this
# docker-compose.
profiles: ["opensearch-enabled"]
# NOTE: Now enabled by default. To explicitly disable this service,
# uncomment this profile and ensure COMPOSE_PROFILES in your env does not
# list the profile, or when running docker compose, include all desired
# service names but this one. Additionally set
# OPENSEARCH_FOR_ONYX_ENABLED=false in your env.
# profiles: ["opensearch-enabled"]
environment:
# We need discovery.type=single-node so that OpenSearch doesn't try
# forming a cluster and waiting for other nodes to become live.
@@ -416,11 +433,11 @@ services:
# We do this to avoid unstable performance from page swaps.
- bootstrap.memory_lock=true # Disable JVM heap memory swapping.
# Java heap should be ~50% of memory limit. For now we assume a limit of
# 2g although in practice the container can request more than this.
# 4g although in practice the container can request more than this.
# See https://opster.com/guides/opensearch/opensearch-basics/opensearch-heap-size-usage-and-jvm-garbage-collection/
# Xms is the starting size, Xmx is the maximum size. These should be the
# same.
- "OPENSEARCH_JAVA_OPTS=-Xms1g -Xmx1g"
- "OPENSEARCH_JAVA_OPTS=-Xms2g -Xmx2g"
volumes:
- opensearch-data:/usr/share/opensearch/data
# These come from the example in https://docs.opensearch.org/latest/install-and-configure/install-opensearch/docker/

View File

@@ -67,10 +67,8 @@ POSTGRES_PASSWORD=password
## remove s3-filestore from COMPOSE_PROFILES and set FILE_STORE_BACKEND=postgres.
COMPOSE_PROFILES=s3-filestore
FILE_STORE_BACKEND=s3
## Settings for enabling OpenSearch. Uncomment these and comment out
## COMPOSE_PROFILES above.
# COMPOSE_PROFILES=s3-filestore,opensearch-enabled
# OPENSEARCH_FOR_ONYX_ENABLED=true
## Setting for enabling OpenSearch.
OPENSEARCH_FOR_ONYX_ENABLED=true
## MinIO/S3 Configuration (only needed when FILE_STORE_BACKEND=s3)
S3_ENDPOINT_URL=http://minio:9000

View File

@@ -5,7 +5,7 @@ home: https://www.onyx.app/
sources:
- "https://github.com/onyx-dot-app/onyx"
type: application
version: 0.4.32
version: 0.4.33
appVersion: latest
annotations:
category: Productivity

View File

@@ -76,7 +76,10 @@ vespa:
memory: 32000Mi
opensearch:
enabled: false
# Enabled by default. Override to false to disable entirely; if using
# AWS-managed OpenSearch, also override to false here and set the
# appropriate env vars in the instance-specific values yaml.
enabled: true
# These values are passed to the opensearch subchart.
# See https://github.com/opensearch-project/helm-charts/blob/main/charts/opensearch/values.yaml
@@ -1158,8 +1161,10 @@ auth:
opensearch:
# Enable or disable this secret entirely. Will remove from env var
# configurations and remove any created secrets.
# Set to true when opensearch.enabled is true.
enabled: false
# Enabled by default. Override to false to disable entirely; if using
# AWS-managed OpenSearch, also override to false here and set the
# appropriate env vars in the instance-specific values yaml.
enabled: true
# Overwrite the default secret name, ignored if existingSecret is defined.
secretName: 'onyx-opensearch'
# Use a secret specified elsewhere.
@@ -1261,3 +1266,5 @@ configMap:
SKIP_USERFILE_THRESHOLD: ""
# For multi-tenant: comma-separated list of tenant IDs to skip threshold
SKIP_USERFILE_THRESHOLD_TENANT_IDS: ""
# Maximum user upload file size in MB for chat/projects uploads
USER_FILE_MAX_UPLOAD_SIZE_MB: ""

View File

@@ -18,6 +18,10 @@ variable "INTEGRATION_REPOSITORY" {
default = "onyxdotapp/onyx-integration"
}
variable "CLI_REPOSITORY" {
default = "onyxdotapp/onyx-cli"
}
variable "TAG" {
default = "latest"
}
@@ -64,3 +68,13 @@ target "integration" {
tags = ["${INTEGRATION_REPOSITORY}:${TAG}"]
}
target "cli" {
context = "cli"
dockerfile = "Dockerfile"
cache-from = ["type=registry,ref=${CLI_REPOSITORY}:latest"]
cache-to = ["type=inline"]
tags = ["${CLI_REPOSITORY}:${TAG}"]
}

3
web/.gitignore vendored
View File

@@ -47,3 +47,6 @@ next-env.d.ts
# generated clients ... in particular, the API to the Onyx backend itself!
/src/lib/generated
.jest-cache
# storybook
storybook-static

80
web/.storybook/README.md Normal file
View File

@@ -0,0 +1,80 @@
# Onyx Storybook
Storybook is an isolated development environment for UI components. It renders each component in a standalone "story" outside of the main app, so you can visually verify appearance, interact with props, and catch regressions without navigating through the full application.
The Onyx Storybook covers the full component library — from low-level `@opal/core` primitives up through `refresh-components` — giving designers and engineers a shared reference for every visual state.
**Production:** [onyx-storybook.vercel.app](https://onyx-storybook.vercel.app)
## Running Locally
```bash
cd web
npm run storybook # dev server on http://localhost:6006
npm run storybook:build # static build to storybook-static/
```
The dev server hot-reloads when you edit a component or story file.
## Writing Stories
Stories are **co-located** next to their component source:
```
lib/opal/src/core/interactive/
├── components.tsx ← the component
├── Interactive.stories.tsx ← the story
└── styles.css
src/refresh-components/buttons/
├── Button.tsx
└── Button.stories.tsx
```
### Minimal Template
```tsx
import type { Meta, StoryObj } from "@storybook/react";
import { MyComponent } from "./MyComponent";
const meta: Meta<typeof MyComponent> = {
title: "Category/MyComponent", // sidebar path
component: MyComponent,
tags: ["autodocs"], // generates a docs page from props
};
export default meta;
type Story = StoryObj<typeof MyComponent>;
export const Default: Story = {
args: { label: "Hello" },
};
```
### Conventions
- **Title format:** `Core/Name`, `Components/Name`, `Layouts/Name`, or `refresh-components/category/Name`
- **Tags:** Add `tags: ["autodocs"]` to auto-generate a props docs page
- **Decorators:** Components that use Radix tooltips need a `TooltipPrimitive.Provider` decorator
- **Layout:** Use `parameters: { layout: "fullscreen" }` for modals/popovers that use portals
## Dark Mode
Use the theme toggle (paint roller icon) in the Storybook toolbar to switch between light and dark modes. This adds/removes the `dark` class on the preview body, matching the app's `darkMode: "class"` Tailwind config. All color tokens from `colors.css` adapt automatically.
## Deployment
The production Storybook is deployed as a static site on Vercel. The build runs `npm run storybook:build` which outputs to `storybook-static/`, and Vercel serves that directory.
Deploys are triggered on merges to `main` when files in `web/lib/opal/`, `web/src/refresh-components/`, or `web/.storybook/` change.
## Component Layers
The sidebar organizes components by their layer in the design system:
| Layer | Path | Examples |
|-------|------|----------|
| **Core** | `lib/opal/src/core/` | Interactive, Hoverable |
| **Components** | `lib/opal/src/components/` | Button, OpenButton, Tag |
| **Layouts** | `lib/opal/src/layouts/` | Content, ContentAction, IllustrationContent |
| **refresh-components** | `src/refresh-components/` | Inputs, tables, modals, text, cards, tiles, etc. |

42
web/.storybook/main.ts Normal file
View File

@@ -0,0 +1,42 @@
import type { StorybookConfig } from "@storybook/react-vite";
import path from "path";
// Storybook configuration: discovers co-located *.stories files, registers
// addons, and adapts the Vite dev server with the app's path aliases plus
// stubs for Next.js-only modules that have no Vite equivalent.
const config: StorybookConfig = {
  stories: [
    "../lib/opal/src/**/*.stories.@(ts|tsx)",
    "../src/refresh-components/**/*.stories.@(ts|tsx)",
  ],
  addons: ["@storybook/addon-essentials", "@storybook/addon-themes"],
  framework: {
    name: "@storybook/react-vite",
    options: {},
  },
  staticDirs: ["../public"],
  docs: {
    autodocs: "tag",
  },
  typescript: {
    reactDocgen: "react-docgen-typescript",
  },
  viteFinal: async (viteConfig) => {
    // Resolve paths relative to this .storybook directory.
    const here = (rel: string) => path.resolve(__dirname, rel);

    viteConfig.resolve = viteConfig.resolve ?? {};
    viteConfig.resolve.alias = {
      ...viteConfig.resolve.alias,
      "@": here("../src"),
      "@opal": here("../lib/opal/src"),
      "@public": here("../public"),
      // Next.js module stubs for Vite
      "next/link": here("mocks/next-link.tsx"),
      "next/navigation": here("mocks/next-navigation.tsx"),
      "next/image": here("mocks/next-image.tsx"),
    };

    // Point PostCSS at the web/ root so Tailwind picks up the app's config.
    viteConfig.css = viteConfig.css ?? {};
    viteConfig.css.postcss = here("..");

    return viteConfig;
  },
};

export default config;

View File

@@ -0,0 +1,28 @@
import React from "react";
// Storybook stand-in for `next/image`: renders a plain `<img>` while
// honoring the subset of next/image props our components actually use.
interface ImageProps {
  src: string;
  alt: string;
  width?: number;
  height?: number;
  /** When true, mimics next/image fill mode: absolutely positioned, parent-sized. */
  fill?: boolean;
  [key: string]: unknown;
}

function Image({ src, alt, width, height, fill, ...rest }: ImageProps) {
  // In fill mode the element stretches over its positioned parent and the
  // intrinsic width/height attributes are dropped, matching next/image.
  const stretch: React.CSSProperties | undefined = fill
    ? { position: "absolute", inset: 0, width: "100%", height: "100%" }
    : undefined;
  return (
    <img
      {...(rest as React.ImgHTMLAttributes<HTMLImageElement>)}
      src={src}
      alt={alt}
      width={fill ? undefined : width}
      height={fill ? undefined : height}
      style={{ ...(rest.style as React.CSSProperties), ...stretch }}
    />
  );
}

export default Image;

View File

@@ -0,0 +1,28 @@
import React from "react";
// Storybook stand-in for `next/link`: renders a plain anchor after stripping
// the Next-specific props an `<a>` element would not understand.
interface LinkProps {
  href: string;
  children: React.ReactNode;
  [key: string]: unknown;
}

function Link({ href, children, ...rest }: LinkProps) {
  // Peel off next/link-only props so they never reach the DOM element.
  const {
    prefetch: _prefetch,
    scroll: _scroll,
    shallow: _shallow,
    replace: _replace,
    passHref: _passHref,
    locale: _locale,
    legacyBehavior: _legacyBehavior,
    ...anchorProps
  } = rest;
  return (
    <a href={href} {...anchorProps}>
      {children}
    </a>
  );
}

export default Link;

View File

@@ -0,0 +1,30 @@
/**
 * Storybook stand-ins for the `next/navigation` App Router APIs.
 * Hooks return inert defaults; `redirect`/`notFound` throw so an accidental
 * call surfaces loudly inside a story instead of silently doing nothing.
 */
export function useRouter() {
  return {
    push: (_target: string) => {},
    replace: (_target: string) => {},
    back: () => {},
    forward: () => {},
    refresh: () => {},
    prefetch: async (_target: string) => {},
  };
}

/** Always reports the root path. */
export function usePathname() {
  return "/";
}

/** An empty, effectively read-only query string. */
export function useSearchParams() {
  // NOTE(review): `ReadonlyURLSearchParams` is presumably imported or
  // declared in lines not visible here — confirm the name resolves.
  return new URLSearchParams() as ReadonlyURLSearchParams;
}

/** No dynamic route segments exist in Storybook. */
export function useParams() {
  return {};
}

export function redirect(_url: string): never {
  throw new Error("redirect() called in Storybook");
}

export function notFound(): never {
  throw new Error("notFound() called in Storybook");
}

View File

@@ -0,0 +1,11 @@
<!-- Preconnect for fonts loaded via globals.css @import -->
<link
rel="preconnect"
href="https://fonts.googleapis.com"
crossorigin="anonymous"
/>
<link
rel="preconnect"
href="https://fonts.gstatic.com"
crossorigin="anonymous"
/>

27
web/.storybook/preview.ts Normal file
View File

@@ -0,0 +1,27 @@
import type { Preview } from "@storybook/react";
import { withThemeByClassName } from "@storybook/addon-themes";
import "../src/app/globals.css";
// Global preview configuration: centers stories by default and exposes a
// light/dark toggle that flips the `dark` class on the preview body,
// matching the app's class-based Tailwind dark mode.
const themeDecorator = withThemeByClassName({
  themes: {
    // Empty string = no extra class for light; "dark" enables dark tokens.
    light: "",
    dark: "dark",
  },
  defaultTheme: "light",
});

const preview: Preview = {
  parameters: {
    layout: "centered",
    // The backgrounds addon would fight the theme toggle, so disable it.
    backgrounds: { disable: true },
    controls: {
      matchers: {
        color: /(background|color)$/i,
        date: /Date$/i,
      },
    },
  },
  decorators: [themeDecorator],
};

export default preview;

View File

@@ -156,6 +156,7 @@ module.exports = {
"**/src/app/**/*.test.tsx",
"**/src/components/**/*.test.tsx",
"**/src/lib/**/*.test.tsx",
"**/src/providers/**/*.test.tsx",
"**/src/refresh-components/**/*.test.tsx",
"**/src/hooks/**/*.test.tsx",
"**/src/sections/**/*.test.tsx",

View File

@@ -39,7 +39,7 @@ type ButtonProps = InteractiveStatelessProps &
/** Tooltip text shown on hover. */
tooltip?: string;
/** Width preset. `"auto"` shrink-wraps, `"full"` stretches to parent width. */
/** Width preset. `"fit"` shrink-wraps, `"full"` stretches to parent width. */
width?: WidthVariant;
/** Which side the tooltip appears on. */

View File

@@ -0,0 +1,83 @@
# LineItemButton
**Import:** `import { LineItemButton, type LineItemButtonProps } from "@opal/components";`
A composite component that wraps `Interactive.Stateful > Interactive.Container > ContentAction` into a single API. Use it for selectable list rows such as model pickers, menu items, or any row that acts like a button.
## Architecture
```
Interactive.Stateful <- selectVariant, state, interaction, onClick, href, ref
└─ Interactive.Container <- type, width, roundingVariant
└─ ContentAction <- withInteractive, paddingVariant="fit"
├─ Content <- icon, title, description, sizePreset, variant, ...
└─ rightChildren
```
`paddingVariant` is hardcoded to `"fit"` and `withInteractive` is always `true`. These are not exposed as props.
## Props
### Interactive surface
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `selectVariant` | `"select-light" \| "select-heavy"` | `"select-light"` | Interactive select variant |
| `state` | `InteractiveStatefulState` | `"empty"` | Value state (`"empty"`, `"filled"`, `"selected"`) |
| `interaction` | `InteractiveStatefulInteraction` | `"rest"` | JS-controlled interaction state override |
| `onClick` | `MouseEventHandler<HTMLElement>` | — | Click handler |
| `href` | `string` | — | Renders an anchor instead of a div |
| `target` | `string` | — | Anchor target (e.g. `"_blank"`) |
| `group` | `string` | — | Interactive group key |
| `ref` | `React.Ref<HTMLElement>` | — | Forwarded ref |
### Sizing
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `roundingVariant` | `InteractiveContainerRoundingVariant` | `"default"` | Corner rounding preset (height is fixed to the `"lg"` preset) |
| `width` | `WidthVariant` | `"full"` | Container width |
| `type` | `"submit" \| "button" \| "reset"` | `"button"` | HTML button type |
| `tooltip` | `string` | — | Tooltip text shown on hover |
| `tooltipSide` | `TooltipSide` | `"top"` | Tooltip side |
### Content (pass-through to ContentAction)
| Prop | Type | Default | Description |
|------|------|---------|-------------|
| `title` | `string` | **(required)** | Row label |
| `icon` | `IconFunctionComponent` | — | Left icon |
| `description` | `string` | — | Description below the title |
| `sizePreset` | `SizePreset` | `"headline"` | Content size preset |
| `variant` | `ContentVariant` | `"heading"` | Content layout variant |
| `rightChildren` | `ReactNode` | — | Content after the label (e.g. action button) |
All other `ContentAction` / `Content` props (`editable`, `onTitleChange`, `optional`, `auxIcon`, `tag`, etc.) are also passed through. Note: `withInteractive` is always `true` inside `LineItemButton` and cannot be overridden.
## Usage
```tsx
import { LineItemButton } from "@opal/components";
// Simple selectable row
<LineItemButton
selectVariant="select-heavy"
state={isSelected ? "selected" : "empty"}
roundingVariant="compact"
onClick={handleClick}
title="gpt-4o"
sizePreset="main-ui"
variant="section"
/>
// With right-side action
<LineItemButton
selectVariant="select-heavy"
state={isSelected ? "selected" : "empty"}
onClick={handleClick}
title="claude-opus-4"
sizePreset="main-ui"
variant="section"
rightChildren={<Tag title="Default" color="blue" />}
/>
```

View File

@@ -0,0 +1,137 @@
import "@opal/components/tooltip.css";
import {
Interactive,
type InteractiveStatefulState,
type InteractiveStatefulInteraction,
type InteractiveStatefulProps,
InteractiveContainerRoundingVariant,
} from "@opal/core";
import { type WidthVariant } from "@opal/shared";
import type { TooltipSide } from "@opal/components";
import type { DistributiveOmit } from "@opal/types";
import type { ContentActionProps } from "@opal/layouts/content-action/components";
import { ContentAction } from "@opal/layouts";
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

// Everything ContentAction accepts, minus the props LineItemButton fixes
// itself (`paddingVariant`, `withInteractive`) and the two (`widthVariant`,
// `ref`) that are re-exposed below under LineItemButton's own naming.
type ContentPassthroughProps = DistributiveOmit<
  ContentActionProps,
  "paddingVariant" | "widthVariant" | "ref" | "withInteractive"
>;

// Props owned by LineItemButton itself: the Interactive surface, sizing, and
// tooltip — as opposed to the Content props passed through to ContentAction.
// NOTE(review): `InteractiveContainerRoundingVariant` is imported without the
// `type` keyword while its siblings use `type` imports — confirm intentional.
type LineItemButtonOwnProps = {
  /** Interactive select variant. @default "select-light" */
  selectVariant?: "select-light" | "select-heavy";
  /** Value state. @default "empty" */
  state?: InteractiveStatefulState;
  /** JS-controllable interaction state override. @default "rest" */
  interaction?: InteractiveStatefulInteraction;
  /** Click handler. */
  onClick?: InteractiveStatefulProps["onClick"];
  /** When provided, renders an anchor instead of a div. */
  href?: string;
  /** Anchor target (e.g. "_blank"). */
  target?: string;
  /** Interactive group key. */
  group?: string;
  /** Forwarded ref. */
  ref?: React.Ref<HTMLElement>;
  /** Corner rounding preset (height is always content-driven). @default "default" */
  roundingVariant?: InteractiveContainerRoundingVariant;
  /** Container width. @default "full" */
  width?: WidthVariant;
  /** HTML button type. @default "button" */
  type?: "submit" | "button" | "reset";
  /** Tooltip text shown on hover. */
  tooltip?: string;
  /** Which side the tooltip appears on. @default "top" */
  tooltipSide?: TooltipSide;
};

// Public prop surface: Content pass-through plus LineItemButton's own props.
type LineItemButtonProps = ContentPassthroughProps & LineItemButtonOwnProps;
// ---------------------------------------------------------------------------
// LineItemButton
// ---------------------------------------------------------------------------

/**
 * Selectable list-row button: an `Interactive.Stateful` surface wrapping a
 * `ContentAction` row, optionally wrapped in a Radix tooltip.
 *
 * `withInteractive` is always on and `paddingVariant` is pinned to `"fit"`;
 * every prop not destructured below is forwarded to `ContentAction`.
 */
function LineItemButton({
  // Interactive surface
  selectVariant = "select-light",
  state,
  interaction,
  onClick,
  href,
  target,
  group,
  ref,
  // Sizing
  roundingVariant = "default",
  width = "full",
  type = "button",
  tooltip,
  tooltipSide = "top",
  // ContentAction pass-through
  ...contentProps
}: LineItemButtonProps) {
  const row = (
    <Interactive.Stateful
      variant={selectVariant}
      state={state}
      interaction={interaction}
      onClick={onClick}
      href={href}
      target={target}
      group={group}
      ref={ref}
    >
      <Interactive.Container
        type={type}
        widthVariant={width}
        heightVariant="lg"
        roundingVariant={roundingVariant}
      >
        <ContentAction
          {...(contentProps as ContentActionProps)}
          withInteractive
          paddingVariant="fit"
        />
      </Interactive.Container>
    </Interactive.Stateful>
  );

  // Only build the Radix tooltip tree when tooltip text was provided.
  return tooltip ? (
    <TooltipPrimitive.Root>
      <TooltipPrimitive.Trigger asChild>{row}</TooltipPrimitive.Trigger>
      <TooltipPrimitive.Portal>
        <TooltipPrimitive.Content
          className="opal-tooltip"
          side={tooltipSide}
          sideOffset={4}
        >
          {tooltip}
        </TooltipPrimitive.Content>
      </TooltipPrimitive.Portal>
    </TooltipPrimitive.Root>
  ) : (
    row
  );
}

export { LineItemButton, type LineItemButtonProps };

View File

@@ -56,7 +56,7 @@ type SelectButtonProps = InteractiveStatefulProps &
/** Tooltip text shown on hover. */
tooltip?: string;
/** Width preset. `"auto"` shrink-wraps, `"full"` stretches to parent width. */
/** Width preset. `"fit"` shrink-wraps, `"full"` stretches to parent width. */
width?: WidthVariant;
/** Which side the tooltip appears on. */

View File

@@ -19,6 +19,12 @@ export {
type OpenButtonProps,
} from "@opal/components/buttons/open-button/components";
/* LineItemButton */
export {
LineItemButton,
type LineItemButtonProps,
} from "@opal/components/buttons/line-item-button/components";
/* Tag */
export {
Tag,

View File

@@ -0,0 +1,47 @@
import type { Meta, StoryObj } from "@storybook/react";
import { Tag } from "@opal/components";
import { SvgAlertCircle } from "@opal/icons";
// Every color the Tag component supports, used by the gallery stories below.
const TAG_COLORS = ["green", "purple", "blue", "gray", "amber"] as const;

const meta: Meta<typeof Tag> = {
  title: "opal/components/Tag",
  component: Tag,
  tags: ["autodocs"],
};

export default meta;
type Story = StoryObj<typeof Tag>;

/** Renders one Tag per supported color in a horizontal strip. */
function colorStrip(withIcon: boolean) {
  return (
    <div className="flex items-center gap-2">
      {TAG_COLORS.map((tagColor) => (
        <Tag
          key={tagColor}
          title={tagColor}
          color={tagColor}
          icon={withIcon ? SvgAlertCircle : undefined}
        />
      ))}
    </div>
  );
}

export const Default: Story = {
  args: {
    title: "Label",
  },
};

export const AllColors: Story = {
  render: () => colorStrip(false),
};

export const WithIcon: Story = {
  args: {
    title: "Alert",
    icon: SvgAlertCircle,
  },
};

export const AllColorsWithIcon: Story = {
  render: () => colorStrip(true),
};

View File

@@ -2,6 +2,7 @@ import "@opal/core/animations/styles.css";
import React, { createContext, useContext, useState, useCallback } from "react";
import { cn } from "@opal/utils";
import type { WithoutStyles } from "@opal/types";
import { widthVariants, type WidthVariant } from "@opal/shared";
// ---------------------------------------------------------------------------
// Context-per-group registry
@@ -38,6 +39,10 @@ interface HoverableRootProps
extends WithoutStyles<React.HTMLAttributes<HTMLDivElement>> {
children: React.ReactNode;
group: string;
/** Width preset. @default "auto" */
widthVariant?: WidthVariant;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
type HoverableItemVariant = "opacity-on-hover";
@@ -47,6 +52,8 @@ interface HoverableItemProps
children: React.ReactNode;
group?: string;
variant?: HoverableItemVariant;
/** Ref forwarded to the item `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -77,6 +84,8 @@ interface HoverableItemProps
function HoverableRoot({
group,
children,
widthVariant = "auto",
ref,
onMouseEnter: consumerMouseEnter,
onMouseLeave: consumerMouseLeave,
...props
@@ -103,7 +112,13 @@ function HoverableRoot({
return (
<GroupContext.Provider value={hovered}>
<div {...props} onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave}>
<div
{...props}
ref={ref}
className={cn(widthVariants[widthVariant])}
onMouseEnter={onMouseEnter}
onMouseLeave={onMouseLeave}
>
{children}
</div>
</GroupContext.Provider>
@@ -147,6 +162,7 @@ function HoverableItem({
group,
variant = "opacity-on-hover",
children,
ref,
...props
}: HoverableItemProps) {
const contextValue = useContext(
@@ -165,6 +181,7 @@ function HoverableItem({
return (
<div
{...props}
ref={ref}
className={cn("hoverable-item")}
data-hoverable-variant={variant}
data-hoverable-active={

View File

@@ -10,7 +10,7 @@ Structural container shared by both `Interactive.Stateless` and `Interactive.Sta
|------|------|---------|-------------|
| `heightVariant` | `SizeVariant` | `"lg"` | Height preset (`2xs``lg`, `fit`) |
| `roundingVariant` | `"default" \| "compact" \| "mini"` | `"default"` | Border-radius preset |
| `widthVariant` | `WidthVariant` | — | Width preset (`auto`, `full`) |
| `widthVariant` | `WidthVariant` | — | Width preset (`"auto"`, `"fit"`, `"full"`) |
| `border` | `boolean` | `false` | Renders a 1px border |
| `type` | `"submit" \| "button" \| "reset"` | — | When set, renders a `<button>` element |

View File

@@ -78,7 +78,7 @@ interface InteractiveContainerProps
/**
* Width preset controlling the container's horizontal size.
*
* @default "auto"
* @default "fit"
*/
widthVariant?: WidthVariant;
}
@@ -101,7 +101,7 @@ function InteractiveContainer({
border,
roundingVariant = "default",
heightVariant = "lg",
widthVariant = "auto",
widthVariant = "fit",
...props
}: InteractiveContainerProps) {
const { allowClick } = useDisabled();

View File

@@ -59,8 +59,8 @@
--------------------------------------------------------------------------- */
.interactive[data-interactive-variant="select-heavy"][data-interactive-state="filled"] {
@apply bg-transparent;
--interactive-foreground: var(--text-04);
--interactive-foreground-icon: var(--text-04);
--interactive-foreground: var(--action-link-05);
--interactive-foreground-icon: var(--action-link-05);
}
.interactive[data-interactive-variant="select-heavy"][data-interactive-state="filled"]:hover:not(
[data-disabled]
@@ -76,9 +76,7 @@
.interactive[data-interactive-variant="select-heavy"][data-interactive-state="filled"][data-interaction="active"]:not(
[data-disabled]
) {
@apply bg-background-neutral-00;
--interactive-foreground: var(--text-05);
--interactive-foreground-icon: var(--text-05);
@apply bg-background-tint-00;
}
.interactive[data-interactive-variant="select-heavy"][data-interactive-state="filled"][data-disabled] {
@apply bg-transparent;
@@ -158,8 +156,8 @@
--------------------------------------------------------------------------- */
.interactive[data-interactive-variant="select-light"][data-interactive-state="filled"] {
@apply bg-transparent;
--interactive-foreground: var(--text-04);
--interactive-foreground-icon: var(--text-04);
--interactive-foreground: var(--action-link-05);
--interactive-foreground-icon: var(--action-link-05);
}
.interactive[data-interactive-variant="select-light"][data-interactive-state="filled"]:hover:not(
[data-disabled]
@@ -175,9 +173,7 @@
.interactive[data-interactive-variant="select-light"][data-interactive-state="filled"][data-interaction="active"]:not(
[data-disabled]
) {
@apply bg-background-neutral-00;
--interactive-foreground: var(--text-05);
--interactive-foreground-icon: var(--text-05);
@apply bg-background-tint-00;
}
.interactive[data-interactive-variant="select-light"][data-interactive-state="filled"][data-disabled] {
@apply bg-transparent;

View File

@@ -40,6 +40,9 @@ interface BodyLayoutProps {
/** Title prominence. Default: `"default"`. */
prominence?: BodyProminence;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -80,6 +83,7 @@ function BodyLayout({
sizePreset = "main-ui",
orientation = "inline",
prominence = "default",
ref,
}: BodyLayoutProps) {
const config = BODY_PRESETS[sizePreset];
const titleColorClass =
@@ -87,6 +91,7 @@ function BodyLayout({
return (
<div
ref={ref}
className="opal-content-body"
data-orientation={orientation}
style={{ gap: config.gap }}

View File

@@ -48,6 +48,12 @@ interface ContentLgProps {
/** Size preset. Default: `"headline"`. */
sizePreset?: ContentLgSizePreset;
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -86,6 +92,8 @@ function ContentLg({
description,
editable,
onTitleChange,
withInteractive,
ref,
}: ContentLgProps) {
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
@@ -104,7 +112,12 @@ function ContentLg({
}
return (
<div className="opal-content-lg" style={{ gap: config.gap }}>
<div
ref={ref}
className="opal-content-lg"
data-interactive={withInteractive || undefined}
style={{ gap: config.gap }}
>
{Icon && (
<div
className={cn(

View File

@@ -61,6 +61,12 @@ interface ContentMdProps {
/** Size preset. Default: `"main-ui"`. */
sizePreset?: ContentMdSizePreset;
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -130,6 +136,8 @@ function ContentMd({
auxIcon,
tag,
sizePreset = "main-ui",
withInteractive,
ref,
}: ContentMdProps) {
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
@@ -149,7 +157,12 @@ function ContentMd({
}
return (
<div className="opal-content-md" style={{ gap: config.gap }}>
<div
ref={ref}
className="opal-content-md"
data-interactive={withInteractive || undefined}
style={{ gap: config.gap }}
>
{Icon && (
<div
className={cn(

View File

@@ -40,6 +40,12 @@ interface ContentSmProps {
/** Title prominence. Default: `"default"`. */
prominence?: ContentSmProminence;
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -80,14 +86,18 @@ function ContentSm({
sizePreset = "main-ui",
orientation = "inline",
prominence = "default",
withInteractive,
ref,
}: ContentSmProps) {
const config = CONTENT_SM_PRESETS[sizePreset];
return (
<div
ref={ref}
className="opal-content-sm"
data-orientation={orientation}
data-prominence={prominence}
data-interactive={withInteractive || undefined}
style={{ gap: config.gap }}
>
{Icon && (

View File

@@ -60,6 +60,12 @@ interface ContentXlProps {
/** Optional tertiary icon rendered in the icon row. */
moreIcon2?: IconFunctionComponent;
/** When `true`, the title color hooks into `Interactive`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -106,6 +112,8 @@ function ContentXl({
onTitleChange,
moreIcon1: MoreIcon1,
moreIcon2: MoreIcon2,
withInteractive,
ref,
}: ContentXlProps) {
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
@@ -124,7 +132,11 @@ function ContentXl({
}
return (
<div className="opal-content-xl">
<div
ref={ref}
className="opal-content-xl"
data-interactive={withInteractive || undefined}
>
{(Icon || MoreIcon1 || MoreIcon2) && (
<div className="opal-content-xl-icon-row">
{Icon && (

View File

@@ -52,6 +52,9 @@ interface HeadingLayoutProps {
/** Variant controls icon placement. `"heading"` = top, `"section"` = inline. Default: `"heading"`. */
variant?: HeadingVariant;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -91,6 +94,7 @@ function HeadingLayout({
description,
editable,
onTitleChange,
ref,
}: HeadingLayoutProps) {
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
@@ -112,6 +116,7 @@ function HeadingLayout({
return (
<div
ref={ref}
className="opal-content-heading"
data-icon-placement={iconPlacement}
style={{ gap: iconPlacement === "left" ? config.gap : undefined }}

View File

@@ -61,6 +61,9 @@ interface LabelLayoutProps {
/** Size preset. Default: `"main-ui"`. */
sizePreset?: LabelSizePreset;
/** Ref forwarded to the root `<div>`. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -130,6 +133,7 @@ function LabelLayout({
auxIcon,
tag,
sizePreset = "main-ui",
ref,
}: LabelLayoutProps) {
const [editing, setEditing] = useState(false);
const [editValue, setEditValue] = useState(title);
@@ -149,7 +153,7 @@ function LabelLayout({
}
return (
<div className="opal-content-label" style={{ gap: config.gap }}>
<div ref={ref} className="opal-content-label" style={{ gap: config.gap }}>
{Icon && (
<div
className={cn(

View File

@@ -54,11 +54,18 @@ interface ContentBaseProps {
* Uses the shared `WidthVariant` scale from `@opal/shared`.
*
* - `"auto"` — Shrink-wraps to content width
* - `"fit"` — Shrink-wraps to content width
* - `"full"` — Stretches to fill the parent's width
*
* @default "auto"
* @default "fit"
*/
widthVariant?: WidthVariant;
/** When `true`, the title color hooks into `Interactive.Stateful`/`Interactive.Stateless`'s `--interactive-foreground` variable. */
withInteractive?: boolean;
/** Ref forwarded to the root `<div>` of the resolved layout. */
ref?: React.Ref<HTMLDivElement>;
}
// ---------------------------------------------------------------------------
@@ -122,11 +129,11 @@ function Content(props: ContentProps) {
sizePreset = "headline",
variant = "heading",
widthVariant = "auto",
withInteractive,
ref,
...rest
} = props;
const widthClass = widthVariants[widthVariant];
let layout: React.ReactNode = null;
// ContentXl / ContentLg: headline/section presets
@@ -135,6 +142,8 @@ function Content(props: ContentProps) {
layout = (
<ContentXl
sizePreset={sizePreset}
withInteractive={withInteractive}
ref={ref}
{...(rest as Omit<ContentXlProps, "sizePreset">)}
/>
);
@@ -142,6 +151,8 @@ function Content(props: ContentProps) {
layout = (
<ContentLg
sizePreset={sizePreset}
withInteractive={withInteractive}
ref={ref}
{...(rest as Omit<ContentLgProps, "sizePreset">)}
/>
);
@@ -154,6 +165,8 @@ function Content(props: ContentProps) {
layout = (
<ContentMd
sizePreset={sizePreset}
withInteractive={withInteractive}
ref={ref}
{...(rest as Omit<ContentMdProps, "sizePreset">)}
/>
);
@@ -164,6 +177,8 @@ function Content(props: ContentProps) {
layout = (
<ContentSm
sizePreset={sizePreset}
withInteractive={withInteractive}
ref={ref}
{...(rest as Omit<
React.ComponentProps<typeof ContentSm>,
"sizePreset"
@@ -178,11 +193,7 @@ function Content(props: ContentProps) {
`Content: no layout matched for sizePreset="${sizePreset}" variant="${variant}"`
);
// "auto" → return layout directly (a block div with w-auto still
// stretches to its parent, defeating shrink-to-content).
if (widthVariant === "auto") return layout;
return <div className={widthClass}>{layout}</div>;
return <div className={widthVariants[widthVariant]}>{layout}</div>;
}
// ---------------------------------------------------------------------------

View File

@@ -22,6 +22,7 @@
.opal-content-xl-icon-row {
@apply flex flex-row items-center;
gap: 0.25rem;
}
/* ---------------------------------------------------------------------------
@@ -63,7 +64,7 @@
}
.opal-content-xl-title {
@apply text-left overflow-hidden;
@apply text-left overflow-hidden text-text-04;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 1;
@@ -162,7 +163,7 @@
}
.opal-content-lg-title {
@apply text-left overflow-hidden;
@apply text-left overflow-hidden text-text-04;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 1;
@@ -255,7 +256,7 @@
}
.opal-content-md-title {
@apply text-left overflow-hidden;
@apply text-left overflow-hidden text-text-04;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 1;
@@ -361,8 +362,12 @@
@apply text-text-03;
}
.opal-content-sm[data-prominence="muted"] .opal-content-sm-icon {
@apply text-text-02;
}
.opal-content-sm[data-prominence="muted-2x"] .opal-content-sm-icon {
@apply text-text-02 stroke-text-02;
@apply text-text-02;
}
/* ---------------------------------------------------------------------------
@@ -385,3 +390,44 @@
.opal-content-sm[data-prominence="muted-2x"] .opal-content-sm-title {
@apply text-text-02;
}
/* ===========================================================================
Interactive-foreground opt-in
When a Content variant is nested inside an Interactive and
`withInteractive` is set, the title and icon delegate their color to the
`--interactive-foreground` / `--interactive-foreground-icon` CSS variables
controlled by the ancestor Interactive variant.
=========================================================================== */
.opal-content-xl[data-interactive] .opal-content-xl-title {
color: var(--interactive-foreground);
}
.opal-content-xl[data-interactive] .opal-content-xl-icon {
color: var(--interactive-foreground-icon);
}
.opal-content-lg[data-interactive] .opal-content-lg-title {
color: var(--interactive-foreground);
}
.opal-content-lg[data-interactive] .opal-content-lg-icon {
color: var(--interactive-foreground-icon);
}
.opal-content-md[data-interactive] .opal-content-md-title {
color: var(--interactive-foreground);
}
.opal-content-md[data-interactive] .opal-content-md-icon {
color: var(--interactive-foreground-icon);
}
.opal-content-sm[data-interactive] .opal-content-sm-title {
color: var(--interactive-foreground);
}
.opal-content-sm[data-interactive] .opal-content-sm-icon {
color: var(--interactive-foreground-icon);
}

View File

@@ -67,10 +67,12 @@ type SizeVariant = keyof typeof sizeVariants;
* | Key | Tailwind class |
* |--------|----------------|
* | `auto` | `w-auto` |
* | `fit` | `w-fit` |
* | `full` | `w-full` |
*/
const widthVariants = {
auto: "w-auto",
fit: "w-fit",
full: "w-full",
} as const;

View File

@@ -30,6 +30,11 @@ export interface IconProps extends SVGProps<SVGSVGElement> {
/** Strips `className` and `style` from a props type to enforce design-system styling. */
export type WithoutStyles<T> = Omit<T, "className" | "style">;
/**
 * Like `Omit` but distributes over union types, preserving discriminated
 * unions.
 *
 * The `T extends any` clause is the standard trick that forces the
 * conditional type to distribute over each union member, so
 * `DistributiveOmit<A | B, K>` resolves to `Omit<A, K> | Omit<B, K>` rather
 * than `Omit<A | B, K>` (which would collapse to the members' common keys).
 *
 * `PropertyKey` (string | number | symbol) replaces the equivalent but less
 * idiomatic `keyof any` constraint; the accepted keys are unchanged.
 *
 * @example
 * type Shape = { kind: "circle"; r: number } | { kind: "square"; s: number };
 * type Untagged = DistributiveOmit<Shape, "kind">; // { r } | { s }
 */
export type DistributiveOmit<T, K extends PropertyKey> = T extends any
  ? Omit<T, K>
  : never;
/**
* A React function component that accepts {@link IconProps}.
*

View File

@@ -8,7 +8,8 @@ const cspHeader = `
base-uri 'self';
form-action 'self';
${
process.env.NEXT_PUBLIC_CLOUD_ENABLED === "true"
process.env.NEXT_PUBLIC_CLOUD_ENABLED === "true" &&
process.env.NODE_ENV !== "development"
? "upgrade-insecure-requests;"
: ""
}

1571
web/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,9 @@
"test:ci": "jest --ci --maxWorkers=2 --silent --bail",
"test:changed": "jest --onlyChanged",
"test:diff": "jest --changedSince=main",
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand"
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"storybook": "storybook dev -p 6006",
"storybook:build": "storybook build -o storybook-static"
},
"dependencies": {
"@dnd-kit/core": "^6.1.0",
@@ -110,6 +112,11 @@
},
"devDependencies": {
"@playwright/test": "^1.39.0",
"@storybook/addon-essentials": "^8.6.18",
"@storybook/addon-themes": "^8.6.18",
"@storybook/blocks": "^8.6.18",
"@storybook/react": "^8.6.18",
"@storybook/react-vite": "^8.6.18",
"@tailwindcss/typography": "^0.5.19",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.0",
@@ -135,6 +142,7 @@
"jest-environment-jsdom": "^30.2.0",
"prettier": "3.1.0",
"stats.js": "^0.17.0",
"storybook": "^8.6.18",
"tailwindcss": "^3.4.17",
"ts-jest": "^29.2.5",
"ts-unused-exports": "^11.0.1",

Some files were not shown because too many files have changed in this diff Show More